global: bulk replace simple pycompat.iteritems(x) with x.items()...
Gregory Szorc
r49768:f254fc73 default

Note: the requested changes are too big and the content was truncated; the diff below (from hgext/absorb.py) is partial.

@@ -1,1164 +1,1161 @@
 # absorb.py
 #
 # Copyright 2016 Facebook, Inc.
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 """apply working directory changes to changesets (EXPERIMENTAL)
 
 The absorb extension provides a command to use annotate information to
 amend modified chunks into the corresponding non-public changesets.
 
 ::
 
     [absorb]
     # only check 50 recent non-public changesets at most
     max-stack-size = 50
     # whether to add noise to new commits to avoid obsolescence cycle
     add-noise = 1
     # make `amend --correlated` a shortcut to the main command
     amend-flag = correlated
 
     [color]
     absorb.description = yellow
     absorb.node = blue bold
     absorb.path = bold
 """
 
 # TODO:
 # * Rename config items to [commands] namespace
 # * Converge getdraftstack() with other code in core
 # * move many attributes on fixupstate to be private
 
 
 import collections
 
 from mercurial.i18n import _
 from mercurial.node import (
     hex,
     short,
 )
 from mercurial import (
     cmdutil,
     commands,
     context,
     crecord,
     error,
     linelog,
     mdiff,
     obsolete,
     patch,
     phases,
     pycompat,
     registrar,
     rewriteutil,
     scmutil,
     util,
 )
 from mercurial.utils import stringutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = b'ships-with-hg-core'
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
 configitem(b'absorb', b'add-noise', default=True)
 configitem(b'absorb', b'amend-flag', default=None)
 configitem(b'absorb', b'max-stack-size', default=50)
 
 colortable = {
     b'absorb.description': b'yellow',
     b'absorb.node': b'blue bold',
     b'absorb.path': b'bold',
 }
 
 defaultdict = collections.defaultdict
 
 
 class nullui(object):
     """blank ui object doing nothing"""
 
     debugflag = False
     verbose = False
     quiet = True
 
     def __getitem__(name):
         def nullfunc(*args, **kwds):
             return
 
         return nullfunc
 
 
 class emptyfilecontext(object):
     """minimal filecontext representing an empty file"""
 
     def __init__(self, repo):
         self._repo = repo
 
     def data(self):
         return b''
 
     def node(self):
         return self._repo.nullid
 
 
 def uniq(lst):
     """list -> list. remove duplicated items without changing the order"""
     seen = set()
     result = []
     for x in lst:
         if x not in seen:
             seen.add(x)
             result.append(x)
     return result
 
 
 def getdraftstack(headctx, limit=None):
     """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
 
     changesets are sorted in topo order, oldest first.
     return at most limit items, if limit is a positive number.
 
     merges are considered as non-draft as well. i.e. every commit
     returned has and only has 1 parent.
     """
     ctx = headctx
     result = []
     while ctx.phase() != phases.public:
         if limit and len(result) >= limit:
             break
         parents = ctx.parents()
         if len(parents) != 1:
             break
         result.append(ctx)
         ctx = parents[0]
     result.reverse()
     return result
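 
 # [editor's note: illustrative comment, not part of this changeset]
 # e.g. with a public commit P and drafts P<-A<-B<-C, where C is the working
 # directory parent, getdraftstack(repo[C]) walks C, B, A, stops at public P,
 # and returns [A, B, C] (oldest first); a merge in the chain also stops it.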
 
 
 def getfilestack(stack, path, seenfctxs=None):
     """([ctx], str, set) -> [fctx], {ctx: fctx}
 
     stack is a list of contexts, from old to new. usually they are what
     "getdraftstack" returns.
 
     follows renames, but not copies.
 
     seenfctxs is a set of filecontexts that will be considered "immutable".
     they are usually what this function returned in earlier calls, useful
     to avoid issues that a file was "moved" to multiple places and was then
     modified differently, like: "a" was copied to "b", "a" was also copied to
     "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
     and we enforce only one of them to be able to affect "a"'s content.
 
     return an empty list and an empty dict, if the specified path does not
     exist in stack[-1] (the top of the stack).
 
     otherwise, return a list of de-duplicated filecontexts, and the map to
     convert ctx in the stack to fctx, for possible mutable fctxs. the first item
     of the list would be outside the stack and should be considered immutable.
     the remaining items are within the stack.
 
     for example, given the following changelog and corresponding filelog
     revisions:
 
       changelog: 3----4----5----6----7
       filelog:   x    0----1----1----2 (x: no such file yet)
 
     - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
     - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
       dummy empty filecontext.
     - if stack = [2], returns ([], {})
     - if stack = [7], returns ([1, 2], {7: 2})
     - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
       removed, since 1 is immutable.
     """
     if seenfctxs is None:
         seenfctxs = set()
     assert stack
 
     if path not in stack[-1]:
         return [], {}
 
     fctxs = []
     fctxmap = {}
 
     pctx = stack[0].p1()  # the public (immutable) ctx we stop at
     for ctx in reversed(stack):
         if path not in ctx:  # the file is added in the next commit
             pctx = ctx
             break
         fctx = ctx[path]
         fctxs.append(fctx)
         if fctx in seenfctxs:  # treat fctx as the immutable one
             pctx = None  # do not add another immutable fctx
             break
         fctxmap[ctx] = fctx  # only for mutable fctxs
         copy = fctx.copysource()
         if copy:
             path = copy  # follow rename
             if path in ctx:  # but do not follow copy
                 pctx = ctx.p1()
                 break
 
     if pctx is not None:  # need an extra immutable fctx
         if path in pctx:
             fctxs.append(pctx[path])
         else:
             fctxs.append(emptyfilecontext(pctx.repo()))
 
     fctxs.reverse()
     # note: we rely on a property of hg: filerev is not reused for linear
     # history. i.e. it's impossible to have:
     # changelog: 4----5----6 (linear, no merges)
     # filelog:   1----2----1
     #                      ^ reuse filerev (impossible)
     # because parents are part of the hash. if that's not true, we need to
     # remove uniq and find a different way to identify fctxs.
     return uniq(fctxs), fctxmap
 
 
 class overlaystore(patch.filestore):
     """read-only, hybrid store based on a dict and ctx.
     memworkingcopy: {path: content}, overrides file contents.
     """
 
     def __init__(self, basectx, memworkingcopy):
         self.basectx = basectx
         self.memworkingcopy = memworkingcopy
 
     def getfile(self, path):
         """comply with mercurial.patch.filestore.getfile"""
         if path not in self.basectx:
             return None, None, None
         fctx = self.basectx[path]
         if path in self.memworkingcopy:
             content = self.memworkingcopy[path]
         else:
             content = fctx.data()
         mode = (fctx.islink(), fctx.isexec())
         copy = fctx.copysource()
         return content, mode, copy
 
 
 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None):
     """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
     memworkingcopy overrides file contents.
     """
     # parents must contain 2 items: (node1, node2)
     if parents is None:
         parents = ctx.repo().changelog.parents(ctx.node())
     if extra is None:
         extra = ctx.extra()
     if desc is None:
         desc = ctx.description()
     date = ctx.date()
     user = ctx.user()
     files = set(ctx.files()).union(memworkingcopy)
     store = overlaystore(ctx, memworkingcopy)
     return context.memctx(
         repo=ctx.repo(),
         parents=parents,
         text=desc,
         files=files,
         filectxfn=store,
         user=user,
         date=date,
         branch=None,
         extra=extra,
     )
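 
 # [editor's note: illustrative sketch, not part of this changeset; the file
 # name and contents below are hypothetical]
 # overlaycontext() is how absorb rewrites a commit in memory:
 #
 #     ctx = repo[b'.']
 #     mctx = overlaycontext({b'a.txt': b'new content\n'}, ctx)
 #     newnode = mctx.commit()  # clone of ctx with a.txt's content replaced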
 
 
 class filefixupstate(object):
     """state needed to apply fixups to a single file
 
     internally, it keeps file contents of several revisions and a linelog.
 
     the linelog uses odd revision numbers for original contents (fctxs passed
     to __init__), and even revision numbers for fixups, like:
 
         linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
         linelog rev 2: fixups made to self.fctxs[0]
         linelog rev 3: self.fctxs[1] (a child of fctxs[0])
         linelog rev 4: fixups made to self.fctxs[1]
         ...
 
     a typical use is like:
 
         1. call diffwith, to calculate self.fixups
         2. (optionally), present self.fixups to the user, or change it
         3. call apply, to apply changes
         4. read results from "finalcontents", or call getfinalcontent
     """
 
     def __init__(self, fctxs, path, ui=None, opts=None):
         """([fctx], ui or None) -> None
 
         fctxs should be linear, and sorted by topo order - oldest first.
         fctxs[0] will be considered as "immutable" and will not be changed.
         """
         self.fctxs = fctxs
         self.path = path
         self.ui = ui or nullui()
         self.opts = opts or {}
 
         # following fields are built from fctxs. they exist for perf reason
         self.contents = [f.data() for f in fctxs]
         self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
         self.linelog = self._buildlinelog()
         if self.ui.debugflag:
             assert self._checkoutlinelog() == self.contents
 
         # following fields will be filled later
         self.chunkstats = [0, 0]  # [adopted, total : int]
         self.targetlines = []  # [str]
         self.fixups = []  # [(linelog rev, a1, a2, b1, b2)]
         self.finalcontents = []  # [str]
         self.ctxaffected = set()
 
     def diffwith(self, targetfctx, fm=None):
         """calculate fixups needed by examining the differences between
         self.fctxs[-1] and targetfctx, chunk by chunk.
 
         targetfctx is the target state we move towards. we may or may not be
         able to get there because not all modified chunks can be amended into
         a non-public fctx unambiguously.
 
         call this only once, before apply().
 
         update self.fixups, self.chunkstats, and self.targetlines.
         """
         a = self.contents[-1]
         alines = self.contentlines[-1]
         b = targetfctx.data()
         blines = mdiff.splitnewlines(b)
         self.targetlines = blines
 
         self.linelog.annotate(self.linelog.maxrev)
         annotated = self.linelog.annotateresult  # [(linelog rev, linenum)]
         assert len(annotated) == len(alines)
         # add a dummy end line to make insertion at the end easier
         if annotated:
             dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
             annotated.append(dummyendline)
 
         # analyse diff blocks
         for chunk in self._alldiffchunks(a, b, alines, blines):
             newfixups = self._analysediffchunk(chunk, annotated)
             self.chunkstats[0] += bool(newfixups)  # 1 or 0
             self.chunkstats[1] += 1
             self.fixups += newfixups
             if fm is not None:
                 self._showchanges(fm, alines, blines, chunk, newfixups)
 
     def apply(self):
         """apply self.fixups. update self.linelog, self.finalcontents.
 
         call this only once, before getfinalcontent(), after diffwith().
         """
         # the following is unnecessary, as it's done by "diffwith":
         #     self.linelog.annotate(self.linelog.maxrev)
         for rev, a1, a2, b1, b2 in reversed(self.fixups):
             blines = self.targetlines[b1:b2]
             if self.ui.debugflag:
                 idx = (max(rev - 1, 0)) // 2
                 self.ui.write(
                     _(b'%s: chunk %d:%d -> %d lines\n')
                     % (short(self.fctxs[idx].node()), a1, a2, len(blines))
                 )
             self.linelog.replacelines(rev, a1, a2, b1, b2)
         if self.opts.get(b'edit_lines', False):
             self.finalcontents = self._checkoutlinelogwithedits()
         else:
             self.finalcontents = self._checkoutlinelog()
 
     def getfinalcontent(self, fctx):
         """(fctx) -> str. get modified file content for a given filecontext"""
         idx = self.fctxs.index(fctx)
         return self.finalcontents[idx]
 
     def _analysediffchunk(self, chunk, annotated):
         """analyse a different chunk and return new fixups found
 
         return [] if no lines from the chunk can be safely applied.
 
         the chunk (or lines) cannot be safely applied, if, for example:
           - the modified (deleted) lines belong to a public changeset
             (self.fctxs[0])
           - the chunk is a pure insertion and the adjacent lines (at most 2
             lines) belong to different non-public changesets, or do not belong
             to any non-public changesets.
           - the chunk is modifying lines from different changesets.
             in this case, if the number of lines deleted equals to the number
             of lines added, assume it's a simple 1:1 map (could be wrong).
             otherwise, give up.
           - the chunk is modifying lines from a single non-public changeset,
             but other revisions touch the area as well. i.e. the lines are
             not continuous as seen from the linelog.
         """
         a1, a2, b1, b2 = chunk
         # find involved indexes from annotate result
         involved = annotated[a1:a2]
         if not involved and annotated:  # a1 == a2 and a is not empty
             # pure insertion, check nearby lines. ignore lines belong
             # to the public (first) changeset (i.e. annotated[i][0] == 1)
             nearbylinenums = {a2, max(0, a1 - 1)}
             involved = [
                 annotated[i] for i in nearbylinenums if annotated[i][0] != 1
             ]
         involvedrevs = list({r for r, l in involved})
         newfixups = []
         if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
             # chunk belongs to a single revision
             rev = involvedrevs[0]
             if rev > 1:
                 fixuprev = rev + 1
                 newfixups.append((fixuprev, a1, a2, b1, b2))
         elif a2 - a1 == b2 - b1 or b1 == b2:
             # 1:1 line mapping, or chunk was deleted
             for i in pycompat.xrange(a1, a2):
                 rev, linenum = annotated[i]
                 if rev > 1:
                     if b1 == b2:  # deletion, simply remove that single line
                         nb1 = nb2 = 0
                     else:  # 1:1 line mapping, change the corresponding rev
                         nb1 = b1 + i - a1
                         nb2 = nb1 + 1
                     fixuprev = rev + 1
                     newfixups.append((fixuprev, i, i + 1, nb1, nb2))
         return self._optimizefixups(newfixups)
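 
     # [editor's note: illustrative comment, not part of this changeset]
     # a fixup tuple (rev, a1, a2, b1, b2) means: in fixup revision "rev"
     # (always even), replace lines a1:a2 of the stack-top content with
     # lines b1:b2 of the target content; e.g. the hypothetical fixup
     # (4, 2, 3, 2, 3) rewrites one line attributed to fctxs[1].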
 
     @staticmethod
     def _alldiffchunks(a, b, alines, blines):
         """like mdiff.allblocks, but only care about differences"""
         blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
         for chunk, btype in blocks:
             if btype != b'!':
                 continue
             yield chunk
 
     def _buildlinelog(self):
         """calculate the initial linelog based on self.content{,line}s.
         this is similar to running a partial "annotate".
         """
         llog = linelog.linelog()
         a, alines = b'', []
         for i in pycompat.xrange(len(self.contents)):
             b, blines = self.contents[i], self.contentlines[i]
             llrev = i * 2 + 1
             chunks = self._alldiffchunks(a, b, alines, blines)
             for a1, a2, b1, b2 in reversed(list(chunks)):
                 llog.replacelines(llrev, a1, a2, b1, b2)
             a, alines = b, blines
         return llog
 
     def _checkoutlinelog(self):
         """() -> [str]. check out file contents from linelog"""
         contents = []
         for i in pycompat.xrange(len(self.contents)):
             rev = (i + 1) * 2
             self.linelog.annotate(rev)
             content = b''.join(map(self._getline, self.linelog.annotateresult))
             contents.append(content)
         return contents
 
     def _checkoutlinelogwithedits(self):
         """() -> [str]. prompt all lines for edit"""
         alllines = self.linelog.getalllines()
         # header
         editortext = (
             _(
                 b'HG: editing %s\nHG: "y" means the line to the right '
                 b'exists in the changeset to the top\nHG:\n'
             )
             % self.fctxs[-1].path()
         )
         # [(idx, fctx)]. hide the dummy emptyfilecontext
         visiblefctxs = [
             (i, f)
             for i, f in enumerate(self.fctxs)
             if not isinstance(f, emptyfilecontext)
         ]
         for i, (j, f) in enumerate(visiblefctxs):
             editortext += _(b'HG: %s/%s %s %s\n') % (
                 b'|' * i,
                 b'-' * (len(visiblefctxs) - i + 1),
                 short(f.node()),
                 f.description().split(b'\n', 1)[0],
             )
         editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs))
         # figure out the lifetime of a line, this is relatively inefficient,
         # but probably fine
         lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
         for i, f in visiblefctxs:
             self.linelog.annotate((i + 1) * 2)
             for l in self.linelog.annotateresult:
                 lineset[l].add(i)
         # append lines
         for l in alllines:
             editortext += b'    %s : %s' % (
                 b''.join(
                     [
                         (b'y' if i in lineset[l] else b' ')
                         for i, _f in visiblefctxs
                     ]
                 ),
                 self._getline(l),
             )
         # run editor
         editedtext = self.ui.edit(editortext, b'', action=b'absorb')
         if not editedtext:
             raise error.InputError(_(b'empty editor text'))
         # parse edited result
         contents = [b''] * len(self.fctxs)
         leftpadpos = 4
         colonpos = leftpadpos + len(visiblefctxs) + 1
         for l in mdiff.splitnewlines(editedtext):
             if l.startswith(b'HG:'):
                 continue
             if l[colonpos - 1 : colonpos + 2] != b' : ':
                 raise error.InputError(_(b'malformed line: %s') % l)
             linecontent = l[colonpos + 2 :]
             for i, ch in enumerate(
                 pycompat.bytestr(l[leftpadpos : colonpos - 1])
             ):
                 if ch == b'y':
                     contents[visiblefctxs[i][0]] += linecontent
         # chunkstats is hard to calculate if anything changes, therefore
         # set them to just a simple value (1, 1).
         if editedtext != editortext:
             self.chunkstats = [1, 1]
         return contents
 
     def _getline(self, lineinfo):
         """((rev, linenum)) -> str. convert rev+line number to line content"""
         rev, linenum = lineinfo
         if rev & 1:  # odd: original line taken from fctxs
             return self.contentlines[rev // 2][linenum]
         else:  # even: fixup line from targetfctx
             return self.targetlines[linenum]
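 
     # [editor's note: illustrative comment, not part of this changeset]
     # e.g. _getline((5, 3)) returns self.contentlines[2][3], an original
     # line of fctxs[2], while _getline((4, 3)) returns self.targetlines[3],
     # a fixed-up line taken from the diff target.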
 
     def _iscontinuous(self, a1, a2, closedinterval=False):
         """(a1, a2 : int) -> bool
 
         check if these lines are continuous. i.e. no other insertions or
         deletions (from other revisions) among these lines.
 
         closedinterval decides whether a2 should be included or not. i.e. is
         it [a1, a2), or [a1, a2] ?
         """
         if a1 >= a2:
             return True
         llog = self.linelog
         offset1 = llog.getoffset(a1)
         offset2 = llog.getoffset(a2) + int(closedinterval)
         linesinbetween = llog.getalllines(offset1, offset2)
         return len(linesinbetween) == a2 - a1 + int(closedinterval)
 
     def _optimizefixups(self, fixups):
         """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
         merge adjacent fixups to make them less fragmented.
         """
         result = []
         pcurrentchunk = [[-1, -1, -1, -1, -1]]
 
         def pushchunk():
             if pcurrentchunk[0][0] != -1:
                 result.append(tuple(pcurrentchunk[0]))
 
         for i, chunk in enumerate(fixups):
             rev, a1, a2, b1, b2 = chunk
             lastrev = pcurrentchunk[0][0]
             lasta2 = pcurrentchunk[0][2]
             lastb2 = pcurrentchunk[0][4]
             if (
                 a1 == lasta2
                 and b1 == lastb2
                 and rev == lastrev
                 and self._iscontinuous(max(a1 - 1, 0), a1)
             ):
                 # merge into currentchunk
                 pcurrentchunk[0][2] = a2
                 pcurrentchunk[0][4] = b2
             else:
                 pushchunk()
                 pcurrentchunk[0] = list(chunk)
         pushchunk()
         return result
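 
     # [editor's note: illustrative comment, not part of this changeset]
     # e.g. the hypothetical input [(4, 0, 1, 0, 1), (4, 1, 3, 1, 3)] merges
     # into [(4, 0, 3, 0, 3)]: same rev, a1 == lasta2 and b1 == lastb2, and
     # the region is continuous per _iscontinuous.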
 
     def _showchanges(self, fm, alines, blines, chunk, fixups):
         def trim(line):
             if line.endswith(b'\n'):
                 line = line[:-1]
             return line
 
         # this is not optimized for perf but _showchanges only gets executed
         # with an extra command-line flag.
         a1, a2, b1, b2 = chunk
         aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
         for idx, fa1, fa2, fb1, fb2 in fixups:
             for i in pycompat.xrange(fa1, fa2):
                 aidxs[i - a1] = (max(idx, 1) - 1) // 2
             for i in pycompat.xrange(fb1, fb2):
                 bidxs[i - b1] = (max(idx, 1) - 1) // 2
 
         fm.startitem()
         fm.write(
             b'hunk',
             b' %s\n',
             b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
             label=b'diff.hunk',
         )
         fm.data(path=self.path, linetype=b'hunk')
 
         def writeline(idx, diffchar, line, linetype, linelabel):
             fm.startitem()
             node = b''
             if idx:
                 ctx = self.fctxs[idx]
                 fm.context(fctx=ctx)
                 node = ctx.hex()
                 self.ctxaffected.add(ctx.changectx())
             fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node')
             fm.write(
                 b'diffchar ' + linetype,
                 b'%s%s\n',
                 diffchar,
                 line,
                 label=linelabel,
             )
             fm.data(path=self.path, linetype=linetype)
 
         for i in pycompat.xrange(a1, a2):
             writeline(
                 aidxs[i - a1],
                 b'-',
                 trim(alines[i]),
                 b'deleted',
                 b'diff.deleted',
             )
         for i in pycompat.xrange(b1, b2):
             writeline(
                 bidxs[i - b1],
                 b'+',
                 trim(blines[i]),
                 b'inserted',
                 b'diff.inserted',
             )
 
 
 class fixupstate(object):
     """state needed to run absorb
 
     internally, it keeps paths and filefixupstates.
 
     a typical use is like filefixupstates:
 
         1. call diffwith, to calculate fixups
         2. (optionally), present fixups to the user, or edit fixups
         3. call apply, to apply changes to memory
         4. call commit, to commit changes to hg database
     """
 
     def __init__(self, stack, ui=None, opts=None):
         """([ctx], ui or None) -> None
 
         stack: should be linear, and sorted by topo order - oldest first.
         all commits in stack are considered mutable.
         """
         assert stack
         self.ui = ui or nullui()
         self.opts = opts or {}
         self.stack = stack
         self.repo = stack[-1].repo().unfiltered()
 
         # following fields will be filled later
         self.paths = []  # [str]
         self.status = None  # ctx.status output
         self.fctxmap = {}  # {path: {ctx: fctx}}
         self.fixupmap = {}  # {path: filefixupstate}
         self.replacemap = {}  # {oldnode: newnode or None}
         self.finalnode = None  # head after all fixups
         self.ctxaffected = set()  # ctx that will be absorbed into
 
     def diffwith(self, targetctx, match=None, fm=None):
         """diff and prepare fixups. update self.fixupmap, self.paths"""
         # only care about modified files
         self.status = self.stack[-1].status(targetctx, match)
         self.paths = []
         # but if --edit-lines is used, the user may want to edit files
         # even if they are not modified
         editopt = self.opts.get(b'edit_lines')
         if not self.status.modified and editopt and match:
             interestingpaths = match.files()
         else:
             interestingpaths = self.status.modified
         # prepare the filefixupstate
         seenfctxs = set()
         # sorting is necessary to eliminate ambiguity for the "double move"
         # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
         for path in sorted(interestingpaths):
             self.ui.debug(b'calculating fixups for %s\n' % path)
             targetfctx = targetctx[path]
             fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
             # ignore symbolic links or binary, or unchanged files
             if any(
                 f.islink() or stringutil.binary(f.data())
                 for f in [targetfctx] + fctxs
                 if not isinstance(f, emptyfilecontext)
             ):
                 continue
             if targetfctx.data() == fctxs[-1].data() and not editopt:
                 continue
             seenfctxs.update(fctxs[1:])
             self.fctxmap[path] = ctx2fctx
             fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
             if fm is not None:
                 fm.startitem()
                 fm.plain(b'showing changes for ')
                 fm.write(b'path', b'%s\n', path, label=b'absorb.path')
                 fm.data(linetype=b'path')
             fstate.diffwith(targetfctx, fm)
             self.fixupmap[path] = fstate
             self.paths.append(path)
             self.ctxaffected.update(fstate.ctxaffected)
 
     def apply(self):
         """apply fixups to individual filefixupstates"""
-        for path, state in pycompat.iteritems(self.fixupmap):
+        for path, state in self.fixupmap.items():
             if self.ui.debugflag:
                 self.ui.write(_(b'applying fixups to %s\n') % path)
             state.apply()
 
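     # [editor's note, not part of this changeset] pycompat.iteritems() was
     # a Python 2/3 compatibility shim; with Python 2 support gone it simply
     # forwards to dict.items(), so the s/pycompat.iteritems(x)/x.items()/
     # replacements in this changeset are behavior-preserving.
 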
     @property
     def chunkstats(self):
         """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
-        return {
-            path: state.chunkstats
-            for path, state in pycompat.iteritems(self.fixupmap)
-        }
+        return {path: state.chunkstats for path, state in self.fixupmap.items()}
 
     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
         with self.repo.transaction(b'absorb') as tr:
             self._commitstack()
             self._movebookmarks(tr)
             if self.repo[b'.'].node() in self.replacemap:
                 self._moveworkingdirectoryparent()
             self._cleanupoldcommits()
         return self.finalnode
 
     def printchunkstats(self):
         """print things like '1 of 2 chunk(s) applied'"""
         ui = self.ui
         chunkstats = self.chunkstats
         if ui.verbose:
             # chunkstats for each file
-            for path, stat in pycompat.iteritems(chunkstats):
+            for path, stat in chunkstats.items():
                 if stat[0]:
                     ui.write(
                         _(b'%s: %d of %d chunk(s) applied\n')
                         % (path, stat[0], stat[1])
                     )
         elif not ui.quiet:
             # a summary for all files
             stats = chunkstats.values()
             applied, total = (sum(s[i] for s in stats) for i in (0, 1))
             ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total))
 
     def _commitstack(self):
         """make new commits. update self.finalnode, self.replacemap.
         it is splitted from "commit" to avoid too much indentation.
         """
         # last node (20-char) committed by us
         lastcommitted = None
         # p1 which overrides the parent of the next commit, "None" means use
         # the original parent unchanged
         nextp1 = None
         for ctx in self.stack:
             memworkingcopy = self._getnewfilecontents(ctx)
             if not memworkingcopy and not lastcommitted:
                 # nothing changed, nothing commited
                 nextp1 = ctx
                 continue
             willbecomenoop = ctx.files() and self._willbecomenoop(
                 memworkingcopy, ctx, nextp1
             )
             if self.skip_empty_successor and willbecomenoop:
                 # changeset is no longer necessary
                 self.replacemap[ctx.node()] = None
                 msg = _(b'became empty and was dropped')
             else:
                 # changeset needs re-commit
                 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
                 lastcommitted = self.repo[nodestr]
                 nextp1 = lastcommitted
                 self.replacemap[ctx.node()] = lastcommitted.node()
                 if memworkingcopy:
                     if willbecomenoop:
                         msg = _(b'%d file(s) changed, became empty as %s')
                     else:
                         msg = _(b'%d file(s) changed, became %s')
                     msg = msg % (
                         len(memworkingcopy),
                         self._ctx2str(lastcommitted),
                     )
                 else:
                     msg = _(b'became %s') % self._ctx2str(lastcommitted)
             if self.ui.verbose and msg:
                 self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg))
         self.finalnode = lastcommitted and lastcommitted.node()
 
     def _ctx2str(self, ctx):
         if self.ui.debugflag:
             return b'%d:%s' % (ctx.rev(), ctx.hex())
         else:
             return b'%d:%s' % (ctx.rev(), short(ctx.node()))
 
     def _getnewfilecontents(self, ctx):
         """(ctx) -> {path: str}
 
         fetch file contents from filefixupstates.
         return the working copy overrides - files different from ctx.
         """
         result = {}
         for path in self.paths:
             ctx2fctx = self.fctxmap[path]  # {ctx: fctx}
             if ctx not in ctx2fctx:
                 continue
             fctx = ctx2fctx[ctx]
             content = fctx.data()
             newcontent = self.fixupmap[path].getfinalcontent(fctx)
             if content != newcontent:
                 result[fctx.path()] = newcontent
         return result
 
     def _movebookmarks(self, tr):
         repo = self.repo
         needupdate = [
             (name, self.replacemap[hsh])
-            for name, hsh in pycompat.iteritems(repo._bookmarks)
+            for name, hsh in repo._bookmarks.items()
             if hsh in self.replacemap
         ]
         changes = []
         for name, hsh in needupdate:
             if hsh:
                 changes.append((name, hsh))
                 if self.ui.verbose:
                     self.ui.write(
                         _(b'moving bookmark %s to %s\n') % (name, hex(hsh))
                     )
             else:
                 changes.append((name, None))
                 if self.ui.verbose:
                     self.ui.write(_(b'deleting bookmark %s\n') % name)
         repo._bookmarks.applychanges(repo, tr, changes)
 
     def _moveworkingdirectoryparent(self):
         if not self.finalnode:
             # Find the latest not-{obsoleted,stripped} parent.
             revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys())
             ctx = self.repo[revs.first()]
             self.finalnode = ctx.node()
         else:
             ctx = self.repo[self.finalnode]
 
         dirstate = self.repo.dirstate
         # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
         # be slow. in absorb's case, no need to invalidate fsmonitorstate.
         noop = lambda: 0
         restore = noop
         if util.safehasattr(dirstate, '_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate
 
             def restore():
                 dirstate._fsmonitorstate.invalidate = bak
 
             dirstate._fsmonitorstate.invalidate = noop
         try:
             with dirstate.parentchange():
                 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
         finally:
             restore()
 
     @staticmethod
     def _willbecomenoop(memworkingcopy, ctx, pctx=None):
         """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
 
         if it will become an empty commit (does not change anything, after the
         memworkingcopy overrides), return True. otherwise return False.
         """
         if not pctx:
             parents = ctx.parents()
             if len(parents) != 1:
                 return False
             pctx = parents[0]
         if ctx.branch() != pctx.branch():
             return False
         if ctx.extra().get(b'close'):
             return False
         # ctx changes more files (not a subset of memworkingcopy)
         if not set(ctx.files()).issubset(set(memworkingcopy)):
             return False
-        for path, content in pycompat.iteritems(memworkingcopy):
+        for path, content in memworkingcopy.items():
             if path not in pctx or path not in ctx:
                 return False
             fctx = ctx[path]
             pfctx = pctx[path]
             if pfctx.flags() != fctx.flags():
                 return False
             if pfctx.data() != content:
                 return False
         return True
 
     def _commitsingle(self, memworkingcopy, ctx, p1=None):
         """(ctx, {path: content}, node) -> node. make a single commit
 
         the commit is a clone from ctx, with a (optionally) different p1, and
         different file contents replaced by memworkingcopy.
         """
         parents = p1 and (p1, self.repo.nullid)
         extra = ctx.extra()
         if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
             extra[b'absorb_source'] = ctx.hex()
 
         desc = rewriteutil.update_hash_refs(
             ctx.repo(),
             ctx.description(),
             {
                 oldnode: [newnode]
                 for oldnode, newnode in self.replacemap.items()
             },
         )
         mctx = overlaycontext(
             memworkingcopy, ctx, parents, extra=extra, desc=desc
         )
         return mctx.commit()
 
     @util.propertycache
     def _useobsolete(self):
         """() -> bool"""
945 """() -> bool"""
949 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
946 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
950
947
951 def _cleanupoldcommits(self):
948 def _cleanupoldcommits(self):
952 replacements = {
949 replacements = {
953 k: ([v] if v is not None else [])
950 k: ([v] if v is not None else [])
954 for k, v in pycompat.iteritems(self.replacemap)
951 for k, v in self.replacemap.items()
955 }
952 }
956 if replacements:
953 if replacements:
957 scmutil.cleanupnodes(
954 scmutil.cleanupnodes(
958 self.repo, replacements, operation=b'absorb', fixphase=True
955 self.repo, replacements, operation=b'absorb', fixphase=True
959 )
956 )
960
957
961 @util.propertycache
958 @util.propertycache
962 def skip_empty_successor(self):
959 def skip_empty_successor(self):
963 return rewriteutil.skip_empty_successor(self.ui, b'absorb')
960 return rewriteutil.skip_empty_successor(self.ui, b'absorb')
964
961
965
962
966 def _parsechunk(hunk):
963 def _parsechunk(hunk):
967 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
964 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
968 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
965 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
969 return None, None
966 return None, None
970 path = hunk.header.filename()
967 path = hunk.header.filename()
971 a1 = hunk.fromline + len(hunk.before) - 1
968 a1 = hunk.fromline + len(hunk.before) - 1
972 # remove before and after context
969 # remove before and after context
973 hunk.before = hunk.after = []
970 hunk.before = hunk.after = []
974 buf = util.stringio()
971 buf = util.stringio()
975 hunk.write(buf)
972 hunk.write(buf)
976 patchlines = mdiff.splitnewlines(buf.getvalue())
973 patchlines = mdiff.splitnewlines(buf.getvalue())
977 # hunk.prettystr() will update hunk.removed
974 # hunk.prettystr() will update hunk.removed
978 a2 = a1 + hunk.removed
975 a2 = a1 + hunk.removed
979 blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')]
976 blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')]
980 return path, (a1, a2, blines)
977 return path, (a1, a2, blines)
981
978
982
979
983 def overlaydiffcontext(ctx, chunks):
980 def overlaydiffcontext(ctx, chunks):
984 """(ctx, [crecord.uihunk]) -> memctx
981 """(ctx, [crecord.uihunk]) -> memctx
985
982
986 return a memctx with some [1] patches (chunks) applied to ctx.
983 return a memctx with some [1] patches (chunks) applied to ctx.
987 [1]: modifications are handled. renames, mode changes, etc. are ignored.
984 [1]: modifications are handled. renames, mode changes, etc. are ignored.
988 """
985 """
989 # sadly the applying-patch logic is hardly reusable, and messy:
986 # sadly the applying-patch logic is hardly reusable, and messy:
990 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
987 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
991 # needs a file stream of a patch and will re-parse it, while we have
988 # needs a file stream of a patch and will re-parse it, while we have
992 # structured hunk objects at hand.
989 # structured hunk objects at hand.
993 # 2. a lot of different implementations about "chunk" (patch.hunk,
990 # 2. a lot of different implementations about "chunk" (patch.hunk,
994 # patch.recordhunk, crecord.uihunk)
991 # patch.recordhunk, crecord.uihunk)
995 # as we only care about applying changes to modified files, no mode
992 # as we only care about applying changes to modified files, no mode
996 # change, no binary diff, and no renames, it's probably okay to
993 # change, no binary diff, and no renames, it's probably okay to
997 # re-invent the logic using much simpler code here.
994 # re-invent the logic using much simpler code here.
998 memworkingcopy = {} # {path: content}
995 memworkingcopy = {} # {path: content}
999 patchmap = collections.defaultdict(list) # {path: [(a1, a2, [bline])]}
996 patchmap = collections.defaultdict(list) # {path: [(a1, a2, [bline])]}
1000 for path, info in map(_parsechunk, chunks):
997 for path, info in map(_parsechunk, chunks):
1001 if not path or not info:
998 if not path or not info:
1002 continue
999 continue
1003 patchmap[path].append(info)
1000 patchmap[path].append(info)
1004 for path, patches in pycompat.iteritems(patchmap):
1001 for path, patches in patchmap.items():
1005 if path not in ctx or not patches:
1002 if path not in ctx or not patches:
1006 continue
1003 continue
1007 patches.sort(reverse=True)
1004 patches.sort(reverse=True)
1008 lines = mdiff.splitnewlines(ctx[path].data())
1005 lines = mdiff.splitnewlines(ctx[path].data())
1009 for a1, a2, blines in patches:
1006 for a1, a2, blines in patches:
1010 lines[a1:a2] = blines
1007 lines[a1:a2] = blines
1011 memworkingcopy[path] = b''.join(lines)
1008 memworkingcopy[path] = b''.join(lines)
1012 return overlaycontext(memworkingcopy, ctx)
1009 return overlaycontext(memworkingcopy, ctx)
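To illustrate the loop above, a minimal standalone sketch (not part of the patch) of the reverse-order slice replacement: applying the (a1, a2, blines) triples from the bottom of the file upward keeps the earlier indices valid.

    # reverse-order application keeps earlier (a1, a2) offsets stable
    lines = [b'a\n', b'b\n', b'c\n', b'd\n']
    patches = [(0, 1, [b'A\n']), (2, 4, [b'C\n'])]  # (a1, a2, blines)
    for a1, a2, blines in sorted(patches, reverse=True):
        lines[a1:a2] = blines
    assert lines == [b'A\n', b'b\n', b'C\n']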
1013
1010
1014
1011
1015 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
1012 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
1016 """pick fixup chunks from targetctx, apply them to stack.
1013 """pick fixup chunks from targetctx, apply them to stack.
1017
1014
1018 if targetctx is None, the working copy context will be used.
1015 if targetctx is None, the working copy context will be used.
1019 if stack is None, the current draft stack will be used.
1016 if stack is None, the current draft stack will be used.
1020 return fixupstate.
1017 return fixupstate.
1021 """
1018 """
1022 if stack is None:
1019 if stack is None:
1023 limit = ui.configint(b'absorb', b'max-stack-size')
1020 limit = ui.configint(b'absorb', b'max-stack-size')
1024 headctx = repo[b'.']
1021 headctx = repo[b'.']
1025 if len(headctx.parents()) > 1:
1022 if len(headctx.parents()) > 1:
1026 raise error.InputError(_(b'cannot absorb into a merge'))
1023 raise error.InputError(_(b'cannot absorb into a merge'))
1027 stack = getdraftstack(headctx, limit)
1024 stack = getdraftstack(headctx, limit)
1028 if limit and len(stack) >= limit:
1025 if limit and len(stack) >= limit:
1029 ui.warn(
1026 ui.warn(
1030 _(
1027 _(
1031 b'absorb: only the recent %d changesets will '
1028 b'absorb: only the recent %d changesets will '
1032 b'be analysed\n'
1029 b'be analysed\n'
1033 )
1030 )
1034 % limit
1031 % limit
1035 )
1032 )
1036 if not stack:
1033 if not stack:
1037 raise error.InputError(_(b'no mutable changeset to change'))
1034 raise error.InputError(_(b'no mutable changeset to change'))
1038 if targetctx is None: # default to working copy
1035 if targetctx is None: # default to working copy
1039 targetctx = repo[None]
1036 targetctx = repo[None]
1040 if pats is None:
1037 if pats is None:
1041 pats = ()
1038 pats = ()
1042 if opts is None:
1039 if opts is None:
1043 opts = {}
1040 opts = {}
1044 state = fixupstate(stack, ui=ui, opts=opts)
1041 state = fixupstate(stack, ui=ui, opts=opts)
1045 matcher = scmutil.match(targetctx, pats, opts)
1042 matcher = scmutil.match(targetctx, pats, opts)
1046 if opts.get(b'interactive'):
1043 if opts.get(b'interactive'):
1047 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
1044 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
1048 origchunks = patch.parsepatch(diff)
1045 origchunks = patch.parsepatch(diff)
1049 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
1046 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
1050 targetctx = overlaydiffcontext(stack[-1], chunks)
1047 targetctx = overlaydiffcontext(stack[-1], chunks)
1051 fm = None
1048 fm = None
1052 if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
1049 if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
1053 fm = ui.formatter(b'absorb', opts)
1050 fm = ui.formatter(b'absorb', opts)
1054 state.diffwith(targetctx, matcher, fm)
1051 state.diffwith(targetctx, matcher, fm)
1055 if fm is not None:
1052 if fm is not None:
1056 fm.startitem()
1053 fm.startitem()
1057 fm.write(
1054 fm.write(
1058 b"count", b"\n%d changesets affected\n", len(state.ctxaffected)
1055 b"count", b"\n%d changesets affected\n", len(state.ctxaffected)
1059 )
1056 )
1060 fm.data(linetype=b'summary')
1057 fm.data(linetype=b'summary')
1061 for ctx in reversed(stack):
1058 for ctx in reversed(stack):
1062 if ctx not in state.ctxaffected:
1059 if ctx not in state.ctxaffected:
1063 continue
1060 continue
1064 fm.startitem()
1061 fm.startitem()
1065 fm.context(ctx=ctx)
1062 fm.context(ctx=ctx)
1066 fm.data(linetype=b'changeset')
1063 fm.data(linetype=b'changeset')
1067 fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
1064 fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
1068 descfirstline = ctx.description().splitlines()[0]
1065 descfirstline = ctx.description().splitlines()[0]
1069 fm.write(
1066 fm.write(
1070 b'descfirstline',
1067 b'descfirstline',
1071 b'%s\n',
1068 b'%s\n',
1072 descfirstline,
1069 descfirstline,
1073 label=b'absorb.description',
1070 label=b'absorb.description',
1074 )
1071 )
1075 fm.end()
1072 fm.end()
1076 if not opts.get(b'dry_run'):
1073 if not opts.get(b'dry_run'):
1077 if (
1074 if (
1078 not opts.get(b'apply_changes')
1075 not opts.get(b'apply_changes')
1079 and state.ctxaffected
1076 and state.ctxaffected
1080 and ui.promptchoice(
1077 and ui.promptchoice(
1081 b"apply changes (y/N)? $$ &Yes $$ &No", default=1
1078 b"apply changes (y/N)? $$ &Yes $$ &No", default=1
1082 )
1079 )
1083 ):
1080 ):
1084 raise error.CanceledError(_(b'absorb cancelled\n'))
1081 raise error.CanceledError(_(b'absorb cancelled\n'))
1085
1082
1086 state.apply()
1083 state.apply()
1087 if state.commit():
1084 if state.commit():
1088 state.printchunkstats()
1085 state.printchunkstats()
1089 elif not ui.quiet:
1086 elif not ui.quiet:
1090 ui.write(_(b'nothing applied\n'))
1087 ui.write(_(b'nothing applied\n'))
1091 return state
1088 return state
1092
1089
1093
1090
1094 @command(
1091 @command(
1095 b'absorb',
1092 b'absorb',
1096 [
1093 [
1097 (
1094 (
1098 b'a',
1095 b'a',
1099 b'apply-changes',
1096 b'apply-changes',
1100 None,
1097 None,
1101 _(b'apply changes without prompting for confirmation'),
1098 _(b'apply changes without prompting for confirmation'),
1102 ),
1099 ),
1103 (
1100 (
1104 b'p',
1101 b'p',
1105 b'print-changes',
1102 b'print-changes',
1106 None,
1103 None,
1107 _(b'always print which changesets are modified by which changes'),
1104 _(b'always print which changesets are modified by which changes'),
1108 ),
1105 ),
1109 (
1106 (
1110 b'i',
1107 b'i',
1111 b'interactive',
1108 b'interactive',
1112 None,
1109 None,
1113 _(b'interactively select which chunks to apply'),
1110 _(b'interactively select which chunks to apply'),
1114 ),
1111 ),
1115 (
1112 (
1116 b'e',
1113 b'e',
1117 b'edit-lines',
1114 b'edit-lines',
1118 None,
1115 None,
1119 _(
1116 _(
1120 b'edit what lines belong to which changesets before commit '
1117 b'edit what lines belong to which changesets before commit '
1121 b'(EXPERIMENTAL)'
1118 b'(EXPERIMENTAL)'
1122 ),
1119 ),
1123 ),
1120 ),
1124 ]
1121 ]
1125 + commands.dryrunopts
1122 + commands.dryrunopts
1126 + commands.templateopts
1123 + commands.templateopts
1127 + commands.walkopts,
1124 + commands.walkopts,
1128 _(b'hg absorb [OPTION] [FILE]...'),
1125 _(b'hg absorb [OPTION] [FILE]...'),
1129 helpcategory=command.CATEGORY_COMMITTING,
1126 helpcategory=command.CATEGORY_COMMITTING,
1130 helpbasic=True,
1127 helpbasic=True,
1131 )
1128 )
1132 def absorbcmd(ui, repo, *pats, **opts):
1129 def absorbcmd(ui, repo, *pats, **opts):
1133 """incorporate corrections into the stack of draft changesets
1130 """incorporate corrections into the stack of draft changesets
1134
1131
1135 absorb analyzes each change in your working directory and attempts to
1132 absorb analyzes each change in your working directory and attempts to
1136 amend the changed lines into the changesets in your stack that first
1133 amend the changed lines into the changesets in your stack that first
1137 introduced those lines.
1134 introduced those lines.
1138
1135
1139 If absorb cannot find an unambiguous changeset to amend for a change,
1136 If absorb cannot find an unambiguous changeset to amend for a change,
1140 that change will be left in the working directory, untouched. It can be
1137 that change will be left in the working directory, untouched. It can be
1141 observed with :hg:`status` or :hg:`diff` afterwards. In other words,
1138 observed with :hg:`status` or :hg:`diff` afterwards. In other words,
1142 absorb does not write to the working directory.
1139 absorb does not write to the working directory.
1143
1140
1144 Changesets outside the revset `::. and not public() and not merge()` will
1141 Changesets outside the revset `::. and not public() and not merge()` will
1145 not be changed.
1142 not be changed.
1146
1143
1147 Changesets that become empty after applying the changes will be deleted.
1144 Changesets that become empty after applying the changes will be deleted.
1148
1145
1149 By default, absorb will show what it plans to do and prompt for
1146 By default, absorb will show what it plans to do and prompt for
1150 confirmation. If you are confident that the changes will be absorbed
1147 confirmation. If you are confident that the changes will be absorbed
1151 to the correct place, run :hg:`absorb -a` to apply the changes
1148 to the correct place, run :hg:`absorb -a` to apply the changes
1152 immediately.
1149 immediately.
1153
1150
1154 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1151 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1155 """
1152 """
1156 opts = pycompat.byteskwargs(opts)
1153 opts = pycompat.byteskwargs(opts)
1157
1154
1158 with repo.wlock(), repo.lock():
1155 with repo.wlock(), repo.lock():
1159 if not opts[b'dry_run']:
1156 if not opts[b'dry_run']:
1160 cmdutil.checkunfinished(repo)
1157 cmdutil.checkunfinished(repo)
1161
1158
1162 state = absorb(ui, repo, pats=pats, opts=opts)
1159 state = absorb(ui, repo, pats=pats, opts=opts)
1163 if sum(s[0] for s in state.chunkstats.values()) == 0:
1160 if sum(s[0] for s in state.chunkstats.values()) == 0:
1164 return 1
1161 return 1
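For reference, a hedged sketch of driving the module-level absorb() API directly, mirroring what absorbcmd() does above; the ui and repo objects are assumed to come from an already-loaded repository, and run_absorb is an invented name.

    # hedged sketch, not part of the patch: programmatic use of absorb()
    def run_absorb(ui, repo):
        with repo.wlock(), repo.lock():
            cmdutil.checkunfinished(repo)
            state = absorb(ui, repo, pats=(), opts={b'apply_changes': True})
            # mirror absorbcmd(): exit 1 if every chunk was left alone
            if sum(s[0] for s in state.chunkstats.values()) == 0:
                return 1
            return 0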
@@ -1,338 +1,337 b''
1 # bzr.py - bzr support for the convert extension
1 # bzr.py - bzr support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
3 # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This module is for handling Breezy (`brz`) imports, but it's also compatible
8 # This module is for handling Breezy (`brz`) imports, but it's also compatible
9 # with Bazaar or `bzr`, which was formerly known as Bazaar-NG;
9 # with Bazaar or `bzr`, which was formerly known as Bazaar-NG;
10 # it cannot access `baz` repositories, but they were never used very much.
10 # it cannot access `baz` repositories, but they were never used very much.
11
11
12 import os
12 import os
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 demandimport,
16 demandimport,
17 error,
17 error,
18 pycompat,
19 util,
18 util,
20 )
19 )
21 from . import common
20 from . import common
22
21
23
22
24 # these do not work with demandimport, blacklist
23 # these do not work with demandimport, blacklist
25 demandimport.IGNORES.update(
24 demandimport.IGNORES.update(
26 [
25 [
27 b'breezy.transactions',
26 b'breezy.transactions',
28 b'breezy.urlutils',
27 b'breezy.urlutils',
29 b'ElementPath',
28 b'ElementPath',
30 ]
29 ]
31 )
30 )
32
31
33 try:
32 try:
34 # bazaar imports
33 # bazaar imports
35 import breezy.bzr.bzrdir
34 import breezy.bzr.bzrdir
36 import breezy.errors
35 import breezy.errors
37 import breezy.revision
36 import breezy.revision
38 import breezy.revisionspec
37 import breezy.revisionspec
39
38
40 bzrdir = breezy.bzr.bzrdir
39 bzrdir = breezy.bzr.bzrdir
41 errors = breezy.errors
40 errors = breezy.errors
42 revision = breezy.revision
41 revision = breezy.revision
43 revisionspec = breezy.revisionspec
42 revisionspec = breezy.revisionspec
44 revisionspec.RevisionSpec # touch the class so demandimport loads it eagerly
43 revisionspec.RevisionSpec # touch the class so demandimport loads it eagerly
45 except ImportError:
44 except ImportError:
46 pass
45 pass
47
46
48 supportedkinds = ('file', 'symlink')
47 supportedkinds = ('file', 'symlink')
49
48
50
49
51 class bzr_source(common.converter_source):
50 class bzr_source(common.converter_source):
52 """Reads Bazaar repositories by using the Bazaar Python libraries"""
51 """Reads Bazaar repositories by using the Bazaar Python libraries"""
53
52
54 def __init__(self, ui, repotype, path, revs=None):
53 def __init__(self, ui, repotype, path, revs=None):
55 super(bzr_source, self).__init__(ui, repotype, path, revs=revs)
54 super(bzr_source, self).__init__(ui, repotype, path, revs=revs)
56
55
57 if not os.path.exists(os.path.join(path, b'.bzr')):
56 if not os.path.exists(os.path.join(path, b'.bzr')):
58 raise common.NoRepo(
57 raise common.NoRepo(
59 _(b'%s does not look like a Bazaar repository') % path
58 _(b'%s does not look like a Bazaar repository') % path
60 )
59 )
61
60
62 try:
61 try:
63 # access breezy stuff
62 # access breezy stuff
64 bzrdir
63 bzrdir
65 except NameError:
64 except NameError:
66 raise common.NoRepo(_(b'Bazaar modules could not be loaded'))
65 raise common.NoRepo(_(b'Bazaar modules could not be loaded'))
67
66
68 path = util.abspath(path)
67 path = util.abspath(path)
69 self._checkrepotype(path)
68 self._checkrepotype(path)
70 try:
69 try:
71 bzr_dir = bzrdir.BzrDir.open(path.decode())
70 bzr_dir = bzrdir.BzrDir.open(path.decode())
72 self.sourcerepo = bzr_dir.open_repository()
71 self.sourcerepo = bzr_dir.open_repository()
73 except errors.NoRepositoryPresent:
72 except errors.NoRepositoryPresent:
74 raise common.NoRepo(
73 raise common.NoRepo(
75 _(b'%s does not look like a Bazaar repository') % path
74 _(b'%s does not look like a Bazaar repository') % path
76 )
75 )
77 self._parentids = {}
76 self._parentids = {}
78 self._saverev = ui.configbool(b'convert', b'bzr.saverev')
77 self._saverev = ui.configbool(b'convert', b'bzr.saverev')
79
78
80 def _checkrepotype(self, path):
79 def _checkrepotype(self, path):
81 # Lightweight checkouts detection is informational but probably
80 # Lightweight checkouts detection is informational but probably
82 # fragile at API level. It should not terminate the conversion.
81 # fragile at API level. It should not terminate the conversion.
83 try:
82 try:
84 dir = bzrdir.BzrDir.open_containing(path.decode())[0]
83 dir = bzrdir.BzrDir.open_containing(path.decode())[0]
85 try:
84 try:
86 tree = dir.open_workingtree(recommend_upgrade=False)
85 tree = dir.open_workingtree(recommend_upgrade=False)
87 branch = tree.branch
86 branch = tree.branch
88 except (errors.NoWorkingTree, errors.NotLocalUrl):
87 except (errors.NoWorkingTree, errors.NotLocalUrl):
89 tree = None
88 tree = None
90 branch = dir.open_branch()
89 branch = dir.open_branch()
91 if (
90 if (
92 tree is not None
91 tree is not None
93 and tree.controldir.root_transport.base
92 and tree.controldir.root_transport.base
94 != branch.controldir.root_transport.base
93 != branch.controldir.root_transport.base
95 ):
94 ):
96 self.ui.warn(
95 self.ui.warn(
97 _(
96 _(
98 b'warning: lightweight checkouts may cause '
97 b'warning: lightweight checkouts may cause '
99 b'conversion failures, try with a regular '
98 b'conversion failures, try with a regular '
100 b'branch instead.\n'
99 b'branch instead.\n'
101 )
100 )
102 )
101 )
103 except Exception:
102 except Exception:
104 self.ui.note(_(b'bzr source type could not be determined\n'))
103 self.ui.note(_(b'bzr source type could not be determined\n'))
105
104
106 def before(self):
105 def before(self):
107 """Before the conversion begins, acquire a read lock
106 """Before the conversion begins, acquire a read lock
108 for all the operations that might need it. Fortunately
107 for all the operations that might need it. Fortunately
109 read locks don't block other reads or writes to the
108 read locks don't block other reads or writes to the
110 repository, so this shouldn't have any impact on the usage of
109 repository, so this shouldn't have any impact on the usage of
111 the source repository.
110 the source repository.
112
111
113 The alternative would be locking on every operation that
112 The alternative would be locking on every operation that
114 needs locks (there are currently two: getting the file and
113 needs locks (there are currently two: getting the file and
115 getting the parent map) and releasing immediately after,
114 getting the parent map) and releasing immediately after,
116 but this approach can take up to 40% longer."""
115 but this approach can take up to 40% longer."""
117 self.sourcerepo.lock_read()
116 self.sourcerepo.lock_read()
118
117
119 def after(self):
118 def after(self):
120 self.sourcerepo.unlock()
119 self.sourcerepo.unlock()
121
120
122 def _bzrbranches(self):
121 def _bzrbranches(self):
123 return self.sourcerepo.find_branches(using=True)
122 return self.sourcerepo.find_branches(using=True)
124
123
125 def getheads(self):
124 def getheads(self):
126 if not self.revs:
125 if not self.revs:
127 # Set using=True to avoid nested repositories (see issue3254)
126 # Set using=True to avoid nested repositories (see issue3254)
128 heads = sorted([b.last_revision() for b in self._bzrbranches()])
127 heads = sorted([b.last_revision() for b in self._bzrbranches()])
129 else:
128 else:
130 revid = None
129 revid = None
131 for branch in self._bzrbranches():
130 for branch in self._bzrbranches():
132 try:
131 try:
133 revspec = self.revs[0].decode()
132 revspec = self.revs[0].decode()
134 r = revisionspec.RevisionSpec.from_string(revspec)
133 r = revisionspec.RevisionSpec.from_string(revspec)
135 info = r.in_history(branch)
134 info = r.in_history(branch)
136 except errors.BzrError:
135 except errors.BzrError:
137 continue # this branch cannot resolve the revision; try the next one
136 continue # this branch cannot resolve the revision; try the next one
138 revid = info.rev_id
137 revid = info.rev_id
139 if revid is None:
138 if revid is None:
140 raise error.Abort(
139 raise error.Abort(
141 _(b'%s is not a valid revision') % self.revs[0]
140 _(b'%s is not a valid revision') % self.revs[0]
142 )
141 )
143 heads = [revid]
142 heads = [revid]
144 # Empty repositories return 'null:', which cannot be retrieved
143 # Empty repositories return 'null:', which cannot be retrieved
145 heads = [h for h in heads if h != b'null:']
144 heads = [h for h in heads if h != b'null:']
146 return heads
145 return heads
147
146
148 def getfile(self, name, rev):
147 def getfile(self, name, rev):
149 name = name.decode()
148 name = name.decode()
150 revtree = self.sourcerepo.revision_tree(rev)
149 revtree = self.sourcerepo.revision_tree(rev)
151
150
152 try:
151 try:
153 kind = revtree.kind(name)
152 kind = revtree.kind(name)
154 except breezy.errors.NoSuchFile:
153 except breezy.errors.NoSuchFile:
155 return None, None
154 return None, None
156 if kind not in supportedkinds:
155 if kind not in supportedkinds:
157 # the file is not available anymore - was deleted
156 # the file is not available anymore - was deleted
158 return None, None
157 return None, None
159 mode = self._modecache[(name.encode(), rev)]
158 mode = self._modecache[(name.encode(), rev)]
160 if kind == 'symlink':
159 if kind == 'symlink':
161 target = revtree.get_symlink_target(name)
160 target = revtree.get_symlink_target(name)
162 if target is None:
161 if target is None:
163 raise error.Abort(
162 raise error.Abort(
164 _(b'%s.%s symlink has no target') % (name, rev)
163 _(b'%s.%s symlink has no target') % (name, rev)
165 )
164 )
166 return target.encode(), mode
165 return target.encode(), mode
167 else:
166 else:
168 sio = revtree.get_file(name)
167 sio = revtree.get_file(name)
169 return sio.read(), mode
168 return sio.read(), mode
170
169
171 def getchanges(self, version, full):
170 def getchanges(self, version, full):
172 if full:
171 if full:
173 raise error.Abort(_(b"convert from bzr does not support --full"))
172 raise error.Abort(_(b"convert from bzr does not support --full"))
174 self._modecache = {}
173 self._modecache = {}
175 self._revtree = self.sourcerepo.revision_tree(version)
174 self._revtree = self.sourcerepo.revision_tree(version)
176 # get the parentids from the cache
175 # get the parentids from the cache
177 parentids = self._parentids.pop(version)
176 parentids = self._parentids.pop(version)
178 # only diff against first parent id
177 # only diff against first parent id
179 prevtree = self.sourcerepo.revision_tree(parentids[0])
178 prevtree = self.sourcerepo.revision_tree(parentids[0])
180 files, changes = self._gettreechanges(self._revtree, prevtree)
179 files, changes = self._gettreechanges(self._revtree, prevtree)
181 return files, changes, set()
180 return files, changes, set()
182
181
183 def getcommit(self, version):
182 def getcommit(self, version):
184 rev = self.sourcerepo.get_revision(version)
183 rev = self.sourcerepo.get_revision(version)
185 # populate parent id cache
184 # populate parent id cache
186 if not rev.parent_ids:
185 if not rev.parent_ids:
187 parents = []
186 parents = []
188 self._parentids[version] = (revision.NULL_REVISION,)
187 self._parentids[version] = (revision.NULL_REVISION,)
189 else:
188 else:
190 parents = self._filterghosts(rev.parent_ids)
189 parents = self._filterghosts(rev.parent_ids)
191 self._parentids[version] = parents
190 self._parentids[version] = parents
192
191
193 branch = rev.properties.get('branch-nick', 'default')
192 branch = rev.properties.get('branch-nick', 'default')
194 if branch == 'trunk':
193 if branch == 'trunk':
195 branch = 'default'
194 branch = 'default'
196 return common.commit(
195 return common.commit(
197 parents=parents,
196 parents=parents,
198 date=b'%d %d' % (rev.timestamp, -rev.timezone),
197 date=b'%d %d' % (rev.timestamp, -rev.timezone),
199 author=self.recode(rev.committer),
198 author=self.recode(rev.committer),
200 desc=self.recode(rev.message),
199 desc=self.recode(rev.message),
201 branch=branch.encode('utf8'),
200 branch=branch.encode('utf8'),
202 rev=version,
201 rev=version,
203 saverev=self._saverev,
202 saverev=self._saverev,
204 )
203 )
205
204
206 def gettags(self):
205 def gettags(self):
207 bytetags = {}
206 bytetags = {}
208 for branch in self._bzrbranches():
207 for branch in self._bzrbranches():
209 if not branch.supports_tags():
208 if not branch.supports_tags():
210 return {}
209 return {}
211 tagdict = branch.tags.get_tag_dict()
210 tagdict = branch.tags.get_tag_dict()
212 for name, rev in pycompat.iteritems(tagdict):
211 for name, rev in tagdict.items():
213 bytetags[self.recode(name)] = rev
212 bytetags[self.recode(name)] = rev
214 return bytetags
213 return bytetags
215
214
216 def getchangedfiles(self, rev, i):
215 def getchangedfiles(self, rev, i):
217 self._modecache = {}
216 self._modecache = {}
218 curtree = self.sourcerepo.revision_tree(rev)
217 curtree = self.sourcerepo.revision_tree(rev)
219 if i is not None:
218 if i is not None:
220 parentid = self._parentids[rev][i]
219 parentid = self._parentids[rev][i]
221 else:
220 else:
222 # no parent id, get the empty revision
221 # no parent id, get the empty revision
223 parentid = revision.NULL_REVISION
222 parentid = revision.NULL_REVISION
224
223
225 prevtree = self.sourcerepo.revision_tree(parentid)
224 prevtree = self.sourcerepo.revision_tree(parentid)
226 changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
225 changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
227 return changes
226 return changes
228
227
229 def _gettreechanges(self, current, origin):
228 def _gettreechanges(self, current, origin):
230 revid = current._revision_id
229 revid = current._revision_id
231 changes = []
230 changes = []
232 renames = {}
231 renames = {}
233 seen = set()
232 seen = set()
234
233
235 # Fall back to the deprecated attribute for legacy installations.
234 # Fall back to the deprecated attribute for legacy installations.
236 try:
235 try:
237 inventory = origin.root_inventory
236 inventory = origin.root_inventory
238 except AttributeError:
237 except AttributeError:
239 inventory = origin.inventory
238 inventory = origin.inventory
240
239
241 # Process the entries by reverse lexicographic name order to
240 # Process the entries by reverse lexicographic name order to
242 # handle nested renames correctly, most specific first.
241 # handle nested renames correctly, most specific first.
243
242
244 def key(c):
243 def key(c):
245 return c.path[0] or c.path[1] or ""
244 return c.path[0] or c.path[1] or ""
246
245
247 curchanges = sorted(
246 curchanges = sorted(
248 current.iter_changes(origin),
247 current.iter_changes(origin),
249 key=key,
248 key=key,
250 reverse=True,
249 reverse=True,
251 )
250 )
252 for change in curchanges:
251 for change in curchanges:
253 paths = change.path
252 paths = change.path
254 kind = change.kind
253 kind = change.kind
255 executable = change.executable
254 executable = change.executable
256 if paths[0] == u'' or paths[1] == u'':
255 if paths[0] == u'' or paths[1] == u'':
257 # ignore changes to tree root
256 # ignore changes to tree root
258 continue
257 continue
259
258
260 # bazaar tracks directories, mercurial does not, so
259 # bazaar tracks directories, mercurial does not, so
261 # we have to rename the directory contents
260 # we have to rename the directory contents
262 if kind[1] == 'directory':
261 if kind[1] == 'directory':
263 if kind[0] not in (None, 'directory'):
262 if kind[0] not in (None, 'directory'):
264 # Replacing 'something' with a directory, record it
263 # Replacing 'something' with a directory, record it
265 # so it can be removed.
264 # so it can be removed.
266 changes.append((self.recode(paths[0]), revid))
265 changes.append((self.recode(paths[0]), revid))
267
266
268 if kind[0] == 'directory' and None not in paths:
267 if kind[0] == 'directory' and None not in paths:
269 renaming = paths[0] != paths[1]
268 renaming = paths[0] != paths[1]
270 # neither an add nor a delete - a move
269 # neither an add nor a delete - a move
271 # rename all directory contents manually
270 # rename all directory contents manually
272 subdir = inventory.path2id(paths[0])
271 subdir = inventory.path2id(paths[0])
273 # get all child-entries of the directory
272 # get all child-entries of the directory
274 for name, entry in inventory.iter_entries(subdir):
273 for name, entry in inventory.iter_entries(subdir):
275 # hg does not track directory renames
274 # hg does not track directory renames
276 if entry.kind == 'directory':
275 if entry.kind == 'directory':
277 continue
276 continue
278 frompath = self.recode(paths[0] + '/' + name)
277 frompath = self.recode(paths[0] + '/' + name)
279 if frompath in seen:
278 if frompath in seen:
280 # Already handled by a more specific change entry
279 # Already handled by a more specific change entry
281 # This is important when you have:
280 # This is important when you have:
282 # a => b
281 # a => b
283 # a/c => a/c
282 # a/c => a/c
284 # Here a/c must not be renamed into b/c
283 # Here a/c must not be renamed into b/c
285 continue
284 continue
286 seen.add(frompath)
285 seen.add(frompath)
287 if not renaming:
286 if not renaming:
288 continue
287 continue
289 topath = self.recode(paths[1] + '/' + name)
288 topath = self.recode(paths[1] + '/' + name)
290 # register the files as changed
289 # register the files as changed
291 changes.append((frompath, revid))
290 changes.append((frompath, revid))
292 changes.append((topath, revid))
291 changes.append((topath, revid))
293 # add to mode cache
292 # add to mode cache
294 mode = (
293 mode = (
295 (entry.executable and b'x')
294 (entry.executable and b'x')
296 or (entry.kind == 'symlink' and b'l')
295 or (entry.kind == 'symlink' and b'l')
297 or b''
296 or b''
298 )
297 )
299 self._modecache[(topath, revid)] = mode
298 self._modecache[(topath, revid)] = mode
300 # register the change as move
299 # register the change as move
301 renames[topath] = frompath
300 renames[topath] = frompath
302
301
303 # no further changes, go to the next change
302 # no further changes, go to the next change
304 continue
303 continue
305
304
306 # we got unicode paths, need to convert them
305 # we got unicode paths, need to convert them
307 path, topath = paths
306 path, topath = paths
308 if path is not None:
307 if path is not None:
309 path = self.recode(path)
308 path = self.recode(path)
310 if topath is not None:
309 if topath is not None:
311 topath = self.recode(topath)
310 topath = self.recode(topath)
312 seen.add(path or topath)
311 seen.add(path or topath)
313
312
314 if topath is None:
313 if topath is None:
315 # file deleted
314 # file deleted
316 changes.append((path, revid))
315 changes.append((path, revid))
317 continue
316 continue
318
317
319 # renamed
318 # renamed
320 if path and path != topath:
319 if path and path != topath:
321 renames[topath] = path
320 renames[topath] = path
322 changes.append((path, revid))
321 changes.append((path, revid))
323
322
324 # populate the mode cache
323 # populate the mode cache
325 kind, executable = [e[1] for e in (kind, executable)]
324 kind, executable = [e[1] for e in (kind, executable)]
326 mode = (executable and b'x') or (kind == 'symlink' and b'l') or b''
325 mode = (executable and b'x') or (kind == 'symlink' and b'l') or b''
327 self._modecache[(topath, revid)] = mode
326 self._modecache[(topath, revid)] = mode
328 changes.append((topath, revid))
327 changes.append((topath, revid))
329
328
330 return changes, renames
329 return changes, renames
331
330
332 def _filterghosts(self, ids):
331 def _filterghosts(self, ids):
333 """Filters out ghost revisions which hg does not support, see
332 """Filters out ghost revisions which hg does not support, see
334 <http://bazaar-vcs.org/GhostRevision>
333 <http://bazaar-vcs.org/GhostRevision>
335 """
334 """
336 parentmap = self.sourcerepo.get_parent_map(ids)
335 parentmap = self.sourcerepo.get_parent_map(ids)
337 parents = tuple([parent for parent in ids if parent in parentmap])
336 parents = tuple([parent for parent in ids if parent in parentmap])
338 return parents
337 return parents
@@ -1,597 +1,597 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64
8 import base64
9 import datetime
9 import datetime
10 import errno
10 import errno
11 import os
11 import os
12 import pickle
12 import pickle
13 import re
13 import re
14 import shlex
14 import shlex
15 import subprocess
15 import subprocess
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.pycompat import open
18 from mercurial.pycompat import open
19 from mercurial import (
19 from mercurial import (
20 encoding,
20 encoding,
21 error,
21 error,
22 phases,
22 phases,
23 pycompat,
23 pycompat,
24 util,
24 util,
25 )
25 )
26 from mercurial.utils import procutil
26 from mercurial.utils import procutil
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30
30
31 def _encodeornone(d):
31 def _encodeornone(d):
32 if d is None:
32 if d is None:
33 return
33 return
34 return d.encode('latin1')
34 return d.encode('latin1')
35
35
36
36
37 class _shlexpy3proxy(object):
37 class _shlexpy3proxy(object):
38 def __init__(self, l):
38 def __init__(self, l):
39 self._l = l
39 self._l = l
40
40
41 def __iter__(self):
41 def __iter__(self):
42 return (_encodeornone(v) for v in self._l)
42 return (_encodeornone(v) for v in self._l)
43
43
44 def get_token(self):
44 def get_token(self):
45 return _encodeornone(self._l.get_token())
45 return _encodeornone(self._l.get_token())
46
46
47 @property
47 @property
48 def infile(self):
48 def infile(self):
49 return self._l.infile or b'<unknown>'
49 return self._l.infile or b'<unknown>'
50
50
51 @property
51 @property
52 def lineno(self):
52 def lineno(self):
53 return self._l.lineno
53 return self._l.lineno
54
54
55
55
56 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
56 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
57 if data is None:
57 if data is None:
58 if pycompat.ispy3:
58 if pycompat.ispy3:
59 data = open(filepath, b'r', encoding='latin1')
59 data = open(filepath, b'r', encoding='latin1')
60 else:
60 else:
61 data = open(filepath, b'r')
61 data = open(filepath, b'r')
62 else:
62 else:
63 if filepath is not None:
63 if filepath is not None:
64 raise error.ProgrammingError(
64 raise error.ProgrammingError(
65 b'shlexer only accepts data or filepath, not both'
65 b'shlexer only accepts data or filepath, not both'
66 )
66 )
67 if pycompat.ispy3:
67 if pycompat.ispy3:
68 data = data.decode('latin1')
68 data = data.decode('latin1')
69 l = shlex.shlex(data, infile=filepath, posix=True)
69 l = shlex.shlex(data, infile=filepath, posix=True)
70 if whitespace is not None:
70 if whitespace is not None:
71 l.whitespace_split = True
71 l.whitespace_split = True
72 if pycompat.ispy3:
72 if pycompat.ispy3:
73 l.whitespace += whitespace.decode('latin1')
73 l.whitespace += whitespace.decode('latin1')
74 else:
74 else:
75 l.whitespace += whitespace
75 l.whitespace += whitespace
76 if wordchars is not None:
76 if wordchars is not None:
77 if pycompat.ispy3:
77 if pycompat.ispy3:
78 l.wordchars += wordchars.decode('latin1')
78 l.wordchars += wordchars.decode('latin1')
79 else:
79 else:
80 l.wordchars += wordchars
80 l.wordchars += wordchars
81 if pycompat.ispy3:
81 if pycompat.ispy3:
82 return _shlexpy3proxy(l)
82 return _shlexpy3proxy(l)
83 return l
83 return l
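A small usage sketch (assumed, not from the patch): the wrapper tokenizes byte strings with POSIX quoting rules and yields byte tokens on Python 3.

    tokens = list(shlexer(data=b'author = "Jane Doe"'))
    assert tokens == [b'author', b'=', b'Jane Doe']  # posix mode strips quotes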
84
84
85
85
86 if pycompat.ispy3:
86 if pycompat.ispy3:
87 base64_encodebytes = base64.encodebytes
87 base64_encodebytes = base64.encodebytes
88 base64_decodebytes = base64.decodebytes
88 base64_decodebytes = base64.decodebytes
89 else:
89 else:
90 base64_encodebytes = base64.encodestring
90 base64_encodebytes = base64.encodestring
91 base64_decodebytes = base64.decodestring
91 base64_decodebytes = base64.decodestring
92
92
93
93
94 def encodeargs(args):
94 def encodeargs(args):
95 def encodearg(s):
95 def encodearg(s):
96 lines = base64_encodebytes(s)
96 lines = base64_encodebytes(s)
97 lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
97 lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
98 return b''.join(lines)
98 return b''.join(lines)
99
99
100 s = pickle.dumps(args)
100 s = pickle.dumps(args)
101 return encodearg(s)
101 return encodearg(s)
102
102
103
103
104 def decodeargs(s):
104 def decodeargs(s):
105 s = base64_decodebytes(s)
105 s = base64_decodebytes(s)
106 return pickle.loads(s)
106 return pickle.loads(s)
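Round-trip sketch: the arguments are pickled, then base64-encoded with all newlines stripped, so the blob fits on a single line; decodeargs() reverses both steps.

    blob = encodeargs([b'rev-1', {b'key': 1}])
    assert b'\n' not in blob
    assert decodeargs(blob) == [b'rev-1', {b'key': 1}]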
107
107
108
108
109 class MissingTool(Exception):
109 class MissingTool(Exception):
110 pass
110 pass
111
111
112
112
113 def checktool(exe, name=None, abort=True):
113 def checktool(exe, name=None, abort=True):
114 name = name or exe
114 name = name or exe
115 if not procutil.findexe(exe):
115 if not procutil.findexe(exe):
116 if abort:
116 if abort:
117 exc = error.Abort
117 exc = error.Abort
118 else:
118 else:
119 exc = MissingTool
119 exc = MissingTool
120 raise exc(_(b'cannot find required "%s" tool') % name)
120 raise exc(_(b'cannot find required "%s" tool') % name)
121
121
122
122
123 class NoRepo(Exception):
123 class NoRepo(Exception):
124 pass
124 pass
125
125
126
126
127 SKIPREV = b'SKIP'
127 SKIPREV = b'SKIP'
128
128
129
129
130 class commit(object):
130 class commit(object):
131 def __init__(
131 def __init__(
132 self,
132 self,
133 author,
133 author,
134 date,
134 date,
135 desc,
135 desc,
136 parents,
136 parents,
137 branch=None,
137 branch=None,
138 rev=None,
138 rev=None,
139 extra=None,
139 extra=None,
140 sortkey=None,
140 sortkey=None,
141 saverev=True,
141 saverev=True,
142 phase=phases.draft,
142 phase=phases.draft,
143 optparents=None,
143 optparents=None,
144 ctx=None,
144 ctx=None,
145 ):
145 ):
146 self.author = author or b'unknown'
146 self.author = author or b'unknown'
147 self.date = date or b'0 0'
147 self.date = date or b'0 0'
148 self.desc = desc
148 self.desc = desc
149 self.parents = parents # will be converted and used as parents
149 self.parents = parents # will be converted and used as parents
150 self.optparents = optparents or [] # will be used if already converted
150 self.optparents = optparents or [] # will be used if already converted
151 self.branch = branch
151 self.branch = branch
152 self.rev = rev
152 self.rev = rev
153 self.extra = extra or {}
153 self.extra = extra or {}
154 self.sortkey = sortkey
154 self.sortkey = sortkey
155 self.saverev = saverev
155 self.saverev = saverev
156 self.phase = phase
156 self.phase = phase
157 self.ctx = ctx # for hg to hg conversions
157 self.ctx = ctx # for hg to hg conversions
158
158
159
159
160 class converter_source(object):
160 class converter_source(object):
161 """Conversion source interface"""
161 """Conversion source interface"""
162
162
163 def __init__(self, ui, repotype, path=None, revs=None):
163 def __init__(self, ui, repotype, path=None, revs=None):
164 """Initialize conversion source (or raise NoRepo("message")
164 """Initialize conversion source (or raise NoRepo("message")
165 exception if path is not a valid repository)"""
165 exception if path is not a valid repository)"""
166 self.ui = ui
166 self.ui = ui
167 self.path = path
167 self.path = path
168 self.revs = revs
168 self.revs = revs
169 self.repotype = repotype
169 self.repotype = repotype
170
170
171 self.encoding = b'utf-8'
171 self.encoding = b'utf-8'
172
172
173 def checkhexformat(self, revstr, mapname=b'splicemap'):
173 def checkhexformat(self, revstr, mapname=b'splicemap'):
174 """fails if revstr is not a 40 byte hex. mercurial and git both uses
174 """fails if revstr is not a 40 byte hex. mercurial and git both uses
175 such format for their revision numbering
175 such format for their revision numbering
176 """
176 """
177 if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
177 if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
178 raise error.Abort(
178 raise error.Abort(
179 _(b'%s entry %s is not a valid revision identifier')
179 _(b'%s entry %s is not a valid revision identifier')
180 % (mapname, revstr)
180 % (mapname, revstr)
181 )
181 )
182
182
183 def before(self):
183 def before(self):
184 pass
184 pass
185
185
186 def after(self):
186 def after(self):
187 pass
187 pass
188
188
189 def targetfilebelongstosource(self, targetfilename):
189 def targetfilebelongstosource(self, targetfilename):
190 """Returns true if the given targetfile belongs to the source repo. This
190 """Returns true if the given targetfile belongs to the source repo. This
191 is useful when only a subdirectory of the target belongs to the source
191 is useful when only a subdirectory of the target belongs to the source
192 repo."""
192 repo."""
193 # For normal full repo converts, this is always True.
193 # For normal full repo converts, this is always True.
194 return True
194 return True
195
195
196 def setrevmap(self, revmap):
196 def setrevmap(self, revmap):
197 """set the map of already-converted revisions"""
197 """set the map of already-converted revisions"""
198
198
199 def getheads(self):
199 def getheads(self):
200 """Return a list of this repository's heads"""
200 """Return a list of this repository's heads"""
201 raise NotImplementedError
201 raise NotImplementedError
202
202
203 def getfile(self, name, rev):
203 def getfile(self, name, rev):
204 """Return a pair (data, mode) where data is the file content
204 """Return a pair (data, mode) where data is the file content
205 as a string and mode one of '', 'x' or 'l'. rev is the
205 as a string and mode one of '', 'x' or 'l'. rev is the
206 identifier returned by a previous call to getchanges().
206 identifier returned by a previous call to getchanges().
207 Data is None if file is missing/deleted in rev.
207 Data is None if file is missing/deleted in rev.
208 """
208 """
209 raise NotImplementedError
209 raise NotImplementedError
210
210
211 def getchanges(self, version, full):
211 def getchanges(self, version, full):
212 """Returns a tuple of (files, copies, cleanp2).
212 """Returns a tuple of (files, copies, cleanp2).
213
213
214 files is a sorted list of (filename, id) tuples for all files
214 files is a sorted list of (filename, id) tuples for all files
215 changed between version and its first parent returned by
215 changed between version and its first parent returned by
216 getcommit(). If full, all files in that revision are returned.
216 getcommit(). If full, all files in that revision are returned.
217 id is the source revision id of the file.
217 id is the source revision id of the file.
218
218
219 copies is a dictionary of dest: source
219 copies is a dictionary of dest: source
220
220
221 cleanp2 is the set of filenames that are clean against p2.
221 cleanp2 is the set of filenames that are clean against p2.
222 (Files that are clean against p1 are already not in files (unless
222 (Files that are clean against p1 are already not in files (unless
223 full). This makes it possible to handle p2 clean files similarly.)
223 full). This makes it possible to handle p2 clean files similarly.)
224 """
224 """
225 raise NotImplementedError
225 raise NotImplementedError
226
226
227 def getcommit(self, version):
227 def getcommit(self, version):
228 """Return the commit object for version"""
228 """Return the commit object for version"""
229 raise NotImplementedError
229 raise NotImplementedError
230
230
231 def numcommits(self):
231 def numcommits(self):
232 """Return the number of commits in this source.
232 """Return the number of commits in this source.
233
233
234 If unknown, return None.
234 If unknown, return None.
235 """
235 """
236 return None
236 return None
237
237
238 def gettags(self):
238 def gettags(self):
239 """Return the tags as a dictionary of name: revision
239 """Return the tags as a dictionary of name: revision
240
240
241 Tag names must be UTF-8 strings.
241 Tag names must be UTF-8 strings.
242 """
242 """
243 raise NotImplementedError
243 raise NotImplementedError
244
244
245 def recode(self, s, encoding=None):
245 def recode(self, s, encoding=None):
246 if not encoding:
246 if not encoding:
247 encoding = self.encoding or b'utf-8'
247 encoding = self.encoding or b'utf-8'
248
248
249 if isinstance(s, pycompat.unicode):
249 if isinstance(s, pycompat.unicode):
250 return s.encode("utf-8")
250 return s.encode("utf-8")
251 try:
251 try:
252 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
252 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
253 except UnicodeError:
253 except UnicodeError:
254 try:
254 try:
255 return s.decode("latin-1").encode("utf-8")
255 return s.decode("latin-1").encode("utf-8")
256 except UnicodeError:
256 except UnicodeError:
257 return s.decode(pycompat.sysstr(encoding), "replace").encode(
257 return s.decode(pycompat.sysstr(encoding), "replace").encode(
258 "utf-8"
258 "utf-8"
259 )
259 )
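Behavior sketch (src is an assumed converter_source instance): bytes that do not decode in the configured encoding fall back to latin-1, and only then to replacement characters.

    src.encoding = b'utf-8'
    assert src.recode(b'caf\xe9') == b'caf\xc3\xa9'  # invalid UTF-8, valid latin-1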
260
260
261 def getchangedfiles(self, rev, i):
261 def getchangedfiles(self, rev, i):
262 """Return the files changed by rev compared to parent[i].
262 """Return the files changed by rev compared to parent[i].
263
263
264 i is an index selecting one of the parents of rev. The return
264 i is an index selecting one of the parents of rev. The return
265 value should be the list of files that are different in rev and
265 value should be the list of files that are different in rev and
266 this parent.
266 this parent.
267
267
268 If rev has no parents, i is None.
268 If rev has no parents, i is None.
269
269
270 This function is only needed to support --filemap
270 This function is only needed to support --filemap
271 """
271 """
272 raise NotImplementedError
272 raise NotImplementedError
273
273
274 def converted(self, rev, sinkrev):
274 def converted(self, rev, sinkrev):
275 '''Notify the source that a revision has been converted.'''
275 '''Notify the source that a revision has been converted.'''
276
276
277 def hasnativeorder(self):
277 def hasnativeorder(self):
278 """Return true if this source has a meaningful, native revision
278 """Return true if this source has a meaningful, native revision
279 order. For instance, Mercurial revisions are stored sequentially
279 order. For instance, Mercurial revisions are stored sequentially
280 while there is no such global ordering with Darcs.
280 while there is no such global ordering with Darcs.
281 """
281 """
282 return False
282 return False
283
283
284 def hasnativeclose(self):
284 def hasnativeclose(self):
285 """Return true if this source has ability to close branch."""
285 """Return true if this source has ability to close branch."""
286 return False
286 return False
287
287
288 def lookuprev(self, rev):
288 def lookuprev(self, rev):
289 """If rev is a meaningful revision reference in source, return
289 """If rev is a meaningful revision reference in source, return
290 the referenced identifier in the same format used by getcommit().
290 the referenced identifier in the same format used by getcommit().
291 Return None otherwise.
291 Return None otherwise.
292 """
292 """
293 return None
293 return None
294
294
295 def getbookmarks(self):
295 def getbookmarks(self):
296 """Return the bookmarks as a dictionary of name: revision
296 """Return the bookmarks as a dictionary of name: revision
297
297
298 Bookmark names are to be UTF-8 strings.
298 Bookmark names are to be UTF-8 strings.
299 """
299 """
300 return {}
300 return {}
301
301
302 def checkrevformat(self, revstr, mapname=b'splicemap'):
302 def checkrevformat(self, revstr, mapname=b'splicemap'):
303 """revstr is a string that describes a revision in the given
303 """revstr is a string that describes a revision in the given
304 source control system. Return true if revstr has the correct
304 source control system. Return true if revstr has the correct
305 format.
305 format.
306 """
306 """
307 return True
307 return True
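To make the interface concrete, a hedged sketch of a minimal subclass; everything except the inherited method names is invented for illustration.

    # minimal converter_source sketch: one synthetic head with one file
    class single_commit_source(converter_source):
        def getheads(self):
            return [b'only-rev']

        def getcommit(self, version):
            return commit(author=b'a', date=b'0 0', desc=b'import', parents=[])

        def getchanges(self, version, full):
            return [(b'README', version)], {}, set()

        def getfile(self, name, rev):
            return b'hello\n', b''  # regular file: no 'x' or 'l' flag

        def gettags(self):
            return {}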
308
308
309
309
310 class converter_sink(object):
310 class converter_sink(object):
311 """Conversion sink (target) interface"""
311 """Conversion sink (target) interface"""
312
312
313 def __init__(self, ui, repotype, path):
313 def __init__(self, ui, repotype, path):
314 """Initialize conversion sink (or raise NoRepo("message")
314 """Initialize conversion sink (or raise NoRepo("message")
315 exception if path is not a valid repository)
315 exception if path is not a valid repository)
316
316
317 created is a list of paths to remove if a fatal error occurs
317 created is a list of paths to remove if a fatal error occurs
318 later"""
318 later"""
319 self.ui = ui
319 self.ui = ui
320 self.path = path
320 self.path = path
321 self.created = []
321 self.created = []
322 self.repotype = repotype
322 self.repotype = repotype
323
323
324 def revmapfile(self):
324 def revmapfile(self):
325 """Path to a file that will contain lines
325 """Path to a file that will contain lines
326 source_rev_id sink_rev_id
326 source_rev_id sink_rev_id
327 mapping equivalent revision identifiers for each system."""
327 mapping equivalent revision identifiers for each system."""
328 raise NotImplementedError
328 raise NotImplementedError
329
329
330 def authorfile(self):
330 def authorfile(self):
331 """Path to a file that will contain lines
331 """Path to a file that will contain lines
332 srcauthor=dstauthor
332 srcauthor=dstauthor
333 mapping equivalent authors identifiers for each system."""
333 mapping equivalent authors identifiers for each system."""
334 return None
334 return None
335
335
336 def putcommit(
336 def putcommit(
337 self, files, copies, parents, commit, source, revmap, full, cleanp2
337 self, files, copies, parents, commit, source, revmap, full, cleanp2
338 ):
338 ):
339 """Create a revision with all changed files listed in 'files'
339 """Create a revision with all changed files listed in 'files'
340 and having listed parents. 'commit' is a commit object
340 and having listed parents. 'commit' is a commit object
341 containing at a minimum the author, date, and message for this
341 containing at a minimum the author, date, and message for this
342 changeset. 'files' is a list of (path, version) tuples,
342 changeset. 'files' is a list of (path, version) tuples,
343 'copies' is a dictionary mapping destinations to sources,
343 'copies' is a dictionary mapping destinations to sources,
344 'source' is the source repository, and 'revmap' is a mapfile
344 'source' is the source repository, and 'revmap' is a mapfile
345 of source revisions to converted revisions. Only getfile() and
345 of source revisions to converted revisions. Only getfile() and
346 lookuprev() should be called on 'source'. 'full' means that 'files'
346 lookuprev() should be called on 'source'. 'full' means that 'files'
347 is complete and all other files should be removed.
347 is complete and all other files should be removed.
348 'cleanp2' is a set of the filenames that are unchanged from p2
348 'cleanp2' is a set of the filenames that are unchanged from p2
349 (only in the common merge case where there are two parents).
349 (only in the common merge case where there are two parents).
350
350
351 Note that the sink repository is not told to update itself to
351 Note that the sink repository is not told to update itself to
352 a particular revision (or even what that revision would be)
352 a particular revision (or even what that revision would be)
353 before it receives the file data.
353 before it receives the file data.
354 """
354 """
355 raise NotImplementedError
355 raise NotImplementedError
356
356
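A hedged sketch of the argument shapes putcommit() receives; every value
below is invented for illustration:

    # files   = [(b'src/a.c', b'<source-file-rev>')]  # (path, version) pairs
    # copies  = {b'docs/new.txt': b'docs/old.txt'}    # destination -> source
    # parents = [b'<converted-parent-rev>']           # already-converted ids
    # cleanp2 = {b'same-as-p2.txt'}                   # two-parent merges only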
357 def puttags(self, tags):
357 def puttags(self, tags):
358 """Put tags into sink.
358 """Put tags into sink.
359
359
360 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
360 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
361 Return a pair (tag_revision, tag_parent_revision), or (None, None)
361 Return a pair (tag_revision, tag_parent_revision), or (None, None)
362 if nothing was changed.
362 if nothing was changed.
363 """
363 """
364 raise NotImplementedError
364 raise NotImplementedError
365
365
366 def setbranch(self, branch, pbranches):
366 def setbranch(self, branch, pbranches):
367 """Set the current branch name. Called before the first putcommit
367 """Set the current branch name. Called before the first putcommit
368 on the branch.
368 on the branch.
369 branch: branch name for subsequent commits
369 branch: branch name for subsequent commits
370 pbranches: (converted parent revision, parent branch) tuples"""
370 pbranches: (converted parent revision, parent branch) tuples"""
371
371
372 def setfilemapmode(self, active):
372 def setfilemapmode(self, active):
373 """Tell the destination that we're using a filemap
373 """Tell the destination that we're using a filemap
374
374
375 Some converter_sources (svn in particular) can claim that a file
375 Some converter_sources (svn in particular) can claim that a file
376 was changed in a revision, even if there was no change. This method
376 was changed in a revision, even if there was no change. This method
377 tells the destination that we're using a filemap and that it should
377 tells the destination that we're using a filemap and that it should
378 filter empty revisions.
378 filter empty revisions.
379 """
379 """
380
380
381 def before(self):
381 def before(self):
382 pass
382 pass
383
383
384 def after(self):
384 def after(self):
385 pass
385 pass
386
386
387 def putbookmarks(self, bookmarks):
387 def putbookmarks(self, bookmarks):
388 """Put bookmarks into sink.
388 """Put bookmarks into sink.
389
389
390 bookmarks: {bookmarkname: sink_rev_id, ...}
390 bookmarks: {bookmarkname: sink_rev_id, ...}
391 where bookmarkname is a UTF-8 string.
391 where bookmarkname is a UTF-8 string.
392 """
392 """
393
393
394 def hascommitfrommap(self, rev):
394 def hascommitfrommap(self, rev):
395 """Return False if a rev mentioned in a filemap is known to not be
395 """Return False if a rev mentioned in a filemap is known to not be
396 present."""
396 present."""
397 raise NotImplementedError
397 raise NotImplementedError
398
398
399 def hascommitforsplicemap(self, rev):
399 def hascommitforsplicemap(self, rev):
400 """This method is for the special needs for splicemap handling and not
400 """This method is for the special needs for splicemap handling and not
401 for general use. Returns True if the sink contains rev, aborts on some
401 for general use. Returns True if the sink contains rev, aborts on some
402 special cases."""
402 special cases."""
403 raise NotImplementedError
403 raise NotImplementedError
404
404
405
405
406 class commandline(object):
406 class commandline(object):
407 def __init__(self, ui, command):
407 def __init__(self, ui, command):
408 self.ui = ui
408 self.ui = ui
409 self.command = command
409 self.command = command
410
410
411 def prerun(self):
411 def prerun(self):
412 pass
412 pass
413
413
414 def postrun(self):
414 def postrun(self):
415 pass
415 pass
416
416
417 def _cmdline(self, cmd, *args, **kwargs):
417 def _cmdline(self, cmd, *args, **kwargs):
418 kwargs = pycompat.byteskwargs(kwargs)
418 kwargs = pycompat.byteskwargs(kwargs)
419 cmdline = [self.command, cmd] + list(args)
419 cmdline = [self.command, cmd] + list(args)
420 for k, v in pycompat.iteritems(kwargs):
420 for k, v in kwargs.items():
421 if len(k) == 1:
421 if len(k) == 1:
422 cmdline.append(b'-' + k)
422 cmdline.append(b'-' + k)
423 else:
423 else:
424 cmdline.append(b'--' + k.replace(b'_', b'-'))
424 cmdline.append(b'--' + k.replace(b'_', b'-'))
425 try:
425 try:
426 if len(k) == 1:
426 if len(k) == 1:
427 cmdline.append(b'' + v)
427 cmdline.append(b'' + v)
428 else:
428 else:
429 cmdline[-1] += b'=' + v
429 cmdline[-1] += b'=' + v
430 except TypeError:
430 except TypeError:
431 pass
431 pass
432 cmdline = [procutil.shellquote(arg) for arg in cmdline]
432 cmdline = [procutil.shellquote(arg) for arg in cmdline]
433 if not self.ui.debugflag:
433 if not self.ui.debugflag:
434 cmdline += [b'2>', pycompat.bytestr(os.devnull)]
434 cmdline += [b'2>', pycompat.bytestr(os.devnull)]
435 cmdline = b' '.join(cmdline)
435 cmdline = b' '.join(cmdline)
436 return cmdline
436 return cmdline
437
437
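A hedged walk-through of the flag rendering above; the b'git' command and
the options shown are hypothetical:

    # commandline(ui, b'git')._cmdline(b'log', b'-p', branch=b'default', v=None)
    # renders roughly: git log -p --branch=default -v 2> /dev/null
    # One-letter keys become b'-k' with the value as a separate argument;
    # longer keys become b'--key=value' with underscores mapped to dashes.
    # A None value keeps the bare flag: b'' + None raises TypeError, which
    # the except clause swallows on purpose.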
438 def _run(self, cmd, *args, **kwargs):
438 def _run(self, cmd, *args, **kwargs):
439 def popen(cmdline):
439 def popen(cmdline):
440 p = subprocess.Popen(
440 p = subprocess.Popen(
441 procutil.tonativestr(cmdline),
441 procutil.tonativestr(cmdline),
442 shell=True,
442 shell=True,
443 bufsize=-1,
443 bufsize=-1,
444 close_fds=procutil.closefds,
444 close_fds=procutil.closefds,
445 stdout=subprocess.PIPE,
445 stdout=subprocess.PIPE,
446 )
446 )
447 return p
447 return p
448
448
449 return self._dorun(popen, cmd, *args, **kwargs)
449 return self._dorun(popen, cmd, *args, **kwargs)
450
450
451 def _run2(self, cmd, *args, **kwargs):
451 def _run2(self, cmd, *args, **kwargs):
452 return self._dorun(procutil.popen2, cmd, *args, **kwargs)
452 return self._dorun(procutil.popen2, cmd, *args, **kwargs)
453
453
454 def _run3(self, cmd, *args, **kwargs):
454 def _run3(self, cmd, *args, **kwargs):
455 return self._dorun(procutil.popen3, cmd, *args, **kwargs)
455 return self._dorun(procutil.popen3, cmd, *args, **kwargs)
456
456
457 def _dorun(self, openfunc, cmd, *args, **kwargs):
457 def _dorun(self, openfunc, cmd, *args, **kwargs):
458 cmdline = self._cmdline(cmd, *args, **kwargs)
458 cmdline = self._cmdline(cmd, *args, **kwargs)
459 self.ui.debug(b'running: %s\n' % (cmdline,))
459 self.ui.debug(b'running: %s\n' % (cmdline,))
460 self.prerun()
460 self.prerun()
461 try:
461 try:
462 return openfunc(cmdline)
462 return openfunc(cmdline)
463 finally:
463 finally:
464 self.postrun()
464 self.postrun()
465
465
466 def run(self, cmd, *args, **kwargs):
466 def run(self, cmd, *args, **kwargs):
467 p = self._run(cmd, *args, **kwargs)
467 p = self._run(cmd, *args, **kwargs)
468 output = p.communicate()[0]
468 output = p.communicate()[0]
469 self.ui.debug(output)
469 self.ui.debug(output)
470 return output, p.returncode
470 return output, p.returncode
471
471
472 def runlines(self, cmd, *args, **kwargs):
472 def runlines(self, cmd, *args, **kwargs):
473 p = self._run(cmd, *args, **kwargs)
473 p = self._run(cmd, *args, **kwargs)
474 output = p.stdout.readlines()
474 output = p.stdout.readlines()
475 p.wait()
475 p.wait()
476 self.ui.debug(b''.join(output))
476 self.ui.debug(b''.join(output))
477 return output, p.returncode
477 return output, p.returncode
478
478
479 def checkexit(self, status, output=b''):
479 def checkexit(self, status, output=b''):
480 if status:
480 if status:
481 if output:
481 if output:
482 self.ui.warn(_(b'%s error:\n') % self.command)
482 self.ui.warn(_(b'%s error:\n') % self.command)
483 self.ui.warn(output)
483 self.ui.warn(output)
484 msg = procutil.explainexit(status)
484 msg = procutil.explainexit(status)
485 raise error.Abort(b'%s %s' % (self.command, msg))
485 raise error.Abort(b'%s %s' % (self.command, msg))
486
486
487 def run0(self, cmd, *args, **kwargs):
487 def run0(self, cmd, *args, **kwargs):
488 output, status = self.run(cmd, *args, **kwargs)
488 output, status = self.run(cmd, *args, **kwargs)
489 self.checkexit(status, output)
489 self.checkexit(status, output)
490 return output
490 return output
491
491
492 def runlines0(self, cmd, *args, **kwargs):
492 def runlines0(self, cmd, *args, **kwargs):
493 output, status = self.runlines(cmd, *args, **kwargs)
493 output, status = self.runlines(cmd, *args, **kwargs)
494 self.checkexit(status, b''.join(output))
494 self.checkexit(status, b''.join(output))
495 return output
495 return output
496
496
497 @propertycache
497 @propertycache
498 def argmax(self):
498 def argmax(self):
499 # POSIX requires at least 4096 bytes for ARG_MAX
499 # POSIX requires at least 4096 bytes for ARG_MAX
500 argmax = 4096
500 argmax = 4096
501 try:
501 try:
502 argmax = os.sysconf("SC_ARG_MAX")
502 argmax = os.sysconf("SC_ARG_MAX")
503 except (AttributeError, ValueError):
503 except (AttributeError, ValueError):
504 pass
504 pass
505
505
506 # Windows shells impose their own limits on command line length,
506 # Windows shells impose their own limits on command line length,
507 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
507 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
508 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
508 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
509 # details about cmd.exe limitations.
509 # details about cmd.exe limitations.
510
510
511 # Since ARG_MAX is for command line _and_ environment, lower our limit
511 # Since ARG_MAX is for command line _and_ environment, lower our limit
512 # (and keep Windows shells happy while doing this).
512 # (and keep Windows shells happy while doing this).
513 return argmax // 2 - 1
513 return argmax // 2 - 1
514
514
515 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
515 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
516 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
516 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
517 limit = self.argmax - cmdlen
517 limit = self.argmax - cmdlen
518 numbytes = 0
518 numbytes = 0
519 fl = []
519 fl = []
520 for fn in arglist:
520 for fn in arglist:
521 b = len(fn) + 3
521 b = len(fn) + 3
522 if numbytes + b < limit or len(fl) == 0:
522 if numbytes + b < limit or len(fl) == 0:
523 fl.append(fn)
523 fl.append(fn)
524 numbytes += b
524 numbytes += b
525 else:
525 else:
526 yield fl
526 yield fl
527 fl = [fn]
527 fl = [fn]
528 numbytes = b
528 numbytes = b
529 if fl:
529 if fl:
530 yield fl
530 yield fl
531
531
532 def xargs(self, arglist, cmd, *args, **kwargs):
532 def xargs(self, arglist, cmd, *args, **kwargs):
533 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
533 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
534 self.run0(cmd, *(list(args) + l), **kwargs)
534 self.run0(cmd, *(list(args) + l), **kwargs)
535
535
536
536
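A hedged usage sketch; the command and the path list are hypothetical:

    # cvs = commandline(ui, b'cvs')
    # cvs.xargs(many_paths, b'add')
    # _limit_arglist() packs many_paths into chunks whose rendered command
    # line stays under self.argmax bytes, and run0() is invoked once per
    # chunk -- the same contract as xargs(1).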
537 class mapfile(dict):
537 class mapfile(dict):
538 def __init__(self, ui, path):
538 def __init__(self, ui, path):
539 super(mapfile, self).__init__()
539 super(mapfile, self).__init__()
540 self.ui = ui
540 self.ui = ui
541 self.path = path
541 self.path = path
542 self.fp = None
542 self.fp = None
543 self.order = []
543 self.order = []
544 self._read()
544 self._read()
545
545
546 def _read(self):
546 def _read(self):
547 if not self.path:
547 if not self.path:
548 return
548 return
549 try:
549 try:
550 fp = open(self.path, b'rb')
550 fp = open(self.path, b'rb')
551 except IOError as err:
551 except IOError as err:
552 if err.errno != errno.ENOENT:
552 if err.errno != errno.ENOENT:
553 raise
553 raise
554 return
554 return
555 for i, line in enumerate(util.iterfile(fp)):
555 for i, line in enumerate(util.iterfile(fp)):
556 line = line.splitlines()[0].rstrip()
556 line = line.splitlines()[0].rstrip()
557 if not line:
557 if not line:
558 # Ignore blank lines
558 # Ignore blank lines
559 continue
559 continue
560 try:
560 try:
561 key, value = line.rsplit(b' ', 1)
561 key, value = line.rsplit(b' ', 1)
562 except ValueError:
562 except ValueError:
563 raise error.Abort(
563 raise error.Abort(
564 _(b'syntax error in %s(%d): key/value pair expected')
564 _(b'syntax error in %s(%d): key/value pair expected')
565 % (self.path, i + 1)
565 % (self.path, i + 1)
566 )
566 )
567 if key not in self:
567 if key not in self:
568 self.order.append(key)
568 self.order.append(key)
569 super(mapfile, self).__setitem__(key, value)
569 super(mapfile, self).__setitem__(key, value)
570 fp.close()
570 fp.close()
571
571
572 def __setitem__(self, key, value):
572 def __setitem__(self, key, value):
573 if self.fp is None:
573 if self.fp is None:
574 try:
574 try:
575 self.fp = open(self.path, b'ab')
575 self.fp = open(self.path, b'ab')
576 except IOError as err:
576 except IOError as err:
577 raise error.Abort(
577 raise error.Abort(
578 _(b'could not open map file %r: %s')
578 _(b'could not open map file %r: %s')
579 % (self.path, encoding.strtolocal(err.strerror))
579 % (self.path, encoding.strtolocal(err.strerror))
580 )
580 )
581 self.fp.write(util.tonativeeol(b'%s %s\n' % (key, value)))
581 self.fp.write(util.tonativeeol(b'%s %s\n' % (key, value)))
582 self.fp.flush()
582 self.fp.flush()
583 super(mapfile, self).__setitem__(key, value)
583 super(mapfile, self).__setitem__(key, value)
584
584
585 def close(self):
585 def close(self):
586 if self.fp:
586 if self.fp:
587 self.fp.close()
587 self.fp.close()
588 self.fp = None
588 self.fp = None
589
589
590
590
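A hedged sketch of the append-only persistence this class provides; the
path below is hypothetical:

    # m = mapfile(ui, b'work/example.revmap')
    # m[b'<source-rev>'] = b'<sink-rev>'   # appended to the file and flushed
    # m.close()
    # Reopening the same path restores the mapping, which is what lets an
    # interrupted conversion resume where it left off.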
591 def makedatetimestamp(t):
591 def makedatetimestamp(t):
592 """Like dateutil.makedate() but for time t instead of current time"""
592 """Like dateutil.makedate() but for time t instead of current time"""
593 delta = datetime.datetime.utcfromtimestamp(
593 delta = datetime.datetime.utcfromtimestamp(
594 t
594 t
595 ) - datetime.datetime.fromtimestamp(t)
595 ) - datetime.datetime.fromtimestamp(t)
596 tz = delta.days * 86400 + delta.seconds
596 tz = delta.days * 86400 + delta.seconds
597 return t, tz
597 return t, tz
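A hedged worked example (the offset depends on the host's local zone;
UTC+2 is assumed here):

    # makedatetimestamp(1000000000) == (1000000000, -7200)
    # utcfromtimestamp(t) - fromtimestamp(t) is -2 hours on such a host,
    # i.e. days == -1 and seconds == 79200, so tz = -86400 + 79200 = -7200,
    # matching Mercurial's "seconds west of UTC" convention.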
@@ -1,669 +1,667 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import collections
8 import collections
9 import os
9 import os
10 import shutil
10 import shutil
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.pycompat import open
13 from mercurial.pycompat import open
14 from mercurial import (
14 from mercurial import (
15 encoding,
15 encoding,
16 error,
16 error,
17 hg,
17 hg,
18 pycompat,
18 pycompat,
19 scmutil,
19 scmutil,
20 util,
20 util,
21 )
21 )
22 from mercurial.utils import dateutil
22 from mercurial.utils import dateutil
23
23
24 from . import (
24 from . import (
25 bzr,
25 bzr,
26 common,
26 common,
27 cvs,
27 cvs,
28 darcs,
28 darcs,
29 filemap,
29 filemap,
30 git,
30 git,
31 gnuarch,
31 gnuarch,
32 hg as hgconvert,
32 hg as hgconvert,
33 monotone,
33 monotone,
34 p4,
34 p4,
35 subversion,
35 subversion,
36 )
36 )
37
37
38 mapfile = common.mapfile
38 mapfile = common.mapfile
39 MissingTool = common.MissingTool
39 MissingTool = common.MissingTool
40 NoRepo = common.NoRepo
40 NoRepo = common.NoRepo
41 SKIPREV = common.SKIPREV
41 SKIPREV = common.SKIPREV
42
42
43 bzr_source = bzr.bzr_source
43 bzr_source = bzr.bzr_source
44 convert_cvs = cvs.convert_cvs
44 convert_cvs = cvs.convert_cvs
45 convert_git = git.convert_git
45 convert_git = git.convert_git
46 darcs_source = darcs.darcs_source
46 darcs_source = darcs.darcs_source
47 gnuarch_source = gnuarch.gnuarch_source
47 gnuarch_source = gnuarch.gnuarch_source
48 mercurial_sink = hgconvert.mercurial_sink
48 mercurial_sink = hgconvert.mercurial_sink
49 mercurial_source = hgconvert.mercurial_source
49 mercurial_source = hgconvert.mercurial_source
50 monotone_source = monotone.monotone_source
50 monotone_source = monotone.monotone_source
51 p4_source = p4.p4_source
51 p4_source = p4.p4_source
52 svn_sink = subversion.svn_sink
52 svn_sink = subversion.svn_sink
53 svn_source = subversion.svn_source
53 svn_source = subversion.svn_source
54
54
55 orig_encoding = b'ascii'
55 orig_encoding = b'ascii'
56
56
57
57
58 def readauthormap(ui, authorfile, authors=None):
58 def readauthormap(ui, authorfile, authors=None):
59 if authors is None:
59 if authors is None:
60 authors = {}
60 authors = {}
61 with open(authorfile, b'rb') as afile:
61 with open(authorfile, b'rb') as afile:
62 for line in afile:
62 for line in afile:
63
63
64 line = line.strip()
64 line = line.strip()
65 if not line or line.startswith(b'#'):
65 if not line or line.startswith(b'#'):
66 continue
66 continue
67
67
68 try:
68 try:
69 srcauthor, dstauthor = line.split(b'=', 1)
69 srcauthor, dstauthor = line.split(b'=', 1)
70 except ValueError:
70 except ValueError:
71 msg = _(b'ignoring bad line in author map file %s: %s\n')
71 msg = _(b'ignoring bad line in author map file %s: %s\n')
72 ui.warn(msg % (authorfile, line.rstrip()))
72 ui.warn(msg % (authorfile, line.rstrip()))
73 continue
73 continue
74
74
75 srcauthor = srcauthor.strip()
75 srcauthor = srcauthor.strip()
76 dstauthor = dstauthor.strip()
76 dstauthor = dstauthor.strip()
77 if authors.get(srcauthor) in (None, dstauthor):
77 if authors.get(srcauthor) in (None, dstauthor):
78 msg = _(b'mapping author %s to %s\n')
78 msg = _(b'mapping author %s to %s\n')
79 ui.debug(msg % (srcauthor, dstauthor))
79 ui.debug(msg % (srcauthor, dstauthor))
80 authors[srcauthor] = dstauthor
80 authors[srcauthor] = dstauthor
81 continue
81 continue
82
82
83 m = _(b'overriding mapping for author %s, was %s, will be %s\n')
83 m = _(b'overriding mapping for author %s, was %s, will be %s\n')
84 ui.status(m % (srcauthor, authors[srcauthor], dstauthor))
84 ui.status(m % (srcauthor, authors[srcauthor], dstauthor))
85 return authors
85 return authors
86
86
87
87
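A hedged example of an author map file this reader accepts; the names are
invented. Blank lines and lines starting with '#' are skipped, and a later
entry for the same source author overrides the earlier one with a status
message:

    jdoe = John Doe <jdoe@example.com>
    build = Build Bot <build@example.com>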
88 def recode(s):
88 def recode(s):
89 if isinstance(s, pycompat.unicode):
89 if isinstance(s, pycompat.unicode):
90 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
90 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
91 else:
91 else:
92 return s.decode('utf-8').encode(
92 return s.decode('utf-8').encode(
93 pycompat.sysstr(orig_encoding), 'replace'
93 pycompat.sysstr(orig_encoding), 'replace'
94 )
94 )
95
95
96
96
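A hedged illustration of the fallback above, assuming orig_encoding is
b'ascii':

    # recode(u'caf\xe9') == b'caf?'      # unicode path, 'replace' mode
    # recode(b'caf\xc3\xa9') == b'caf?'  # bytes are decoded as UTF-8 first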
97 def mapbranch(branch, branchmap):
97 def mapbranch(branch, branchmap):
98 """
98 """
99 >>> bmap = {b'default': b'branch1'}
99 >>> bmap = {b'default': b'branch1'}
100 >>> for i in [b'', None]:
100 >>> for i in [b'', None]:
101 ... mapbranch(i, bmap)
101 ... mapbranch(i, bmap)
102 'branch1'
102 'branch1'
103 'branch1'
103 'branch1'
104 >>> bmap = {b'None': b'branch2'}
104 >>> bmap = {b'None': b'branch2'}
105 >>> for i in [b'', None]:
105 >>> for i in [b'', None]:
106 ... mapbranch(i, bmap)
106 ... mapbranch(i, bmap)
107 'branch2'
107 'branch2'
108 'branch2'
108 'branch2'
109 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
109 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
110 >>> for i in [b'None', b'', None, b'default', b'branch5']:
110 >>> for i in [b'None', b'', None, b'default', b'branch5']:
111 ... mapbranch(i, bmap)
111 ... mapbranch(i, bmap)
112 'branch3'
112 'branch3'
113 'branch4'
113 'branch4'
114 'branch4'
114 'branch4'
115 'branch4'
115 'branch4'
116 'branch5'
116 'branch5'
117 """
117 """
118 # If branch is None or empty, this commit is coming from the source
118 # If branch is None or empty, this commit is coming from the source
119 # repository's default branch and destined for the default branch in the
119 # repository's default branch and destined for the default branch in the
120 # destination repository. For such commits, using a literal "default"
120 # destination repository. For such commits, using a literal "default"
121 # in branchmap below allows the user to map "default" to an alternate
121 # in branchmap below allows the user to map "default" to an alternate
122 # default branch in the destination repository.
122 # default branch in the destination repository.
123 branch = branchmap.get(branch or b'default', branch)
123 branch = branchmap.get(branch or b'default', branch)
124 # At some point we used "None" literal to denote the default branch,
124 # At some point we used "None" literal to denote the default branch,
125 # attempt to use that for backward compatibility.
125 # attempt to use that for backward compatibility.
126 if not branch:
126 if not branch:
127 branch = branchmap.get(b'None', branch)
127 branch = branchmap.get(b'None', branch)
128 return branch
128 return branch
129
129
130
130
131 source_converters = [
131 source_converters = [
132 (b'cvs', convert_cvs, b'branchsort'),
132 (b'cvs', convert_cvs, b'branchsort'),
133 (b'git', convert_git, b'branchsort'),
133 (b'git', convert_git, b'branchsort'),
134 (b'svn', svn_source, b'branchsort'),
134 (b'svn', svn_source, b'branchsort'),
135 (b'hg', mercurial_source, b'sourcesort'),
135 (b'hg', mercurial_source, b'sourcesort'),
136 (b'darcs', darcs_source, b'branchsort'),
136 (b'darcs', darcs_source, b'branchsort'),
137 (b'mtn', monotone_source, b'branchsort'),
137 (b'mtn', monotone_source, b'branchsort'),
138 (b'gnuarch', gnuarch_source, b'branchsort'),
138 (b'gnuarch', gnuarch_source, b'branchsort'),
139 (b'bzr', bzr_source, b'branchsort'),
139 (b'bzr', bzr_source, b'branchsort'),
140 (b'p4', p4_source, b'branchsort'),
140 (b'p4', p4_source, b'branchsort'),
141 ]
141 ]
142
142
143 sink_converters = [
143 sink_converters = [
144 (b'hg', mercurial_sink),
144 (b'hg', mercurial_sink),
145 (b'svn', svn_sink),
145 (b'svn', svn_sink),
146 ]
146 ]
147
147
148
148
149 def convertsource(ui, path, type, revs):
149 def convertsource(ui, path, type, revs):
150 exceptions = []
150 exceptions = []
151 if type and type not in [s[0] for s in source_converters]:
151 if type and type not in [s[0] for s in source_converters]:
152 raise error.Abort(_(b'%s: invalid source repository type') % type)
152 raise error.Abort(_(b'%s: invalid source repository type') % type)
153 for name, source, sortmode in source_converters:
153 for name, source, sortmode in source_converters:
154 try:
154 try:
155 if not type or name == type:
155 if not type or name == type:
156 return source(ui, name, path, revs), sortmode
156 return source(ui, name, path, revs), sortmode
157 except (NoRepo, MissingTool) as inst:
157 except (NoRepo, MissingTool) as inst:
158 exceptions.append(inst)
158 exceptions.append(inst)
159 if not ui.quiet:
159 if not ui.quiet:
160 for inst in exceptions:
160 for inst in exceptions:
161 ui.write(b"%s\n" % pycompat.bytestr(inst.args[0]))
161 ui.write(b"%s\n" % pycompat.bytestr(inst.args[0]))
162 raise error.Abort(_(b'%s: missing or unsupported repository') % path)
162 raise error.Abort(_(b'%s: missing or unsupported repository') % path)
163
163
164
164
165 def convertsink(ui, path, type):
165 def convertsink(ui, path, type):
166 if type and type not in [s[0] for s in sink_converters]:
166 if type and type not in [s[0] for s in sink_converters]:
167 raise error.Abort(_(b'%s: invalid destination repository type') % type)
167 raise error.Abort(_(b'%s: invalid destination repository type') % type)
168 for name, sink in sink_converters:
168 for name, sink in sink_converters:
169 try:
169 try:
170 if not type or name == type:
170 if not type or name == type:
171 return sink(ui, name, path)
171 return sink(ui, name, path)
172 except NoRepo as inst:
172 except NoRepo as inst:
173 ui.note(_(b"convert: %s\n") % inst)
173 ui.note(_(b"convert: %s\n") % inst)
174 except MissingTool as inst:
174 except MissingTool as inst:
175 raise error.Abort(b'%s\n' % inst)
175 raise error.Abort(b'%s\n' % inst)
176 raise error.Abort(_(b'%s: unknown repository type') % path)
176 raise error.Abort(_(b'%s: unknown repository type') % path)
177
177
178
178
179 class progresssource(object):
179 class progresssource(object):
180 def __init__(self, ui, source, filecount):
180 def __init__(self, ui, source, filecount):
181 self.ui = ui
181 self.ui = ui
182 self.source = source
182 self.source = source
183 self.progress = ui.makeprogress(
183 self.progress = ui.makeprogress(
184 _(b'getting files'), unit=_(b'files'), total=filecount
184 _(b'getting files'), unit=_(b'files'), total=filecount
185 )
185 )
186
186
187 def getfile(self, file, rev):
187 def getfile(self, file, rev):
188 self.progress.increment(item=file)
188 self.progress.increment(item=file)
189 return self.source.getfile(file, rev)
189 return self.source.getfile(file, rev)
190
190
191 def targetfilebelongstosource(self, targetfilename):
191 def targetfilebelongstosource(self, targetfilename):
192 return self.source.targetfilebelongstosource(targetfilename)
192 return self.source.targetfilebelongstosource(targetfilename)
193
193
194 def lookuprev(self, rev):
194 def lookuprev(self, rev):
195 return self.source.lookuprev(rev)
195 return self.source.lookuprev(rev)
196
196
197 def close(self):
197 def close(self):
198 self.progress.complete()
198 self.progress.complete()
199
199
200
200
201 class converter(object):
201 class converter(object):
202 def __init__(self, ui, source, dest, revmapfile, opts):
202 def __init__(self, ui, source, dest, revmapfile, opts):
203
203
204 self.source = source
204 self.source = source
205 self.dest = dest
205 self.dest = dest
206 self.ui = ui
206 self.ui = ui
207 self.opts = opts
207 self.opts = opts
208 self.commitcache = {}
208 self.commitcache = {}
209 self.authors = {}
209 self.authors = {}
210 self.authorfile = None
210 self.authorfile = None
211
211
212 # Record converted revisions persistently: maps source revision
212 # Record converted revisions persistently: maps source revision
213 # ID to target revision ID (both strings). (This is how
213 # ID to target revision ID (both strings). (This is how
214 # incremental conversions work.)
214 # incremental conversions work.)
215 self.map = mapfile(ui, revmapfile)
215 self.map = mapfile(ui, revmapfile)
216
216
217 # Read first the dst author map if any
217 # Read first the dst author map if any
218 authorfile = self.dest.authorfile()
218 authorfile = self.dest.authorfile()
219 if authorfile and os.path.exists(authorfile):
219 if authorfile and os.path.exists(authorfile):
220 self.readauthormap(authorfile)
220 self.readauthormap(authorfile)
221 # Extend/Override with new author map if necessary
221 # Extend/Override with new author map if necessary
222 if opts.get(b'authormap'):
222 if opts.get(b'authormap'):
223 self.readauthormap(opts.get(b'authormap'))
223 self.readauthormap(opts.get(b'authormap'))
224 self.authorfile = self.dest.authorfile()
224 self.authorfile = self.dest.authorfile()
225
225
226 self.splicemap = self.parsesplicemap(opts.get(b'splicemap'))
226 self.splicemap = self.parsesplicemap(opts.get(b'splicemap'))
227 self.branchmap = mapfile(ui, opts.get(b'branchmap'))
227 self.branchmap = mapfile(ui, opts.get(b'branchmap'))
228
228
229 def parsesplicemap(self, path):
229 def parsesplicemap(self, path):
230 """check and validate the splicemap format and
230 """check and validate the splicemap format and
231 return a child/parents dictionary.
231 return a child/parents dictionary.
232 Format checking has two parts.
232 Format checking has two parts.
233 1. generic format which is the same across all source types
233 1. generic format which is the same across all source types
234 2. specific format checking which may be different for
234 2. specific format checking which may be different for
235 different source types. This logic is implemented in
235 different source types. This logic is implemented in
236 checkrevformat function in source files like
236 checkrevformat function in source files like
237 hg.py, subversion.py etc.
237 hg.py, subversion.py etc.
238 """
238 """
239
239
240 if not path:
240 if not path:
241 return {}
241 return {}
242 m = {}
242 m = {}
243 try:
243 try:
244 fp = open(path, b'rb')
244 fp = open(path, b'rb')
245 for i, line in enumerate(util.iterfile(fp)):
245 for i, line in enumerate(util.iterfile(fp)):
246 line = line.splitlines()[0].rstrip()
246 line = line.splitlines()[0].rstrip()
247 if not line:
247 if not line:
248 # Ignore blank lines
248 # Ignore blank lines
249 continue
249 continue
250 # split line
250 # split line
251 lex = common.shlexer(data=line, whitespace=b',')
251 lex = common.shlexer(data=line, whitespace=b',')
252 line = list(lex)
252 line = list(lex)
253 # check number of parents
253 # check number of parents
254 if not (2 <= len(line) <= 3):
254 if not (2 <= len(line) <= 3):
255 raise error.Abort(
255 raise error.Abort(
256 _(
256 _(
257 b'syntax error in %s(%d): child parent1'
257 b'syntax error in %s(%d): child parent1'
258 b'[,parent2] expected'
258 b'[,parent2] expected'
259 )
259 )
260 % (path, i + 1)
260 % (path, i + 1)
261 )
261 )
262 for part in line:
262 for part in line:
263 self.source.checkrevformat(part)
263 self.source.checkrevformat(part)
264 child, p1, p2 = line[0], line[1:2], line[2:]
264 child, p1, p2 = line[0], line[1:2], line[2:]
265 if p1 == p2:
265 if p1 == p2:
266 m[child] = p1
266 m[child] = p1
267 else:
267 else:
268 m[child] = p1 + p2
268 m[child] = p1 + p2
269 # if the file does not exist or cannot be read, abort
269 # if the file does not exist or cannot be read, abort
270 except IOError:
270 except IOError:
271 raise error.Abort(
271 raise error.Abort(
272 _(b'splicemap file not found or error reading %s:') % path
272 _(b'splicemap file not found or error reading %s:') % path
273 )
273 )
274 return m
274 return m
275
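A hedged example of a splicemap file this parser accepts; the revision
identifiers are invented. One child per line, with one or two parents
separated by commas or whitespace:

    childrev1 parentrevA
    childrev2 parentrevB,parentrevC

parsesplicemap() would then return
{b'childrev1': [b'parentrevA'], b'childrev2': [b'parentrevB', b'parentrevC']}.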
275
276 def walktree(self, heads):
276 def walktree(self, heads):
277 """Return a mapping that identifies the uncommitted parents of every
277 """Return a mapping that identifies the uncommitted parents of every
278 uncommitted changeset."""
278 uncommitted changeset."""
279 visit = list(heads)
279 visit = list(heads)
280 known = set()
280 known = set()
281 parents = {}
281 parents = {}
282 numcommits = self.source.numcommits()
282 numcommits = self.source.numcommits()
283 progress = self.ui.makeprogress(
283 progress = self.ui.makeprogress(
284 _(b'scanning'), unit=_(b'revisions'), total=numcommits
284 _(b'scanning'), unit=_(b'revisions'), total=numcommits
285 )
285 )
286 while visit:
286 while visit:
287 n = visit.pop(0)
287 n = visit.pop(0)
288 if n in known:
288 if n in known:
289 continue
289 continue
290 if n in self.map:
290 if n in self.map:
291 m = self.map[n]
291 m = self.map[n]
292 if m == SKIPREV or self.dest.hascommitfrommap(m):
292 if m == SKIPREV or self.dest.hascommitfrommap(m):
293 continue
293 continue
294 known.add(n)
294 known.add(n)
295 progress.update(len(known))
295 progress.update(len(known))
296 commit = self.cachecommit(n)
296 commit = self.cachecommit(n)
297 parents[n] = []
297 parents[n] = []
298 for p in commit.parents:
298 for p in commit.parents:
299 parents[n].append(p)
299 parents[n].append(p)
300 visit.append(p)
300 visit.append(p)
301 progress.complete()
301 progress.complete()
302
302
303 return parents
303 return parents
304
304
305 def mergesplicemap(self, parents, splicemap):
305 def mergesplicemap(self, parents, splicemap):
306 """A splicemap redefines child/parent relationships. Check the
306 """A splicemap redefines child/parent relationships. Check the
307 map contains valid revision identifiers and merge the new
307 map contains valid revision identifiers and merge the new
308 links in the source graph.
308 links in the source graph.
309 """
309 """
310 for c in sorted(splicemap):
310 for c in sorted(splicemap):
311 if c not in parents:
311 if c not in parents:
312 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
312 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
313 # Could be in source but not converted during this run
313 # Could be in source but not converted during this run
314 self.ui.warn(
314 self.ui.warn(
315 _(
315 _(
316 b'splice map revision %s is not being '
316 b'splice map revision %s is not being '
317 b'converted, ignoring\n'
317 b'converted, ignoring\n'
318 )
318 )
319 % c
319 % c
320 )
320 )
321 continue
321 continue
322 pc = []
322 pc = []
323 for p in splicemap[c]:
323 for p in splicemap[c]:
324 # We do not have to wait for nodes already in dest.
324 # We do not have to wait for nodes already in dest.
325 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
325 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
326 continue
326 continue
327 # Parent is not in dest and not being converted, not good
327 # Parent is not in dest and not being converted, not good
328 if p not in parents:
328 if p not in parents:
329 raise error.Abort(_(b'unknown splice map parent: %s') % p)
329 raise error.Abort(_(b'unknown splice map parent: %s') % p)
330 pc.append(p)
330 pc.append(p)
331 parents[c] = pc
331 parents[c] = pc
332
332
333 def toposort(self, parents, sortmode):
333 def toposort(self, parents, sortmode):
334 """Return an ordering such that every uncommitted changeset is
334 """Return an ordering such that every uncommitted changeset is
335 preceded by all its uncommitted ancestors."""
335 preceded by all its uncommitted ancestors."""
336
336
337 def mapchildren(parents):
337 def mapchildren(parents):
338 """Return a (children, roots) tuple where 'children' maps parent
338 """Return a (children, roots) tuple where 'children' maps parent
339 revision identifiers to children ones, and 'roots' is the list of
339 revision identifiers to children ones, and 'roots' is the list of
340 revisions without parents. 'parents' must be a mapping of revision
340 revisions without parents. 'parents' must be a mapping of revision
341 identifier to its parents ones.
341 identifier to its parents ones.
342 """
342 """
343 visit = collections.deque(sorted(parents))
343 visit = collections.deque(sorted(parents))
344 seen = set()
344 seen = set()
345 children = {}
345 children = {}
346 roots = []
346 roots = []
347
347
348 while visit:
348 while visit:
349 n = visit.popleft()
349 n = visit.popleft()
350 if n in seen:
350 if n in seen:
351 continue
351 continue
352 seen.add(n)
352 seen.add(n)
353 # Ensure that nodes without parents are present in the
353 # Ensure that nodes without parents are present in the
354 # 'children' mapping.
354 # 'children' mapping.
355 children.setdefault(n, [])
355 children.setdefault(n, [])
356 hasparent = False
356 hasparent = False
357 for p in parents[n]:
357 for p in parents[n]:
358 if p not in self.map:
358 if p not in self.map:
359 visit.append(p)
359 visit.append(p)
360 hasparent = True
360 hasparent = True
361 children.setdefault(p, []).append(n)
361 children.setdefault(p, []).append(n)
362 if not hasparent:
362 if not hasparent:
363 roots.append(n)
363 roots.append(n)
364
364
365 return children, roots
365 return children, roots
366
366
367 # Sort functions are supposed to take a list of revisions which
367 # Sort functions are supposed to take a list of revisions which
368 # can be converted immediately and pick one
368 # can be converted immediately and pick one
369
369
370 def makebranchsorter():
370 def makebranchsorter():
371 """If the previously converted revision has a child in the
371 """If the previously converted revision has a child in the
372 eligible revisions list, pick it. Return the list head
372 eligible revisions list, pick it. Return the list head
373 otherwise. Branch sort attempts to minimize branch
373 otherwise. Branch sort attempts to minimize branch
374 switching, which is harmful for Mercurial backend
374 switching, which is harmful for Mercurial backend
375 compression.
375 compression.
376 """
376 """
377 prev = [None]
377 prev = [None]
378
378
379 def picknext(nodes):
379 def picknext(nodes):
380 next = nodes[0]
380 next = nodes[0]
381 for n in nodes:
381 for n in nodes:
382 if prev[0] in parents[n]:
382 if prev[0] in parents[n]:
383 next = n
383 next = n
384 break
384 break
385 prev[0] = next
385 prev[0] = next
386 return next
386 return next
387
387
388 return picknext
388 return picknext
389
389
390 def makesourcesorter():
390 def makesourcesorter():
391 """Source specific sort."""
391 """Source specific sort."""
392 keyfn = lambda n: self.commitcache[n].sortkey
392 keyfn = lambda n: self.commitcache[n].sortkey
393
393
394 def picknext(nodes):
394 def picknext(nodes):
395 return sorted(nodes, key=keyfn)[0]
395 return sorted(nodes, key=keyfn)[0]
396
396
397 return picknext
397 return picknext
398
398
399 def makeclosesorter():
399 def makeclosesorter():
400 """Close order sort."""
400 """Close order sort."""
401 keyfn = lambda n: (
401 keyfn = lambda n: (
402 b'close' not in self.commitcache[n].extra,
402 b'close' not in self.commitcache[n].extra,
403 self.commitcache[n].sortkey,
403 self.commitcache[n].sortkey,
404 )
404 )
405
405
406 def picknext(nodes):
406 def picknext(nodes):
407 return sorted(nodes, key=keyfn)[0]
407 return sorted(nodes, key=keyfn)[0]
408
408
409 return picknext
409 return picknext
410
410
411 def makedatesorter():
411 def makedatesorter():
412 """Sort revisions by date."""
412 """Sort revisions by date."""
413 dates = {}
413 dates = {}
414
414
415 def getdate(n):
415 def getdate(n):
416 if n not in dates:
416 if n not in dates:
417 dates[n] = dateutil.parsedate(self.commitcache[n].date)
417 dates[n] = dateutil.parsedate(self.commitcache[n].date)
418 return dates[n]
418 return dates[n]
419
419
420 def picknext(nodes):
420 def picknext(nodes):
421 return min([(getdate(n), n) for n in nodes])[1]
421 return min([(getdate(n), n) for n in nodes])[1]
422
422
423 return picknext
423 return picknext
424
424
425 if sortmode == b'branchsort':
425 if sortmode == b'branchsort':
426 picknext = makebranchsorter()
426 picknext = makebranchsorter()
427 elif sortmode == b'datesort':
427 elif sortmode == b'datesort':
428 picknext = makedatesorter()
428 picknext = makedatesorter()
429 elif sortmode == b'sourcesort':
429 elif sortmode == b'sourcesort':
430 picknext = makesourcesorter()
430 picknext = makesourcesorter()
431 elif sortmode == b'closesort':
431 elif sortmode == b'closesort':
432 picknext = makeclosesorter()
432 picknext = makeclosesorter()
433 else:
433 else:
434 raise error.Abort(_(b'unknown sort mode: %s') % sortmode)
434 raise error.Abort(_(b'unknown sort mode: %s') % sortmode)
435
435
436 children, actives = mapchildren(parents)
436 children, actives = mapchildren(parents)
437
437
438 s = []
438 s = []
439 pendings = {}
439 pendings = {}
440 while actives:
440 while actives:
441 n = picknext(actives)
441 n = picknext(actives)
442 actives.remove(n)
442 actives.remove(n)
443 s.append(n)
443 s.append(n)
444
444
445 # Update dependents list
445 # Update dependents list
446 for c in children.get(n, []):
446 for c in children.get(n, []):
447 if c not in pendings:
447 if c not in pendings:
448 pendings[c] = [p for p in parents[c] if p not in self.map]
448 pendings[c] = [p for p in parents[c] if p not in self.map]
449 try:
449 try:
450 pendings[c].remove(n)
450 pendings[c].remove(n)
451 except ValueError:
451 except ValueError:
452 raise error.Abort(
452 raise error.Abort(
453 _(b'cycle detected between %s and %s')
453 _(b'cycle detected between %s and %s')
454 % (recode(c), recode(n))
454 % (recode(c), recode(n))
455 )
455 )
456 if not pendings[c]:
456 if not pendings[c]:
457 # Parents are converted, node is eligible
457 # Parents are converted, node is eligible
458 actives.insert(0, c)
458 actives.insert(0, c)
459 pendings[c] = None
459 pendings[c] = None
460
460
461 if len(s) != len(parents):
461 if len(s) != len(parents):
462 raise error.Abort(_(b"not all revisions were sorted"))
462 raise error.Abort(_(b"not all revisions were sorted"))
463
463
464 return s
464 return s
465
465
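A hedged toy run of mapchildren() above, with an empty self.map and
invented identifiers:

    # parents  = {b'A': [], b'B': [b'A'], b'C': [b'A']}
    # children == {b'A': [b'B', b'C'], b'B': [], b'C': []}; roots == [b'A']
    # toposort() then repeatedly picks an eligible revision, so any valid
    # ordering places b'A' before both b'B' and b'C'.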
466 def writeauthormap(self):
466 def writeauthormap(self):
467 authorfile = self.authorfile
467 authorfile = self.authorfile
468 if authorfile:
468 if authorfile:
469 self.ui.status(_(b'writing author map file %s\n') % authorfile)
469 self.ui.status(_(b'writing author map file %s\n') % authorfile)
470 ofile = open(authorfile, b'wb+')
470 ofile = open(authorfile, b'wb+')
471 for author in self.authors:
471 for author in self.authors:
472 ofile.write(
472 ofile.write(
473 util.tonativeeol(
473 util.tonativeeol(
474 b"%s=%s\n" % (author, self.authors[author])
474 b"%s=%s\n" % (author, self.authors[author])
475 )
475 )
476 )
476 )
477 ofile.close()
477 ofile.close()
478
478
479 def readauthormap(self, authorfile):
479 def readauthormap(self, authorfile):
480 self.authors = readauthormap(self.ui, authorfile, self.authors)
480 self.authors = readauthormap(self.ui, authorfile, self.authors)
481
481
482 def cachecommit(self, rev):
482 def cachecommit(self, rev):
483 commit = self.source.getcommit(rev)
483 commit = self.source.getcommit(rev)
484 commit.author = self.authors.get(commit.author, commit.author)
484 commit.author = self.authors.get(commit.author, commit.author)
485 commit.branch = mapbranch(commit.branch, self.branchmap)
485 commit.branch = mapbranch(commit.branch, self.branchmap)
486 self.commitcache[rev] = commit
486 self.commitcache[rev] = commit
487 return commit
487 return commit
488
488
489 def copy(self, rev):
489 def copy(self, rev):
490 commit = self.commitcache[rev]
490 commit = self.commitcache[rev]
491 full = self.opts.get(b'full')
491 full = self.opts.get(b'full')
492 changes = self.source.getchanges(rev, full)
492 changes = self.source.getchanges(rev, full)
493 if isinstance(changes, bytes):
493 if isinstance(changes, bytes):
494 if changes == SKIPREV:
494 if changes == SKIPREV:
495 dest = SKIPREV
495 dest = SKIPREV
496 else:
496 else:
497 dest = self.map[changes]
497 dest = self.map[changes]
498 self.map[rev] = dest
498 self.map[rev] = dest
499 return
499 return
500 files, copies, cleanp2 = changes
500 files, copies, cleanp2 = changes
501 pbranches = []
501 pbranches = []
502 if commit.parents:
502 if commit.parents:
503 for prev in commit.parents:
503 for prev in commit.parents:
504 if prev not in self.commitcache:
504 if prev not in self.commitcache:
505 self.cachecommit(prev)
505 self.cachecommit(prev)
506 pbranches.append(
506 pbranches.append(
507 (self.map[prev], self.commitcache[prev].branch)
507 (self.map[prev], self.commitcache[prev].branch)
508 )
508 )
509 self.dest.setbranch(commit.branch, pbranches)
509 self.dest.setbranch(commit.branch, pbranches)
510 try:
510 try:
511 parents = self.splicemap[rev]
511 parents = self.splicemap[rev]
512 self.ui.status(
512 self.ui.status(
513 _(b'spliced in %s as parents of %s\n')
513 _(b'spliced in %s as parents of %s\n')
514 % (_(b' and ').join(parents), rev)
514 % (_(b' and ').join(parents), rev)
515 )
515 )
516 parents = [self.map.get(p, p) for p in parents]
516 parents = [self.map.get(p, p) for p in parents]
517 except KeyError:
517 except KeyError:
518 parents = [b[0] for b in pbranches]
518 parents = [b[0] for b in pbranches]
519 parents.extend(
519 parents.extend(
520 self.map[x] for x in commit.optparents if x in self.map
520 self.map[x] for x in commit.optparents if x in self.map
521 )
521 )
522 if len(pbranches) != 2:
522 if len(pbranches) != 2:
523 cleanp2 = set()
523 cleanp2 = set()
524 if len(parents) < 3:
524 if len(parents) < 3:
525 source = progresssource(self.ui, self.source, len(files))
525 source = progresssource(self.ui, self.source, len(files))
526 else:
526 else:
527 # For an octopus merge, we end up traversing the list of
527 # For an octopus merge, we end up traversing the list of
528 # changed files N-1 times. This tweak to the number of
528 # changed files N-1 times. This tweak to the number of
529 # files makes it so the progress bar doesn't overflow
529 # files makes it so the progress bar doesn't overflow
530 # itself.
530 # itself.
531 source = progresssource(
531 source = progresssource(
532 self.ui, self.source, len(files) * (len(parents) - 1)
532 self.ui, self.source, len(files) * (len(parents) - 1)
533 )
533 )
534 newnode = self.dest.putcommit(
534 newnode = self.dest.putcommit(
535 files, copies, parents, commit, source, self.map, full, cleanp2
535 files, copies, parents, commit, source, self.map, full, cleanp2
536 )
536 )
537 source.close()
537 source.close()
538 self.source.converted(rev, newnode)
538 self.source.converted(rev, newnode)
539 self.map[rev] = newnode
539 self.map[rev] = newnode
540
540
541 def convert(self, sortmode):
541 def convert(self, sortmode):
542 try:
542 try:
543 self.source.before()
543 self.source.before()
544 self.dest.before()
544 self.dest.before()
545 self.source.setrevmap(self.map)
545 self.source.setrevmap(self.map)
546 self.ui.status(_(b"scanning source...\n"))
546 self.ui.status(_(b"scanning source...\n"))
547 heads = self.source.getheads()
547 heads = self.source.getheads()
548 parents = self.walktree(heads)
548 parents = self.walktree(heads)
549 self.mergesplicemap(parents, self.splicemap)
549 self.mergesplicemap(parents, self.splicemap)
550 self.ui.status(_(b"sorting...\n"))
550 self.ui.status(_(b"sorting...\n"))
551 t = self.toposort(parents, sortmode)
551 t = self.toposort(parents, sortmode)
552 num = len(t)
552 num = len(t)
553 c = None
553 c = None
554
554
555 self.ui.status(_(b"converting...\n"))
555 self.ui.status(_(b"converting...\n"))
556 progress = self.ui.makeprogress(
556 progress = self.ui.makeprogress(
557 _(b'converting'), unit=_(b'revisions'), total=len(t)
557 _(b'converting'), unit=_(b'revisions'), total=len(t)
558 )
558 )
559 for i, c in enumerate(t):
559 for i, c in enumerate(t):
560 num -= 1
560 num -= 1
561 desc = self.commitcache[c].desc
561 desc = self.commitcache[c].desc
562 if b"\n" in desc:
562 if b"\n" in desc:
563 desc = desc.splitlines()[0]
563 desc = desc.splitlines()[0]
564 # convert log message to local encoding without using
564 # convert log message to local encoding without using
565 # tolocal() because the encoding.encoding convert()
565 # tolocal() because the encoding.encoding convert()
566 # uses is 'utf-8'
566 # uses is 'utf-8'
567 self.ui.status(b"%d %s\n" % (num, recode(desc)))
567 self.ui.status(b"%d %s\n" % (num, recode(desc)))
568 self.ui.note(_(b"source: %s\n") % recode(c))
568 self.ui.note(_(b"source: %s\n") % recode(c))
569 progress.update(i)
569 progress.update(i)
570 self.copy(c)
570 self.copy(c)
571 progress.complete()
571 progress.complete()
572
572
573 if not self.ui.configbool(b'convert', b'skiptags'):
573 if not self.ui.configbool(b'convert', b'skiptags'):
574 tags = self.source.gettags()
574 tags = self.source.gettags()
575 ctags = {}
575 ctags = {}
576 for k in tags:
576 for k in tags:
577 v = tags[k]
577 v = tags[k]
578 if self.map.get(v, SKIPREV) != SKIPREV:
578 if self.map.get(v, SKIPREV) != SKIPREV:
579 ctags[k] = self.map[v]
579 ctags[k] = self.map[v]
580
580
581 if c and ctags:
581 if c and ctags:
582 nrev, tagsparent = self.dest.puttags(ctags)
582 nrev, tagsparent = self.dest.puttags(ctags)
583 if nrev and tagsparent:
583 if nrev and tagsparent:
584 # write another hash correspondence to override the
584 # write another hash correspondence to override the
585 # previous one so we don't end up with extra tag heads
585 # previous one so we don't end up with extra tag heads
586 tagsparents = [
586 tagsparents = [
587 e
587 e for e in self.map.items() if e[1] == tagsparent
588 for e in pycompat.iteritems(self.map)
589 if e[1] == tagsparent
590 ]
588 ]
591 if tagsparents:
589 if tagsparents:
592 self.map[tagsparents[0][0]] = nrev
590 self.map[tagsparents[0][0]] = nrev
593
591
594 bookmarks = self.source.getbookmarks()
592 bookmarks = self.source.getbookmarks()
595 cbookmarks = {}
593 cbookmarks = {}
596 for k in bookmarks:
594 for k in bookmarks:
597 v = bookmarks[k]
595 v = bookmarks[k]
598 if self.map.get(v, SKIPREV) != SKIPREV:
596 if self.map.get(v, SKIPREV) != SKIPREV:
599 cbookmarks[k] = self.map[v]
597 cbookmarks[k] = self.map[v]
600
598
601 if c and cbookmarks:
599 if c and cbookmarks:
602 self.dest.putbookmarks(cbookmarks)
600 self.dest.putbookmarks(cbookmarks)
603
601
604 self.writeauthormap()
602 self.writeauthormap()
605 finally:
603 finally:
606 self.cleanup()
604 self.cleanup()
607
605
608 def cleanup(self):
606 def cleanup(self):
609 try:
607 try:
610 self.dest.after()
608 self.dest.after()
611 finally:
609 finally:
612 self.source.after()
610 self.source.after()
613 self.map.close()
611 self.map.close()
614
612
615
613
616 def convert(ui, src, dest=None, revmapfile=None, **opts):
614 def convert(ui, src, dest=None, revmapfile=None, **opts):
617 opts = pycompat.byteskwargs(opts)
615 opts = pycompat.byteskwargs(opts)
618 global orig_encoding
616 global orig_encoding
619 orig_encoding = encoding.encoding
617 orig_encoding = encoding.encoding
620 encoding.encoding = b'UTF-8'
618 encoding.encoding = b'UTF-8'
621
619
622 # support --authors as an alias for --authormap
620 # support --authors as an alias for --authormap
623 if not opts.get(b'authormap'):
621 if not opts.get(b'authormap'):
624 opts[b'authormap'] = opts.get(b'authors')
622 opts[b'authormap'] = opts.get(b'authors')
625
623
626 if not dest:
624 if not dest:
627 dest = hg.defaultdest(src) + b"-hg"
625 dest = hg.defaultdest(src) + b"-hg"
628 ui.status(_(b"assuming destination %s\n") % dest)
626 ui.status(_(b"assuming destination %s\n") % dest)
629
627
630 destc = convertsink(ui, dest, opts.get(b'dest_type'))
628 destc = convertsink(ui, dest, opts.get(b'dest_type'))
631 destc = scmutil.wrapconvertsink(destc)
629 destc = scmutil.wrapconvertsink(destc)
632
630
633 try:
631 try:
634 srcc, defaultsort = convertsource(
632 srcc, defaultsort = convertsource(
635 ui, src, opts.get(b'source_type'), opts.get(b'rev')
633 ui, src, opts.get(b'source_type'), opts.get(b'rev')
636 )
634 )
637 except Exception:
635 except Exception:
638 for path in destc.created:
636 for path in destc.created:
639 shutil.rmtree(path, True)
637 shutil.rmtree(path, True)
640 raise
638 raise
641
639
642 sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort')
640 sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort')
643 sortmode = [m for m in sortmodes if opts.get(m)]
641 sortmode = [m for m in sortmodes if opts.get(m)]
644 if len(sortmode) > 1:
642 if len(sortmode) > 1:
645 raise error.Abort(_(b'more than one sort mode specified'))
643 raise error.Abort(_(b'more than one sort mode specified'))
646 if sortmode:
644 if sortmode:
647 sortmode = sortmode[0]
645 sortmode = sortmode[0]
648 else:
646 else:
649 sortmode = defaultsort
647 sortmode = defaultsort
650
648
651 if sortmode == b'sourcesort' and not srcc.hasnativeorder():
649 if sortmode == b'sourcesort' and not srcc.hasnativeorder():
652 raise error.Abort(
650 raise error.Abort(
653 _(b'--sourcesort is not supported by this data source')
651 _(b'--sourcesort is not supported by this data source')
654 )
652 )
655 if sortmode == b'closesort' and not srcc.hasnativeclose():
653 if sortmode == b'closesort' and not srcc.hasnativeclose():
656 raise error.Abort(
654 raise error.Abort(
657 _(b'--closesort is not supported by this data source')
655 _(b'--closesort is not supported by this data source')
658 )
656 )
659
657
660 fmap = opts.get(b'filemap')
658 fmap = opts.get(b'filemap')
661 if fmap:
659 if fmap:
662 srcc = filemap.filemap_source(ui, srcc, fmap)
660 srcc = filemap.filemap_source(ui, srcc, fmap)
663 destc.setfilemapmode(True)
661 destc.setfilemapmode(True)
664
662
665 if not revmapfile:
663 if not revmapfile:
666 revmapfile = destc.revmapfile()
664 revmapfile = destc.revmapfile()
667
665
668 c = converter(ui, srcc, destc, revmapfile, opts)
666 c = converter(ui, srcc, destc, revmapfile, opts)
669 c.convert(sortmode)
667 c.convert(sortmode)
@@ -1,1068 +1,1068 b''
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import functools
8 import functools
9 import os
9 import os
10 import pickle
10 import pickle
11 import re
11 import re
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.pycompat import open
14 from mercurial.pycompat import open
15 from mercurial import (
15 from mercurial import (
16 encoding,
16 encoding,
17 error,
17 error,
18 hook,
18 hook,
19 pycompat,
19 pycompat,
20 util,
20 util,
21 )
21 )
22 from mercurial.utils import (
22 from mercurial.utils import (
23 dateutil,
23 dateutil,
24 procutil,
24 procutil,
25 stringutil,
25 stringutil,
26 )
26 )
27
27
28
28
29 class logentry(object):
29 class logentry(object):
30 """Class logentry has the following attributes:
30 """Class logentry has the following attributes:
31 .author - author name as CVS knows it
31 .author - author name as CVS knows it
32 .branch - name of branch this revision is on
32 .branch - name of branch this revision is on
33 .branches - revision tuple of branches starting at this revision
33 .branches - revision tuple of branches starting at this revision
34 .comment - commit message
34 .comment - commit message
35 .commitid - CVS commitid or None
35 .commitid - CVS commitid or None
36 .date - the commit date as a (time, tz) tuple
36 .date - the commit date as a (time, tz) tuple
37 .dead - true if file revision is dead
37 .dead - true if file revision is dead
38 .file - name of the file
38 .file - name of the file
39 .lines - a tuple (+lines, -lines) or None
39 .lines - a tuple (+lines, -lines) or None
40 .parent - previous revision of this entry
40 .parent - previous revision of this entry
41 .rcs - name of file as returned from CVS
41 .rcs - name of file as returned from CVS
42 .revision - revision number as tuple
42 .revision - revision number as tuple
43 .tags - list of tags on the file
43 .tags - list of tags on the file
44 .synthetic - is this a synthetic "file ... added on ..." revision?
44 .synthetic - is this a synthetic "file ... added on ..." revision?
45 .mergepoint - the branch that has been merged from (if present in
45 .mergepoint - the branch that has been merged from (if present in
46 rlog output) or None
46 rlog output) or None
47 .branchpoints - the branches that start at the current entry or empty
47 .branchpoints - the branches that start at the current entry or empty
48 """
48 """
49
49
50 def __init__(self, **entries):
50 def __init__(self, **entries):
51 self.synthetic = False
51 self.synthetic = False
52 self.__dict__.update(entries)
52 self.__dict__.update(entries)
53
53
54 def __repr__(self):
54 def __repr__(self):
55 items = ("%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__))
55 items = ("%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__))
56 return "%s(%s)" % (type(self).__name__, ", ".join(items))
56 return "%s(%s)" % (type(self).__name__, ", ".join(items))
57
57
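A hedged usage sketch of logentry; the field values are invented:

    # e = logentry(file=b'src/main.c', revision=(1, 4), dead=False)
    # repr(e) == ("logentry(dead=False, file=b'src/main.c', "
    #             "revision=(1, 4), synthetic=False)")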
58
58
59 class logerror(Exception):
59 class logerror(Exception):
60 pass
60 pass
61
61
62
62
63 def getrepopath(cvspath):
63 def getrepopath(cvspath):
64 """Return the repository path from a CVS path.
64 """Return the repository path from a CVS path.
65
65
66 >>> getrepopath(b'/foo/bar')
66 >>> getrepopath(b'/foo/bar')
67 '/foo/bar'
67 '/foo/bar'
68 >>> getrepopath(b'c:/foo/bar')
68 >>> getrepopath(b'c:/foo/bar')
69 '/foo/bar'
69 '/foo/bar'
70 >>> getrepopath(b':pserver:10/foo/bar')
70 >>> getrepopath(b':pserver:10/foo/bar')
71 '/foo/bar'
71 '/foo/bar'
72 >>> getrepopath(b':pserver:10c:/foo/bar')
72 >>> getrepopath(b':pserver:10c:/foo/bar')
73 '/foo/bar'
73 '/foo/bar'
74 >>> getrepopath(b':pserver:/foo/bar')
74 >>> getrepopath(b':pserver:/foo/bar')
75 '/foo/bar'
75 '/foo/bar'
76 >>> getrepopath(b':pserver:c:/foo/bar')
76 >>> getrepopath(b':pserver:c:/foo/bar')
77 '/foo/bar'
77 '/foo/bar'
78 >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
78 >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
79 '/foo/bar'
79 '/foo/bar'
80 >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
80 >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
81 '/foo/bar'
81 '/foo/bar'
82 >>> getrepopath(b'user@server/path/to/repository')
82 >>> getrepopath(b'user@server/path/to/repository')
83 '/path/to/repository'
83 '/path/to/repository'
84 """
84 """
85 # According to the CVS manual, CVS paths are expressed like:
85 # According to the CVS manual, CVS paths are expressed like:
86 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
86 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
87 #
87 #
88 # The CVS path is split into parts and the position of the first
88 # The CVS path is split into parts and the position of the first
89 # occurrence of the '/' char after the '@' is located. The result is
89 # occurrence of the '/' char after the '@' is located. The result is
90 # the rest of the string after that '/' sign, including it.
90 # the rest of the string after that '/' sign, including it.
91
91
92 parts = cvspath.split(b':')
92 parts = cvspath.split(b':')
93 atposition = parts[-1].find(b'@')
93 atposition = parts[-1].find(b'@')
94 start = 0
94 start = 0
95
95
96 if atposition != -1:
96 if atposition != -1:
97 start = atposition
97 start = atposition
98
98
99 repopath = parts[-1][parts[-1].find(b'/', start) :]
99 repopath = parts[-1][parts[-1].find(b'/', start) :]
100 return repopath
100 return repopath
101
101
102
102
103 def createlog(ui, directory=None, root=b"", rlog=True, cache=None):
103 def createlog(ui, directory=None, root=b"", rlog=True, cache=None):
104 '''Collect the CVS rlog'''
104 '''Collect the CVS rlog'''
105
105
106 # Because we store many duplicate commit log messages, reusing strings
106 # Because we store many duplicate commit log messages, reusing strings
107 # saves a lot of memory and pickle storage space.
107 # saves a lot of memory and pickle storage space.
108 _scache = {}
108 _scache = {}
109
109
110 def scache(s):
110 def scache(s):
111 """return a shared version of a string"""
111 """return a shared version of a string"""
112 return _scache.setdefault(s, s)
112 return _scache.setdefault(s, s)
113
113
114 ui.status(_(b'collecting CVS rlog\n'))
114 ui.status(_(b'collecting CVS rlog\n'))
115
115
116 log = [] # list of logentry objects containing the CVS state
116 log = [] # list of logentry objects containing the CVS state
117
117
118 # patterns to match in CVS (r)log output, by state of use
118 # patterns to match in CVS (r)log output, by state of use
119 re_00 = re.compile(b'RCS file: (.+)$')
119 re_00 = re.compile(b'RCS file: (.+)$')
120 re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
120 re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
121 re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
121 re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
122 re_03 = re.compile(
122 re_03 = re.compile(
123 b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"
123 b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"
124 )
124 )
125 re_10 = re.compile(b'Working file: (.+)$')
125 re_10 = re.compile(b'Working file: (.+)$')
126 re_20 = re.compile(b'symbolic names:')
126 re_20 = re.compile(b'symbolic names:')
127 re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
127 re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
128 re_31 = re.compile(b'----------------------------$')
128 re_31 = re.compile(b'----------------------------$')
129 re_32 = re.compile(
129 re_32 = re.compile(
130 b'======================================='
130 b'======================================='
131 b'======================================$'
131 b'======================================$'
132 )
132 )
133 re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
133 re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
134 re_60 = re.compile(
134 re_60 = re.compile(
135 br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
135 br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
136 br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
136 br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
137 br'(\s+commitid:\s+([^;]+);)?'
137 br'(\s+commitid:\s+([^;]+);)?'
138 br'(.*mergepoint:\s+([^;]+);)?'
138 br'(.*mergepoint:\s+([^;]+);)?'
139 )
139 )
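# As an illustration, re_60 matches rlog revision detail lines roughly
# of the form (the commitid and mergepoint fields are optional):
#   date: 2005/05/02 12:00:00; author: bob; state: Exp; lines: +2 -1;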
140 re_70 = re.compile(b'branches: (.+);$')
140 re_70 = re.compile(b'branches: (.+);$')
141
141
142 file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
142 file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
143
143
144 prefix = b'' # leading path to strip off what we get from CVS
144 prefix = b'' # leading path to strip off what we get from CVS
145
145
146 if directory is None:
146 if directory is None:
147 # Current working directory
147 # Current working directory
148
148
149 # Get the real directory in the repository
149 # Get the real directory in the repository
150 try:
150 try:
151 with open(os.path.join(b'CVS', b'Repository'), b'rb') as f:
151 with open(os.path.join(b'CVS', b'Repository'), b'rb') as f:
152 prefix = f.read().strip()
152 prefix = f.read().strip()
153 directory = prefix
153 directory = prefix
154 if prefix == b".":
154 if prefix == b".":
155 prefix = b""
155 prefix = b""
156 except IOError:
156 except IOError:
157 raise logerror(_(b'not a CVS sandbox'))
157 raise logerror(_(b'not a CVS sandbox'))
158
158
159 if prefix and not prefix.endswith(pycompat.ossep):
159 if prefix and not prefix.endswith(pycompat.ossep):
160 prefix += pycompat.ossep
160 prefix += pycompat.ossep
161
161
162 # Use the Root file in the sandbox, if it exists
162 # Use the Root file in the sandbox, if it exists
163 try:
163 try:
164 root = open(os.path.join(b'CVS', b'Root'), b'rb').read().strip()
164 root = open(os.path.join(b'CVS', b'Root'), b'rb').read().strip()
165 except IOError:
165 except IOError:
166 pass
166 pass
167
167
168 if not root:
168 if not root:
169 root = encoding.environ.get(b'CVSROOT', b'')
169 root = encoding.environ.get(b'CVSROOT', b'')
170
170
171 # read log cache if one exists
171 # read log cache if one exists
172 oldlog = []
172 oldlog = []
173 date = None
173 date = None
174
174
175 if cache:
175 if cache:
176 cachedir = os.path.expanduser(b'~/.hg.cvsps')
176 cachedir = os.path.expanduser(b'~/.hg.cvsps')
177 if not os.path.exists(cachedir):
177 if not os.path.exists(cachedir):
178 os.mkdir(cachedir)
178 os.mkdir(cachedir)
179
179
180 # The cvsps cache pickle needs a uniquified name, based on the
180 # The cvsps cache pickle needs a uniquified name, based on the
181 # repository location. The address may have all sorts of nasties
181 # repository location. The address may have all sorts of nasties
182 # in it: slashes, colons and such. So here we take just the
182 # in it: slashes, colons and such. So here we take just the
183 # alphanumeric characters, concatenated in a way that does not
183 # alphanumeric characters, concatenated in a way that does not
184 # mix up the various components, so that
184 # mix up the various components, so that
185 # :pserver:user@server:/path
185 # :pserver:user@server:/path
186 # and
186 # and
187 # /pserver/user/server/path
187 # /pserver/user/server/path
188 # are mapped to different cache file names.
188 # are mapped to different cache file names.
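# For illustration, with hypothetical values root=b':pserver:user@server:/path'
# and directory=b'module', the scheme below yields a cache file named
# roughly 'pserver.user-server.path.module.cache' inside ~/.hg.cvsps.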
189 cachefile = root.split(b":") + [directory, b"cache"]
189 cachefile = root.split(b":") + [directory, b"cache"]
190 cachefile = [b'-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
190 cachefile = [b'-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
191 cachefile = os.path.join(
191 cachefile = os.path.join(
192 cachedir, b'.'.join([s for s in cachefile if s])
192 cachedir, b'.'.join([s for s in cachefile if s])
193 )
193 )
194
194
195 if cache == b'update':
195 if cache == b'update':
196 try:
196 try:
197 ui.note(_(b'reading cvs log cache %s\n') % cachefile)
197 ui.note(_(b'reading cvs log cache %s\n') % cachefile)
198 oldlog = pickle.load(open(cachefile, b'rb'))
198 oldlog = pickle.load(open(cachefile, b'rb'))
199 for e in oldlog:
199 for e in oldlog:
200 if not (
200 if not (
201 util.safehasattr(e, b'branchpoints')
201 util.safehasattr(e, b'branchpoints')
202 and util.safehasattr(e, b'commitid')
202 and util.safehasattr(e, b'commitid')
203 and util.safehasattr(e, b'mergepoint')
203 and util.safehasattr(e, b'mergepoint')
204 ):
204 ):
205 ui.status(_(b'ignoring old cache\n'))
205 ui.status(_(b'ignoring old cache\n'))
206 oldlog = []
206 oldlog = []
207 break
207 break
208
208
209 ui.note(_(b'cache has %d log entries\n') % len(oldlog))
209 ui.note(_(b'cache has %d log entries\n') % len(oldlog))
210 except Exception as e:
210 except Exception as e:
211 ui.note(_(b'error reading cache: %r\n') % e)
211 ui.note(_(b'error reading cache: %r\n') % e)
212
212
213 if oldlog:
213 if oldlog:
214 date = oldlog[-1].date # last commit date as a (time,tz) tuple
214 date = oldlog[-1].date # last commit date as a (time,tz) tuple
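# in the format string below, %1%2 is dateutil's escape for the
# numeric timezone offset, e.g. '+0200'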
215 date = dateutil.datestr(date, b'%Y/%m/%d %H:%M:%S %1%2')
215 date = dateutil.datestr(date, b'%Y/%m/%d %H:%M:%S %1%2')
216
216
217 # build the CVS commandline
217 # build the CVS commandline
218 cmd = [b'cvs', b'-q']
218 cmd = [b'cvs', b'-q']
219 if root:
219 if root:
220 cmd.append(b'-d%s' % root)
220 cmd.append(b'-d%s' % root)
221 p = util.normpath(getrepopath(root))
221 p = util.normpath(getrepopath(root))
222 if not p.endswith(b'/'):
222 if not p.endswith(b'/'):
223 p += b'/'
223 p += b'/'
224 if prefix:
224 if prefix:
225 # looks like normpath replaces "" by "."
225 # looks like normpath replaces "" by "."
226 prefix = p + util.normpath(prefix)
226 prefix = p + util.normpath(prefix)
227 else:
227 else:
228 prefix = p
228 prefix = p
229 cmd.append([b'log', b'rlog'][rlog])
229 cmd.append([b'log', b'rlog'][rlog])
230 if date:
230 if date:
231 # no space between option and date string
231 # no space between option and date string
232 cmd.append(b'-d>%s' % date)
232 cmd.append(b'-d>%s' % date)
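# e.g. this yields an argument like '-d>2007/01/01 12:00:00 +0000',
# asking CVS only for entries newer than the cached ones; the whole
# argument is shell-quoted later before the command is run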
233 cmd.append(directory)
233 cmd.append(directory)
234
234
235 # state machine begins here
235 # state machine begins here
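# A rough map of the parser states used below:
#   0 = scanning for 'RCS file', 1 = 'Working file' (log mode only),
#   2 = waiting for 'symbolic names:', 3 = reading tags/branches,
#   4 = separator before the first revision, 5 = revision number,
#   6 = date/author/state line, 7 = branches line or start of the
#   commit message, 8 = remainder of the commit message.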
236 tags = {} # dictionary of revisions on current file with their tags
236 tags = {} # dictionary of revisions on current file with their tags
237 branchmap = {} # mapping between branch names and revision numbers
237 branchmap = {} # mapping between branch names and revision numbers
238 rcsmap = {}
238 rcsmap = {}
239 state = 0
239 state = 0
240 store = False # set when a new record can be appended
240 store = False # set when a new record can be appended
241
241
242 cmd = [procutil.shellquote(arg) for arg in cmd]
242 cmd = [procutil.shellquote(arg) for arg in cmd]
243 ui.note(_(b"running %s\n") % (b' '.join(cmd)))
243 ui.note(_(b"running %s\n") % (b' '.join(cmd)))
244 ui.debug(b"prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
244 ui.debug(b"prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
245
245
246 pfp = procutil.popen(b' '.join(cmd), b'rb')
246 pfp = procutil.popen(b' '.join(cmd), b'rb')
247 peek = util.fromnativeeol(pfp.readline())
247 peek = util.fromnativeeol(pfp.readline())
248 while True:
248 while True:
249 line = peek
249 line = peek
250 if line == b'':
250 if line == b'':
251 break
251 break
252 peek = util.fromnativeeol(pfp.readline())
252 peek = util.fromnativeeol(pfp.readline())
253 if line.endswith(b'\n'):
253 if line.endswith(b'\n'):
254 line = line[:-1]
254 line = line[:-1]
255 # ui.debug('state=%d line=%r\n' % (state, line))
255 # ui.debug('state=%d line=%r\n' % (state, line))
256
256
257 if state == 0:
257 if state == 0:
258 # initial state, consume input until we see 'RCS file'
258 # initial state, consume input until we see 'RCS file'
259 match = re_00.match(line)
259 match = re_00.match(line)
260 if match:
260 if match:
261 rcs = match.group(1)
261 rcs = match.group(1)
262 tags = {}
262 tags = {}
263 if rlog:
263 if rlog:
264 filename = util.normpath(rcs[:-2])
264 filename = util.normpath(rcs[:-2])
265 if filename.startswith(prefix):
265 if filename.startswith(prefix):
266 filename = filename[len(prefix) :]
266 filename = filename[len(prefix) :]
267 if filename.startswith(b'/'):
267 if filename.startswith(b'/'):
268 filename = filename[1:]
268 filename = filename[1:]
269 if filename.startswith(b'Attic/'):
269 if filename.startswith(b'Attic/'):
270 filename = filename[6:]
270 filename = filename[6:]
271 else:
271 else:
272 filename = filename.replace(b'/Attic/', b'/')
272 filename = filename.replace(b'/Attic/', b'/')
273 state = 2
273 state = 2
274 continue
274 continue
275 state = 1
275 state = 1
276 continue
276 continue
277 match = re_01.match(line)
277 match = re_01.match(line)
278 if match:
278 if match:
279 raise logerror(match.group(1))
279 raise logerror(match.group(1))
280 match = re_02.match(line)
280 match = re_02.match(line)
281 if match:
281 if match:
282 raise logerror(match.group(2))
282 raise logerror(match.group(2))
283 if re_03.match(line):
283 if re_03.match(line):
284 raise logerror(line)
284 raise logerror(line)
285
285
286 elif state == 1:
286 elif state == 1:
287 # expect 'Working file' (only when using log instead of rlog)
287 # expect 'Working file' (only when using log instead of rlog)
288 match = re_10.match(line)
288 match = re_10.match(line)
289 assert match, _(b'RCS file must be followed by working file')
289 assert match, _(b'RCS file must be followed by working file')
290 filename = util.normpath(match.group(1))
290 filename = util.normpath(match.group(1))
291 state = 2
291 state = 2
292
292
293 elif state == 2:
293 elif state == 2:
294 # expect 'symbolic names'
294 # expect 'symbolic names'
295 if re_20.match(line):
295 if re_20.match(line):
296 branchmap = {}
296 branchmap = {}
297 state = 3
297 state = 3
298
298
299 elif state == 3:
299 elif state == 3:
300 # read the symbolic names and store as tags
300 # read the symbolic names and store as tags
301 match = re_30.match(line)
301 match = re_30.match(line)
302 if match:
302 if match:
303 rev = [int(x) for x in match.group(2).split(b'.')]
303 rev = [int(x) for x in match.group(2).split(b'.')]
304
304
305 # Convert magic branch number to an odd-numbered one
305 # Convert magic branch number to an odd-numbered one
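# e.g. CVS records a tag for branch 1.2.4 as the magic number 1.2.0.4;
# convert it back to the real odd-length branch number 1.2.4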
306 revn = len(rev)
306 revn = len(rev)
307 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
307 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
308 rev = rev[:-2] + rev[-1:]
308 rev = rev[:-2] + rev[-1:]
309 rev = tuple(rev)
309 rev = tuple(rev)
310
310
311 if rev not in tags:
311 if rev not in tags:
312 tags[rev] = []
312 tags[rev] = []
313 tags[rev].append(match.group(1))
313 tags[rev].append(match.group(1))
314 branchmap[match.group(1)] = match.group(2)
314 branchmap[match.group(1)] = match.group(2)
315
315
316 elif re_31.match(line):
316 elif re_31.match(line):
317 state = 5
317 state = 5
318 elif re_32.match(line):
318 elif re_32.match(line):
319 state = 0
319 state = 0
320
320
321 elif state == 4:
321 elif state == 4:
322 # expecting '------' separator before first revision
322 # expecting '------' separator before first revision
323 if re_31.match(line):
323 if re_31.match(line):
324 state = 5
324 state = 5
325 else:
325 else:
326 assert not re_32.match(line), _(
326 assert not re_32.match(line), _(
327 b'must have at least some revisions'
327 b'must have at least some revisions'
328 )
328 )
329
329
330 elif state == 5:
330 elif state == 5:
331 # expecting revision number and possibly (ignored) lock indication
331 # expecting revision number and possibly (ignored) lock indication
332 # we create the logentry here from values stored in states 0 to 4,
332 # we create the logentry here from values stored in states 0 to 4,
333 # as this state is re-entered for subsequent revisions of a file.
333 # as this state is re-entered for subsequent revisions of a file.
334 match = re_50.match(line)
334 match = re_50.match(line)
335 assert match, _(b'expected revision number')
335 assert match, _(b'expected revision number')
336 e = logentry(
336 e = logentry(
337 rcs=scache(rcs),
337 rcs=scache(rcs),
338 file=scache(filename),
338 file=scache(filename),
339 revision=tuple([int(x) for x in match.group(1).split(b'.')]),
339 revision=tuple([int(x) for x in match.group(1).split(b'.')]),
340 branches=[],
340 branches=[],
341 parent=None,
341 parent=None,
342 commitid=None,
342 commitid=None,
343 mergepoint=None,
343 mergepoint=None,
344 branchpoints=set(),
344 branchpoints=set(),
345 )
345 )
346
346
347 state = 6
347 state = 6
348
348
349 elif state == 6:
349 elif state == 6:
350 # expecting date, author, state, lines changed
350 # expecting date, author, state, lines changed
351 match = re_60.match(line)
351 match = re_60.match(line)
352 assert match, _(b'revision must be followed by date line')
352 assert match, _(b'revision must be followed by date line')
353 d = match.group(1)
353 d = match.group(1)
354 if d[2] == b'/':
354 if d[2] == b'/':
355 # Y2K
355 # Y2K
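# a two-digit year such as 99/05/01 is assumed to be in the 1900s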
356 d = b'19' + d
356 d = b'19' + d
357
357
358 if len(d.split()) != 3:
358 if len(d.split()) != 3:
359 # cvs log dates are always in GMT
359 # cvs log dates are always in GMT
360 d = d + b' UTC'
360 d = d + b' UTC'
361 e.date = dateutil.parsedate(
361 e.date = dateutil.parsedate(
362 d,
362 d,
363 [
363 [
364 b'%y/%m/%d %H:%M:%S',
364 b'%y/%m/%d %H:%M:%S',
365 b'%Y/%m/%d %H:%M:%S',
365 b'%Y/%m/%d %H:%M:%S',
366 b'%Y-%m-%d %H:%M:%S',
366 b'%Y-%m-%d %H:%M:%S',
367 ],
367 ],
368 )
368 )
369 e.author = scache(match.group(2))
369 e.author = scache(match.group(2))
370 e.dead = match.group(3).lower() == b'dead'
370 e.dead = match.group(3).lower() == b'dead'
371
371
372 if match.group(5):
372 if match.group(5):
373 if match.group(6):
373 if match.group(6):
374 e.lines = (int(match.group(5)), int(match.group(6)))
374 e.lines = (int(match.group(5)), int(match.group(6)))
375 else:
375 else:
376 e.lines = (int(match.group(5)), 0)
376 e.lines = (int(match.group(5)), 0)
377 elif match.group(6):
377 elif match.group(6):
378 e.lines = (0, int(match.group(6)))
378 e.lines = (0, int(match.group(6)))
379 else:
379 else:
380 e.lines = None
380 e.lines = None
381
381
382 if match.group(7): # cvs 1.12 commitid
382 if match.group(7): # cvs 1.12 commitid
383 e.commitid = match.group(8)
383 e.commitid = match.group(8)
384
384
385 if match.group(9): # cvsnt mergepoint
385 if match.group(9): # cvsnt mergepoint
386 myrev = match.group(10).split(b'.')
386 myrev = match.group(10).split(b'.')
387 if len(myrev) == 2: # head
387 if len(myrev) == 2: # head
388 e.mergepoint = b'HEAD'
388 e.mergepoint = b'HEAD'
389 else:
389 else:
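# recover the magic branch number the mergepoint revision lives on,
# e.g. mergepoint revision 1.2.4.3 maps back to branch number 1.2.0.4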
390 myrev = b'.'.join(myrev[:-2] + [b'0', myrev[-2]])
390 myrev = b'.'.join(myrev[:-2] + [b'0', myrev[-2]])
391 branches = [b for b in branchmap if branchmap[b] == myrev]
391 branches = [b for b in branchmap if branchmap[b] == myrev]
392 assert len(branches) == 1, (
392 assert len(branches) == 1, (
393 b'unknown branch: %s' % e.mergepoint
393 b'unknown branch: %s' % e.mergepoint
394 )
394 )
395 e.mergepoint = branches[0]
395 e.mergepoint = branches[0]
396
396
397 e.comment = []
397 e.comment = []
398 state = 7
398 state = 7
399
399
400 elif state == 7:
400 elif state == 7:
401 # read the revision numbers of branches that start at this revision
401 # read the revision numbers of branches that start at this revision
402 # or store the commit log message otherwise
402 # or store the commit log message otherwise
403 m = re_70.match(line)
403 m = re_70.match(line)
404 if m:
404 if m:
405 e.branches = [
405 e.branches = [
406 tuple([int(y) for y in x.strip().split(b'.')])
406 tuple([int(y) for y in x.strip().split(b'.')])
407 for x in m.group(1).split(b';')
407 for x in m.group(1).split(b';')
408 ]
408 ]
409 state = 8
409 state = 8
410 elif re_31.match(line) and re_50.match(peek):
410 elif re_31.match(line) and re_50.match(peek):
411 state = 5
411 state = 5
412 store = True
412 store = True
413 elif re_32.match(line):
413 elif re_32.match(line):
414 state = 0
414 state = 0
415 store = True
415 store = True
416 else:
416 else:
417 e.comment.append(line)
417 e.comment.append(line)
418
418
419 elif state == 8:
419 elif state == 8:
420 # store commit log message
420 # store commit log message
421 if re_31.match(line):
421 if re_31.match(line):
422 cpeek = peek
422 cpeek = peek
423 if cpeek.endswith(b'\n'):
423 if cpeek.endswith(b'\n'):
424 cpeek = cpeek[:-1]
424 cpeek = cpeek[:-1]
425 if re_50.match(cpeek):
425 if re_50.match(cpeek):
426 state = 5
426 state = 5
427 store = True
427 store = True
428 else:
428 else:
429 e.comment.append(line)
429 e.comment.append(line)
430 elif re_32.match(line):
430 elif re_32.match(line):
431 state = 0
431 state = 0
432 store = True
432 store = True
433 else:
433 else:
434 e.comment.append(line)
434 e.comment.append(line)
435
435
436 # When a file is added on a branch B1, CVS creates a synthetic
436 # When a file is added on a branch B1, CVS creates a synthetic
437 # dead trunk revision 1.1 so that the branch has a root.
437 # dead trunk revision 1.1 so that the branch has a root.
438 # Likewise, if you merge such a file to a later branch B2 (one
438 # Likewise, if you merge such a file to a later branch B2 (one
439 # that already existed when the file was added on B1), CVS
439 # that already existed when the file was added on B1), CVS
440 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
440 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
441 # these revisions now, but mark them synthetic so
441 # these revisions now, but mark them synthetic so
442 # createchangeset() can take care of them.
442 # createchangeset() can take care of them.
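# A typical synthetic log message looks like
# "file foo.c was initially added on branch B1." and is matched by
# file_added_re above.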
443 if (
443 if (
444 store
444 store
445 and e.dead
445 and e.dead
446 and e.revision[-1] == 1 # 1.1 or 1.1.x.1
446 and e.revision[-1] == 1 # 1.1 or 1.1.x.1
447 and len(e.comment) == 1
447 and len(e.comment) == 1
448 and file_added_re.match(e.comment[0])
448 and file_added_re.match(e.comment[0])
449 ):
449 ):
450 ui.debug(
450 ui.debug(
451 b'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
451 b'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
452 )
452 )
453 e.synthetic = True
453 e.synthetic = True
454
454
455 if store:
455 if store:
456 # clean up the results and save in the log.
456 # clean up the results and save in the log.
457 store = False
457 store = False
458 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
458 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
459 e.comment = scache(b'\n'.join(e.comment))
459 e.comment = scache(b'\n'.join(e.comment))
460
460
461 revn = len(e.revision)
461 revn = len(e.revision)
462 if revn > 3 and (revn % 2) == 0:
462 if revn > 3 and (revn % 2) == 0:
463 e.branch = tags.get(e.revision[:-1], [None])[0]
463 e.branch = tags.get(e.revision[:-1], [None])[0]
464 else:
464 else:
465 e.branch = None
465 e.branch = None
466
466
467 # find the branches starting from this revision
467 # find the branches starting from this revision
468 branchpoints = set()
468 branchpoints = set()
469 for branch, revision in pycompat.iteritems(branchmap):
469 for branch, revision in branchmap.items():
470 revparts = tuple([int(i) for i in revision.split(b'.')])
470 revparts = tuple([int(i) for i in revision.split(b'.')])
471 if len(revparts) < 2: # bad tags
471 if len(revparts) < 2: # bad tags
472 continue
472 continue
473 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
473 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
474 # normal branch
474 # normal branch
475 if revparts[:-2] == e.revision:
475 if revparts[:-2] == e.revision:
476 branchpoints.add(branch)
476 branchpoints.add(branch)
477 elif revparts == (1, 1, 1): # vendor branch
477 elif revparts == (1, 1, 1): # vendor branch
478 if revparts in e.branches:
478 if revparts in e.branches:
479 branchpoints.add(branch)
479 branchpoints.add(branch)
480 e.branchpoints = branchpoints
480 e.branchpoints = branchpoints
481
481
482 log.append(e)
482 log.append(e)
483
483
484 rcsmap[e.rcs.replace(b'/Attic/', b'/')] = e.rcs
484 rcsmap[e.rcs.replace(b'/Attic/', b'/')] = e.rcs
485
485
486 if len(log) % 100 == 0:
486 if len(log) % 100 == 0:
487 ui.status(
487 ui.status(
488 stringutil.ellipsis(b'%d %s' % (len(log), e.file), 80)
488 stringutil.ellipsis(b'%d %s' % (len(log), e.file), 80)
489 + b'\n'
489 + b'\n'
490 )
490 )
491
491
492 log.sort(key=lambda x: (x.rcs, x.revision))
492 log.sort(key=lambda x: (x.rcs, x.revision))
493
493
494 # find parent revisions of individual files
494 # find parent revisions of individual files
495 versions = {}
495 versions = {}
496 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
496 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
497 rcs = e.rcs.replace(b'/Attic/', b'/')
497 rcs = e.rcs.replace(b'/Attic/', b'/')
498 if rcs in rcsmap:
498 if rcs in rcsmap:
499 e.rcs = rcsmap[rcs]
499 e.rcs = rcsmap[rcs]
500 branch = e.revision[:-1]
500 branch = e.revision[:-1]
501 versions[(e.rcs, branch)] = e.revision
501 versions[(e.rcs, branch)] = e.revision
502
502
503 for e in log:
503 for e in log:
504 branch = e.revision[:-1]
504 branch = e.revision[:-1]
505 p = versions.get((e.rcs, branch), None)
505 p = versions.get((e.rcs, branch), None)
506 if p is None:
506 if p is None:
507 p = e.revision[:-2]
507 p = e.revision[:-2]
508 e.parent = p
508 e.parent = p
509 versions[(e.rcs, branch)] = e.revision
509 versions[(e.rcs, branch)] = e.revision
510
510
511 # update the log cache
511 # update the log cache
512 if cache:
512 if cache:
513 if log:
513 if log:
514 # join up the old and new logs
514 # join up the old and new logs
515 log.sort(key=lambda x: x.date)
515 log.sort(key=lambda x: x.date)
516
516
517 if oldlog and oldlog[-1].date >= log[0].date:
517 if oldlog and oldlog[-1].date >= log[0].date:
518 raise logerror(
518 raise logerror(
519 _(
519 _(
520 b'log cache overlaps with new log entries,'
520 b'log cache overlaps with new log entries,'
521 b' re-run without cache.'
521 b' re-run without cache.'
522 )
522 )
523 )
523 )
524
524
525 log = oldlog + log
525 log = oldlog + log
526
526
527 # write the new cachefile
527 # write the new cachefile
528 ui.note(_(b'writing cvs log cache %s\n') % cachefile)
528 ui.note(_(b'writing cvs log cache %s\n') % cachefile)
529 pickle.dump(log, open(cachefile, b'wb'))
529 pickle.dump(log, open(cachefile, b'wb'))
530 else:
530 else:
531 log = oldlog
531 log = oldlog
532
532
533 ui.status(_(b'%d log entries\n') % len(log))
533 ui.status(_(b'%d log entries\n') % len(log))
534
534
535 encodings = ui.configlist(b'convert', b'cvsps.logencoding')
535 encodings = ui.configlist(b'convert', b'cvsps.logencoding')
536 if encodings:
536 if encodings:
537
537
538 def revstr(r):
538 def revstr(r):
539 # this is needed, because logentry.revision is a tuple of "int"
539 # this is needed, because logentry.revision is a tuple of "int"
540 # (e.g. (1, 2) for "1.2")
540 # (e.g. (1, 2) for "1.2")
541 return b'.'.join(pycompat.maplist(pycompat.bytestr, r))
541 return b'.'.join(pycompat.maplist(pycompat.bytestr, r))
542
542
543 for entry in log:
543 for entry in log:
544 comment = entry.comment
544 comment = entry.comment
545 for e in encodings:
545 for e in encodings:
546 try:
546 try:
547 entry.comment = comment.decode(pycompat.sysstr(e)).encode(
547 entry.comment = comment.decode(pycompat.sysstr(e)).encode(
548 'utf-8'
548 'utf-8'
549 )
549 )
550 if ui.debugflag:
550 if ui.debugflag:
551 ui.debug(
551 ui.debug(
552 b"transcoding by %s: %s of %s\n"
552 b"transcoding by %s: %s of %s\n"
553 % (e, revstr(entry.revision), entry.file)
553 % (e, revstr(entry.revision), entry.file)
554 )
554 )
555 break
555 break
556 except UnicodeDecodeError:
556 except UnicodeDecodeError:
557 pass # try next encoding
557 pass # try next encoding
558 except LookupError as inst: # unknown encoding, maybe
558 except LookupError as inst: # unknown encoding, maybe
559 raise error.Abort(
559 raise error.Abort(
560 pycompat.bytestr(inst),
560 pycompat.bytestr(inst),
561 hint=_(
561 hint=_(
562 b'check convert.cvsps.logencoding configuration'
562 b'check convert.cvsps.logencoding configuration'
563 ),
563 ),
564 )
564 )
565 else:
565 else:
566 raise error.Abort(
566 raise error.Abort(
567 _(
567 _(
568 b"no encoding can transcode"
568 b"no encoding can transcode"
569 b" CVS log message for %s of %s"
569 b" CVS log message for %s of %s"
570 )
570 )
571 % (revstr(entry.revision), entry.file),
571 % (revstr(entry.revision), entry.file),
572 hint=_(b'check convert.cvsps.logencoding configuration'),
572 hint=_(b'check convert.cvsps.logencoding configuration'),
573 )
573 )
574
574
575 hook.hook(ui, None, b"cvslog", True, log=log)
575 hook.hook(ui, None, b"cvslog", True, log=log)
576
576
577 return log
577 return log
578
578
579
579
580 class changeset(object):
580 class changeset(object):
581 """Class changeset has the following attributes:
581 """Class changeset has the following attributes:
582 .id - integer identifying this changeset (list index)
582 .id - integer identifying this changeset (list index)
583 .author - author name as CVS knows it
583 .author - author name as CVS knows it
584 .branch - name of branch this changeset is on, or None
584 .branch - name of branch this changeset is on, or None
585 .comment - commit message
585 .comment - commit message
586 .commitid - CVS commitid or None
586 .commitid - CVS commitid or None
587 .date - the commit date as a (time,tz) tuple
587 .date - the commit date as a (time,tz) tuple
588 .entries - list of logentry objects in this changeset
588 .entries - list of logentry objects in this changeset
589 .parents - list of one or two parent changesets
589 .parents - list of one or two parent changesets
590 .tags - list of tags on this changeset
590 .tags - list of tags on this changeset
591 .synthetic - from synthetic revision "file ... added on branch ..."
591 .synthetic - from synthetic revision "file ... added on branch ..."
592 .mergepoint - the branch that has been merged from or None
592 .mergepoint - the branch that has been merged from or None
593 .branchpoints - the branches that start at the current entry or empty
593 .branchpoints - the branches that start at the current entry or empty
594 """
594 """
595
595
596 def __init__(self, **entries):
596 def __init__(self, **entries):
597 self.id = None
597 self.id = None
598 self.synthetic = False
598 self.synthetic = False
599 self.__dict__.update(entries)
599 self.__dict__.update(entries)
600
600
601 def __repr__(self):
601 def __repr__(self):
602 items = (
602 items = (
603 b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
603 b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
604 )
604 )
605 return b"%s(%s)" % (type(self).__name__, b", ".join(items))
605 return b"%s(%s)" % (type(self).__name__, b", ".join(items))
606
606
607
607
608 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
608 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
609 '''Convert log into changesets.'''
609 '''Convert log into changesets.'''
610
610
611 ui.status(_(b'creating changesets\n'))
611 ui.status(_(b'creating changesets\n'))
612
612
613 # try to order commitids by date
613 # try to order commitids by date
614 mindate = {}
614 mindate = {}
615 for e in log:
615 for e in log:
616 if e.commitid:
616 if e.commitid:
617 if e.commitid not in mindate:
617 if e.commitid not in mindate:
618 mindate[e.commitid] = e.date
618 mindate[e.commitid] = e.date
619 else:
619 else:
620 mindate[e.commitid] = min(e.date, mindate[e.commitid])
620 mindate[e.commitid] = min(e.date, mindate[e.commitid])
621
621
622 # Merge changesets
622 # Merge changesets
623 log.sort(
623 log.sort(
624 key=lambda x: (
624 key=lambda x: (
625 mindate.get(x.commitid, (-1, 0)),
625 mindate.get(x.commitid, (-1, 0)),
626 x.commitid or b'',
626 x.commitid or b'',
627 x.comment,
627 x.comment,
628 x.author,
628 x.author,
629 x.branch or b'',
629 x.branch or b'',
630 x.date,
630 x.date,
631 x.branchpoints,
631 x.branchpoints,
632 )
632 )
633 )
633 )
634
634
635 changesets = []
635 changesets = []
636 files = set()
636 files = set()
637 c = None
637 c = None
638 for i, e in enumerate(log):
638 for i, e in enumerate(log):
639
639
640 # Check if log entry belongs to the current changeset or not.
640 # Check if log entry belongs to the current changeset or not.
641
641
642 # Since CVS is file-centric, two different file revisions with
642 # Since CVS is file-centric, two different file revisions with
643 # different branchpoints should be treated as belonging to two
643 # different branchpoints should be treated as belonging to two
644 # different changesets (and the ordering is important and not
644 # different changesets (and the ordering is important and not
645 # honoured by cvsps at this point).
645 # honoured by cvsps at this point).
646 #
646 #
647 # Consider the following case:
647 # Consider the following case:
648 # foo 1.1 branchpoints: [MYBRANCH]
648 # foo 1.1 branchpoints: [MYBRANCH]
649 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
649 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
650 #
650 #
651 # Here foo is part of only MYBRANCH, not MYBRANCH2: e.g. a
651 # Here foo is part of only MYBRANCH, not MYBRANCH2: e.g. a
652 # later version of foo may be in MYBRANCH2, so foo should be the
652 # later version of foo may be in MYBRANCH2, so foo should be the
653 # first changeset and bar the next and MYBRANCH and MYBRANCH2
653 # first changeset and bar the next and MYBRANCH and MYBRANCH2
654 # should both start off of the bar changeset. No provisions are
654 # should both start off of the bar changeset. No provisions are
655 # made to ensure that this is, in fact, what happens.
655 # made to ensure that this is, in fact, what happens.
656 if not (
656 if not (
657 c
657 c
658 and e.branchpoints == c.branchpoints
658 and e.branchpoints == c.branchpoints
659 and ( # cvs commitids
659 and ( # cvs commitids
660 (e.commitid is not None and e.commitid == c.commitid)
660 (e.commitid is not None and e.commitid == c.commitid)
661 or ( # no commitids, use fuzzy commit detection
661 or ( # no commitids, use fuzzy commit detection
662 (e.commitid is None or c.commitid is None)
662 (e.commitid is None or c.commitid is None)
663 and e.comment == c.comment
663 and e.comment == c.comment
664 and e.author == c.author
664 and e.author == c.author
665 and e.branch == c.branch
665 and e.branch == c.branch
666 and (
666 and (
667 (c.date[0] + c.date[1])
667 (c.date[0] + c.date[1])
668 <= (e.date[0] + e.date[1])
668 <= (e.date[0] + e.date[1])
669 <= (c.date[0] + c.date[1]) + fuzz
669 <= (c.date[0] + c.date[1]) + fuzz
670 )
670 )
671 and e.file not in files
671 and e.file not in files
672 )
672 )
673 )
673 )
674 ):
674 ):
675 c = changeset(
675 c = changeset(
676 comment=e.comment,
676 comment=e.comment,
677 author=e.author,
677 author=e.author,
678 branch=e.branch,
678 branch=e.branch,
679 date=e.date,
679 date=e.date,
680 entries=[],
680 entries=[],
681 mergepoint=e.mergepoint,
681 mergepoint=e.mergepoint,
682 branchpoints=e.branchpoints,
682 branchpoints=e.branchpoints,
683 commitid=e.commitid,
683 commitid=e.commitid,
684 )
684 )
685 changesets.append(c)
685 changesets.append(c)
686
686
687 files = set()
687 files = set()
688 if len(changesets) % 100 == 0:
688 if len(changesets) % 100 == 0:
689 t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
689 t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
690 ui.status(stringutil.ellipsis(t, 80) + b'\n')
690 ui.status(stringutil.ellipsis(t, 80) + b'\n')
691
691
692 c.entries.append(e)
692 c.entries.append(e)
693 files.add(e.file)
693 files.add(e.file)
694 c.date = e.date # changeset date is date of latest commit in it
694 c.date = e.date # changeset date is date of latest commit in it
695
695
696 # Mark synthetic changesets
696 # Mark synthetic changesets
697
697
698 for c in changesets:
698 for c in changesets:
699 # Synthetic revisions always get their own changeset, because
699 # Synthetic revisions always get their own changeset, because
700 # the log message includes the filename. E.g. if you add file3
700 # the log message includes the filename. E.g. if you add file3
701 # and file4 on a branch, you get four log entries and three
701 # and file4 on a branch, you get four log entries and three
702 # changesets:
702 # changesets:
703 # "File file3 was added on branch ..." (synthetic, 1 entry)
703 # "File file3 was added on branch ..." (synthetic, 1 entry)
704 # "File file4 was added on branch ..." (synthetic, 1 entry)
704 # "File file4 was added on branch ..." (synthetic, 1 entry)
705 # "Add file3 and file4 to fix ..." (real, 2 entries)
705 # "Add file3 and file4 to fix ..." (real, 2 entries)
706 # Hence the check for 1 entry here.
706 # Hence the check for 1 entry here.
707 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
707 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
708
708
709 # Sort files in each changeset
709 # Sort files in each changeset
710
710
711 def entitycompare(l, r):
711 def entitycompare(l, r):
712 """Mimic cvsps sorting order"""
712 """Mimic cvsps sorting order"""
713 l = l.file.split(b'/')
713 l = l.file.split(b'/')
714 r = r.file.split(b'/')
714 r = r.file.split(b'/')
715 nl = len(l)
715 nl = len(l)
716 nr = len(r)
716 nr = len(r)
717 n = min(nl, nr)
717 n = min(nl, nr)
718 for i in range(n):
718 for i in range(n):
719 if i + 1 == nl and nl < nr:
719 if i + 1 == nl and nl < nr:
720 return -1
720 return -1
721 elif i + 1 == nr and nl > nr:
721 elif i + 1 == nr and nl > nr:
722 return +1
722 return +1
723 elif l[i] < r[i]:
723 elif l[i] < r[i]:
724 return -1
724 return -1
725 elif l[i] > r[i]:
725 elif l[i] > r[i]:
726 return +1
726 return +1
727 return 0
727 return 0
728
728
729 for c in changesets:
729 for c in changesets:
730 c.entries.sort(key=functools.cmp_to_key(entitycompare))
730 c.entries.sort(key=functools.cmp_to_key(entitycompare))
731
731
732 # Sort changesets by date
732 # Sort changesets by date
733
733
734 odd = set()
734 odd = set()
735
735
736 def cscmp(l, r):
736 def cscmp(l, r):
737 d = sum(l.date) - sum(r.date)
737 d = sum(l.date) - sum(r.date)
738 if d:
738 if d:
739 return d
739 return d
740
740
741 # detect vendor branches and initial commits on a branch
741 # detect vendor branches and initial commits on a branch
742 le = {}
742 le = {}
743 for e in l.entries:
743 for e in l.entries:
744 le[e.rcs] = e.revision
744 le[e.rcs] = e.revision
745 re = {}
745 re = {}
746 for e in r.entries:
746 for e in r.entries:
747 re[e.rcs] = e.revision
747 re[e.rcs] = e.revision
748
748
749 d = 0
749 d = 0
750 for e in l.entries:
750 for e in l.entries:
751 if re.get(e.rcs, None) == e.parent:
751 if re.get(e.rcs, None) == e.parent:
752 assert not d
752 assert not d
753 d = 1
753 d = 1
754 break
754 break
755
755
756 for e in r.entries:
756 for e in r.entries:
757 if le.get(e.rcs, None) == e.parent:
757 if le.get(e.rcs, None) == e.parent:
758 if d:
758 if d:
759 odd.add((l, r))
759 odd.add((l, r))
760 d = -1
760 d = -1
761 break
761 break
762 # By this point, the changesets are sufficiently compared that
762 # By this point, the changesets are sufficiently compared that
763 # we don't really care about ordering. However, this leaves
763 # we don't really care about ordering. However, this leaves
764 # some race conditions in the tests, so we compare on the
764 # some race conditions in the tests, so we compare on the
765 # number of files modified, the files contained in each
765 # number of files modified, the files contained in each
766 # changeset, and the branchpoints in the change to ensure test
766 # changeset, and the branchpoints in the change to ensure test
767 # output remains stable.
767 # output remains stable.
768
768
769 # recommended replacement for cmp from
769 # recommended replacement for cmp from
770 # https://docs.python.org/3.0/whatsnew/3.0.html
770 # https://docs.python.org/3.0/whatsnew/3.0.html
771 c = lambda x, y: (x > y) - (x < y)
771 c = lambda x, y: (x > y) - (x < y)
772 # Sort bigger changes first.
772 # Sort bigger changes first.
773 if not d:
773 if not d:
774 d = c(len(l.entries), len(r.entries))
774 d = c(len(l.entries), len(r.entries))
775 # Try sorting by filename in the change.
775 # Try sorting by filename in the change.
776 if not d:
776 if not d:
777 d = c([e.file for e in l.entries], [e.file for e in r.entries])
777 d = c([e.file for e in l.entries], [e.file for e in r.entries])
778 # Try and put changes without a branch point before ones with
778 # Try and put changes without a branch point before ones with
779 # a branch point.
779 # a branch point.
780 if not d:
780 if not d:
781 d = c(len(l.branchpoints), len(r.branchpoints))
781 d = c(len(l.branchpoints), len(r.branchpoints))
782 return d
782 return d
783
783
784 changesets.sort(key=functools.cmp_to_key(cscmp))
784 changesets.sort(key=functools.cmp_to_key(cscmp))
785
785
786 # Collect tags
786 # Collect tags
787
787
788 globaltags = {}
788 globaltags = {}
789 for c in changesets:
789 for c in changesets:
790 for e in c.entries:
790 for e in c.entries:
791 for tag in e.tags:
791 for tag in e.tags:
792 # remember which is the latest changeset to have this tag
792 # remember which is the latest changeset to have this tag
793 globaltags[tag] = c
793 globaltags[tag] = c
794
794
795 for c in changesets:
795 for c in changesets:
796 tags = set()
796 tags = set()
797 for e in c.entries:
797 for e in c.entries:
798 tags.update(e.tags)
798 tags.update(e.tags)
799 # remember tags only if this is the latest changeset to have it
799 # remember tags only if this is the latest changeset to have it
800 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
800 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
801
801
802 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
802 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
803 # by inserting dummy changesets with two parents, and handle
803 # by inserting dummy changesets with two parents, and handle
804 # {{mergefrombranch BRANCHNAME}} by setting two parents.
804 # {{mergefrombranch BRANCHNAME}} by setting two parents.
805
805
806 if mergeto is None:
806 if mergeto is None:
807 mergeto = br'{{mergetobranch ([-\w]+)}}'
807 mergeto = br'{{mergetobranch ([-\w]+)}}'
808 if mergeto:
808 if mergeto:
809 mergeto = re.compile(mergeto)
809 mergeto = re.compile(mergeto)
810
810
811 if mergefrom is None:
811 if mergefrom is None:
812 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
812 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
813 if mergefrom:
813 if mergefrom:
814 mergefrom = re.compile(mergefrom)
814 mergefrom = re.compile(mergefrom)
815
815
816 versions = {} # changeset index where we saw any particular file version
816 versions = {} # changeset index where we saw any particular file version
817 branches = {} # changeset index where we saw a branch
817 branches = {} # changeset index where we saw a branch
818 n = len(changesets)
818 n = len(changesets)
819 i = 0
819 i = 0
820 while i < n:
820 while i < n:
821 c = changesets[i]
821 c = changesets[i]
822
822
823 for f in c.entries:
823 for f in c.entries:
824 versions[(f.rcs, f.revision)] = i
824 versions[(f.rcs, f.revision)] = i
825
825
826 p = None
826 p = None
827 if c.branch in branches:
827 if c.branch in branches:
828 p = branches[c.branch]
828 p = branches[c.branch]
829 else:
829 else:
830 # first changeset on a new branch
830 # first changeset on a new branch
831 # the parent is a changeset with the branch in its
831 # the parent is a changeset with the branch in its
832 # branchpoints such that it is the latest possible
832 # branchpoints such that it is the latest possible
833 # commit without any intervening, unrelated commits.
833 # commit without any intervening, unrelated commits.
834
834
835 for candidate in pycompat.xrange(i):
835 for candidate in pycompat.xrange(i):
836 if c.branch not in changesets[candidate].branchpoints:
836 if c.branch not in changesets[candidate].branchpoints:
837 if p is not None:
837 if p is not None:
838 break
838 break
839 continue
839 continue
840 p = candidate
840 p = candidate
841
841
842 c.parents = []
842 c.parents = []
843 if p is not None:
843 if p is not None:
844 p = changesets[p]
844 p = changesets[p]
845
845
846 # Ensure no changeset has a synthetic changeset as a parent.
846 # Ensure no changeset has a synthetic changeset as a parent.
847 while p.synthetic:
847 while p.synthetic:
848 assert len(p.parents) <= 1, _(
848 assert len(p.parents) <= 1, _(
849 b'synthetic changeset cannot have multiple parents'
849 b'synthetic changeset cannot have multiple parents'
850 )
850 )
851 if p.parents:
851 if p.parents:
852 p = p.parents[0]
852 p = p.parents[0]
853 else:
853 else:
854 p = None
854 p = None
855 break
855 break
856
856
857 if p is not None:
857 if p is not None:
858 c.parents.append(p)
858 c.parents.append(p)
859
859
860 if c.mergepoint:
860 if c.mergepoint:
861 if c.mergepoint == b'HEAD':
861 if c.mergepoint == b'HEAD':
862 c.mergepoint = None
862 c.mergepoint = None
863 c.parents.append(changesets[branches[c.mergepoint]])
863 c.parents.append(changesets[branches[c.mergepoint]])
864
864
865 if mergefrom:
865 if mergefrom:
866 m = mergefrom.search(c.comment)
866 m = mergefrom.search(c.comment)
867 if m:
867 if m:
868 m = m.group(1)
868 m = m.group(1)
869 if m == b'HEAD':
869 if m == b'HEAD':
870 m = None
870 m = None
871 try:
871 try:
872 candidate = changesets[branches[m]]
872 candidate = changesets[branches[m]]
873 except KeyError:
873 except KeyError:
874 ui.warn(
874 ui.warn(
875 _(
875 _(
876 b"warning: CVS commit message references "
876 b"warning: CVS commit message references "
877 b"non-existent branch %r:\n%s\n"
877 b"non-existent branch %r:\n%s\n"
878 )
878 )
879 % (pycompat.bytestr(m), c.comment)
879 % (pycompat.bytestr(m), c.comment)
880 )
880 )
881 if m in branches and c.branch != m and not candidate.synthetic:
881 if m in branches and c.branch != m and not candidate.synthetic:
882 c.parents.append(candidate)
882 c.parents.append(candidate)
883
883
884 if mergeto:
884 if mergeto:
885 m = mergeto.search(c.comment)
885 m = mergeto.search(c.comment)
886 if m:
886 if m:
887 if m.groups():
887 if m.groups():
888 m = m.group(1)
888 m = m.group(1)
889 if m == b'HEAD':
889 if m == b'HEAD':
890 m = None
890 m = None
891 else:
891 else:
892 m = None # if no group found then merge to HEAD
892 m = None # if no group found then merge to HEAD
893 if m in branches and c.branch != m:
893 if m in branches and c.branch != m:
894 # insert empty changeset for merge
894 # insert empty changeset for merge
895 cc = changeset(
895 cc = changeset(
896 author=c.author,
896 author=c.author,
897 branch=m,
897 branch=m,
898 date=c.date,
898 date=c.date,
899 comment=b'convert-repo: CVS merge from branch %s'
899 comment=b'convert-repo: CVS merge from branch %s'
900 % c.branch,
900 % c.branch,
901 entries=[],
901 entries=[],
902 tags=[],
902 tags=[],
903 parents=[changesets[branches[m]], c],
903 parents=[changesets[branches[m]], c],
904 )
904 )
905 changesets.insert(i + 1, cc)
905 changesets.insert(i + 1, cc)
906 branches[m] = i + 1
906 branches[m] = i + 1
907
907
908 # adjust our loop counters now we have inserted a new entry
908 # adjust our loop counters now we have inserted a new entry
909 n += 1
909 n += 1
910 i += 2
910 i += 2
911 continue
911 continue
912
912
913 branches[c.branch] = i
913 branches[c.branch] = i
914 i += 1
914 i += 1
915
915
916 # Drop synthetic changesets (safe now that we have ensured no other
916 # Drop synthetic changesets (safe now that we have ensured no other
917 # changesets can have them as parents).
917 # changesets can have them as parents).
918 i = 0
918 i = 0
919 while i < len(changesets):
919 while i < len(changesets):
920 if changesets[i].synthetic:
920 if changesets[i].synthetic:
921 del changesets[i]
921 del changesets[i]
922 else:
922 else:
923 i += 1
923 i += 1
924
924
925 # Number changesets
925 # Number changesets
926
926
927 for i, c in enumerate(changesets):
927 for i, c in enumerate(changesets):
928 c.id = i + 1
928 c.id = i + 1
929
929
930 if odd:
930 if odd:
931 for l, r in odd:
931 for l, r in odd:
932 if l.id is not None and r.id is not None:
932 if l.id is not None and r.id is not None:
933 ui.warn(
933 ui.warn(
934 _(b'changeset %d is both before and after %d\n')
934 _(b'changeset %d is both before and after %d\n')
935 % (l.id, r.id)
935 % (l.id, r.id)
936 )
936 )
937
937
938 ui.status(_(b'%d changeset entries\n') % len(changesets))
938 ui.status(_(b'%d changeset entries\n') % len(changesets))
939
939
940 hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)
940 hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)
941
941
942 return changesets
942 return changesets
943
943
944
944
945 def debugcvsps(ui, *args, **opts):
945 def debugcvsps(ui, *args, **opts):
946 """Read CVS rlog for current directory or named path in
946 """Read CVS rlog for current directory or named path in
947 repository, and convert the log to changesets based on matching
947 repository, and convert the log to changesets based on matching
948 commit log entries and dates.
948 commit log entries and dates.
949 """
949 """
950 opts = pycompat.byteskwargs(opts)
950 opts = pycompat.byteskwargs(opts)
951 if opts[b"new_cache"]:
951 if opts[b"new_cache"]:
952 cache = b"write"
952 cache = b"write"
953 elif opts[b"update_cache"]:
953 elif opts[b"update_cache"]:
954 cache = b"update"
954 cache = b"update"
955 else:
955 else:
956 cache = None
956 cache = None
957
957
958 revisions = opts[b"revisions"]
958 revisions = opts[b"revisions"]
959
959
960 try:
960 try:
961 if args:
961 if args:
962 log = []
962 log = []
963 for d in args:
963 for d in args:
964 log += createlog(ui, d, root=opts[b"root"], cache=cache)
964 log += createlog(ui, d, root=opts[b"root"], cache=cache)
965 else:
965 else:
966 log = createlog(ui, root=opts[b"root"], cache=cache)
966 log = createlog(ui, root=opts[b"root"], cache=cache)
967 except logerror as e:
967 except logerror as e:
968 ui.write(b"%r\n" % e)
968 ui.write(b"%r\n" % e)
969 return
969 return
970
970
971 changesets = createchangeset(ui, log, opts[b"fuzz"])
971 changesets = createchangeset(ui, log, opts[b"fuzz"])
972 del log
972 del log
973
973
974 # Print changesets (optionally filtered)
974 # Print changesets (optionally filtered)
975
975
976 off = len(revisions)
976 off = len(revisions)
977 branches = {} # latest version number in each branch
977 branches = {} # latest version number in each branch
978 ancestors = {} # parent branch
978 ancestors = {} # parent branch
979 for cs in changesets:
979 for cs in changesets:
980
980
981 if opts[b"ancestors"]:
981 if opts[b"ancestors"]:
982 if cs.branch not in branches and cs.parents and cs.parents[0].id:
982 if cs.branch not in branches and cs.parents and cs.parents[0].id:
983 ancestors[cs.branch] = (
983 ancestors[cs.branch] = (
984 changesets[cs.parents[0].id - 1].branch,
984 changesets[cs.parents[0].id - 1].branch,
985 cs.parents[0].id,
985 cs.parents[0].id,
986 )
986 )
987 branches[cs.branch] = cs.id
987 branches[cs.branch] = cs.id
988
988
989 # limit by branches
989 # limit by branches
990 if (
990 if (
991 opts[b"branches"]
991 opts[b"branches"]
992 and (cs.branch or b'HEAD') not in opts[b"branches"]
992 and (cs.branch or b'HEAD') not in opts[b"branches"]
993 ):
993 ):
994 continue
994 continue
995
995
996 if not off:
996 if not off:
997 # Note: trailing spaces on several lines here are needed to have
997 # Note: trailing spaces on several lines here are needed to have
998 # bug-for-bug compatibility with cvsps.
998 # bug-for-bug compatibility with cvsps.
999 ui.write(b'---------------------\n')
999 ui.write(b'---------------------\n')
1000 ui.write((b'PatchSet %d \n' % cs.id))
1000 ui.write((b'PatchSet %d \n' % cs.id))
1001 ui.write(
1001 ui.write(
1002 (
1002 (
1003 b'Date: %s\n'
1003 b'Date: %s\n'
1004 % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
1004 % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
1005 )
1005 )
1006 )
1006 )
1007 ui.write((b'Author: %s\n' % cs.author))
1007 ui.write((b'Author: %s\n' % cs.author))
1008 ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
1008 ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
1009 ui.write(
1009 ui.write(
1010 (
1010 (
1011 b'Tag%s: %s \n'
1011 b'Tag%s: %s \n'
1012 % (
1012 % (
1013 [b'', b's'][len(cs.tags) > 1],
1013 [b'', b's'][len(cs.tags) > 1],
1014 b','.join(cs.tags) or b'(none)',
1014 b','.join(cs.tags) or b'(none)',
1015 )
1015 )
1016 )
1016 )
1017 )
1017 )
1018 if cs.branchpoints:
1018 if cs.branchpoints:
1019 ui.writenoi18n(
1019 ui.writenoi18n(
1020 b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
1020 b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
1021 )
1021 )
1022 if opts[b"parents"] and cs.parents:
1022 if opts[b"parents"] and cs.parents:
1023 if len(cs.parents) > 1:
1023 if len(cs.parents) > 1:
1024 ui.write(
1024 ui.write(
1025 (
1025 (
1026 b'Parents: %s\n'
1026 b'Parents: %s\n'
1027 % (b','.join([(b"%d" % p.id) for p in cs.parents]))
1027 % (b','.join([(b"%d" % p.id) for p in cs.parents]))
1028 )
1028 )
1029 )
1029 )
1030 else:
1030 else:
1031 ui.write((b'Parent: %d\n' % cs.parents[0].id))
1031 ui.write((b'Parent: %d\n' % cs.parents[0].id))
1032
1032
1033 if opts[b"ancestors"]:
1033 if opts[b"ancestors"]:
1034 b = cs.branch
1034 b = cs.branch
1035 r = []
1035 r = []
1036 while b:
1036 while b:
1037 b, c = ancestors[b]
1037 b, c = ancestors[b]
1038 r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
1038 r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
1039 if r:
1039 if r:
1040 ui.write((b'Ancestors: %s\n' % (b','.join(r))))
1040 ui.write((b'Ancestors: %s\n' % (b','.join(r))))
1041
1041
1042 ui.writenoi18n(b'Log:\n')
1042 ui.writenoi18n(b'Log:\n')
1043 ui.write(b'%s\n\n' % cs.comment)
1043 ui.write(b'%s\n\n' % cs.comment)
1044 ui.writenoi18n(b'Members: \n')
1044 ui.writenoi18n(b'Members: \n')
1045 for f in cs.entries:
1045 for f in cs.entries:
1046 fn = f.file
1046 fn = f.file
1047 if fn.startswith(opts[b"prefix"]):
1047 if fn.startswith(opts[b"prefix"]):
1048 fn = fn[len(opts[b"prefix"]) :]
1048 fn = fn[len(opts[b"prefix"]) :]
1049 ui.write(
1049 ui.write(
1050 b'\t%s:%s->%s%s \n'
1050 b'\t%s:%s->%s%s \n'
1051 % (
1051 % (
1052 fn,
1052 fn,
1053 b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
1053 b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
1054 b'.'.join([(b"%d" % x) for x in f.revision]),
1054 b'.'.join([(b"%d" % x) for x in f.revision]),
1055 [b'', b'(DEAD)'][f.dead],
1055 [b'', b'(DEAD)'][f.dead],
1056 )
1056 )
1057 )
1057 )
1058 ui.write(b'\n')
1058 ui.write(b'\n')
1059
1059
1060 # have we seen the start tag?
1060 # have we seen the start tag?
1061 if revisions and off:
1061 if revisions and off:
1062 if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
1062 if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
1063 off = False
1063 off = False
1064
1064
1065 # see if we reached the end tag
1065 # see if we reached the end tag
1066 if len(revisions) > 1 and not off:
1066 if len(revisions) > 1 and not off:
1067 if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
1067 if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
1068 break
1068 break
@@ -1,497 +1,497 b''
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7
7
8 import posixpath
8 import posixpath
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import (
11 from mercurial import (
12 error,
12 error,
13 pycompat,
13 pycompat,
14 )
14 )
15 from . import common
15 from . import common
16
16
17 SKIPREV = common.SKIPREV
17 SKIPREV = common.SKIPREV
18
18
19
19
20 def rpairs(path):
20 def rpairs(path):
21 """Yield tuples with path split at '/', starting with the full path.
21 """Yield tuples with path split at '/', starting with the full path.
22 No leading, trailing or double '/', please.
22 No leading, trailing or double '/', please.
23 >>> for x in rpairs(b'foo/bar/baz'): print(x)
23 >>> for x in rpairs(b'foo/bar/baz'): print(x)
24 ('foo/bar/baz', '')
24 ('foo/bar/baz', '')
25 ('foo/bar', 'baz')
25 ('foo/bar', 'baz')
26 ('foo', 'bar/baz')
26 ('foo', 'bar/baz')
27 ('.', 'foo/bar/baz')
27 ('.', 'foo/bar/baz')
28 """
28 """
29 i = len(path)
29 i = len(path)
30 while i != -1:
30 while i != -1:
31 yield path[:i], path[i + 1 :]
31 yield path[:i], path[i + 1 :]
32 i = path.rfind(b'/', 0, i)
32 i = path.rfind(b'/', 0, i)
33 yield b'.', path
33 yield b'.', path
34
34
35
35
36 def normalize(path):
36 def normalize(path):
37 """We use posixpath.normpath to support cross-platform path format.
37 """We use posixpath.normpath to support cross-platform path format.
38 However, it doesn't handle None input. So we wrap it up."""
38 However, it doesn't handle None input. So we wrap it up."""
39 if path is None:
39 if path is None:
40 return None
40 return None
41 return posixpath.normpath(path)
41 return posixpath.normpath(path)
42
42
43
43
44 class filemapper(object):
44 class filemapper(object):
45 """Map and filter filenames when importing.
45 """Map and filter filenames when importing.
46 A name can be mapped to itself, a new name, or None (omit from new
46 A name can be mapped to itself, a new name, or None (omit from new
47 repository)."""
47 repository)."""
48
48
49 def __init__(self, ui, path=None):
49 def __init__(self, ui, path=None):
50 self.ui = ui
50 self.ui = ui
51 self.include = {}
51 self.include = {}
52 self.exclude = {}
52 self.exclude = {}
53 self.rename = {}
53 self.rename = {}
54 self.targetprefixes = None
54 self.targetprefixes = None
55 if path:
55 if path:
56 if self.parse(path):
56 if self.parse(path):
57 raise error.Abort(_(b'errors in filemap'))
57 raise error.Abort(_(b'errors in filemap'))
58
58
59 def parse(self, path):
59 def parse(self, path):
60 errs = 0
60 errs = 0
61
61
62 def check(name, mapping, listname):
62 def check(name, mapping, listname):
63 if not name:
63 if not name:
64 self.ui.warn(
64 self.ui.warn(
65 _(b'%s:%d: path to %s is missing\n')
65 _(b'%s:%d: path to %s is missing\n')
66 % (lex.infile, lex.lineno, listname)
66 % (lex.infile, lex.lineno, listname)
67 )
67 )
68 return 1
68 return 1
69 if name in mapping:
69 if name in mapping:
70 self.ui.warn(
70 self.ui.warn(
71 _(b'%s:%d: %r already in %s list\n')
71 _(b'%s:%d: %r already in %s list\n')
72 % (lex.infile, lex.lineno, name, listname)
72 % (lex.infile, lex.lineno, name, listname)
73 )
73 )
74 return 1
74 return 1
75 if name.startswith(b'/') or name.endswith(b'/') or b'//' in name:
75 if name.startswith(b'/') or name.endswith(b'/') or b'//' in name:
76 self.ui.warn(
76 self.ui.warn(
77 _(b'%s:%d: superfluous / in %s %r\n')
77 _(b'%s:%d: superfluous / in %s %r\n')
78 % (lex.infile, lex.lineno, listname, pycompat.bytestr(name))
78 % (lex.infile, lex.lineno, listname, pycompat.bytestr(name))
79 )
79 )
80 return 1
80 return 1
81 return 0
81 return 0
82
82
83 lex = common.shlexer(
83 lex = common.shlexer(
84 filepath=path, wordchars=b'!@#$%^&*()-=+[]{}|;:,./<>?'
84 filepath=path, wordchars=b'!@#$%^&*()-=+[]{}|;:,./<>?'
85 )
85 )
86 cmd = lex.get_token()
86 cmd = lex.get_token()
87 while cmd:
87 while cmd:
88 if cmd == b'include':
88 if cmd == b'include':
89 name = normalize(lex.get_token())
89 name = normalize(lex.get_token())
90 errs += check(name, self.exclude, b'exclude')
90 errs += check(name, self.exclude, b'exclude')
91 self.include[name] = name
91 self.include[name] = name
92 elif cmd == b'exclude':
92 elif cmd == b'exclude':
93 name = normalize(lex.get_token())
93 name = normalize(lex.get_token())
94 errs += check(name, self.include, b'include')
94 errs += check(name, self.include, b'include')
95 errs += check(name, self.rename, b'rename')
95 errs += check(name, self.rename, b'rename')
96 self.exclude[name] = name
96 self.exclude[name] = name
97 elif cmd == b'rename':
97 elif cmd == b'rename':
98 src = normalize(lex.get_token())
98 src = normalize(lex.get_token())
99 dest = normalize(lex.get_token())
99 dest = normalize(lex.get_token())
100 errs += check(src, self.exclude, b'exclude')
100 errs += check(src, self.exclude, b'exclude')
101 self.rename[src] = dest
101 self.rename[src] = dest
102 elif cmd == b'source':
102 elif cmd == b'source':
103 errs += self.parse(normalize(lex.get_token()))
103 errs += self.parse(normalize(lex.get_token()))
104 else:
104 else:
105 self.ui.warn(
105 self.ui.warn(
106 _(b'%s:%d: unknown directive %r\n')
106 _(b'%s:%d: unknown directive %r\n')
107 % (lex.infile, lex.lineno, pycompat.bytestr(cmd))
107 % (lex.infile, lex.lineno, pycompat.bytestr(cmd))
108 )
108 )
109 errs += 1
109 errs += 1
110 cmd = lex.get_token()
110 cmd = lex.get_token()
111 return errs
111 return errs
112
112
113 def lookup(self, name, mapping):
113 def lookup(self, name, mapping):
114 name = normalize(name)
114 name = normalize(name)
115 for pre, suf in rpairs(name):
115 for pre, suf in rpairs(name):
116 try:
116 try:
117 return mapping[pre], pre, suf
117 return mapping[pre], pre, suf
118 except KeyError:
118 except KeyError:
119 pass
119 pass
120 return b'', name, b''
120 return b'', name, b''
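The lookup above walks rpairs() from the most specific prefix down and returns on the first hit. A minimal standalone sketch of that longest-prefix walk (the plain-dict mapping and sample names are hypothetical):

def rpairs_demo(path):
    # yield (prefix, suffix) pairs, longest prefix first, ending with (b'.', path)
    i = len(path)
    while i != -1:
        yield path[:i], path[i + 1:]
        i = path.rfind(b'/', 0, i)
    yield b'.', path

mapping = {b'src': b'lib'}  # hypothetical rename table
for pre, suf in rpairs_demo(b'src/util/helpers.py'):
    if pre in mapping:
        print(mapping[pre], pre, suf)  # b'lib' b'src' b'util/helpers.py'
        break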
121
121
122 def istargetfile(self, filename):
122 def istargetfile(self, filename):
123 """Return true if the given target filename is covered as a destination
123 """Return true if the given target filename is covered as a destination
124 of the filemap. This is useful for identifying what parts of the target
124 of the filemap. This is useful for identifying what parts of the target
125 repo belong to the source repo and what parts don't."""
125 repo belong to the source repo and what parts don't."""
126 if self.targetprefixes is None:
126 if self.targetprefixes is None:
127 self.targetprefixes = set()
127 self.targetprefixes = set()
128 for before, after in pycompat.iteritems(self.rename):
128 for before, after in self.rename.items():
129 self.targetprefixes.add(after)
129 self.targetprefixes.add(after)
130
130
131 # If "." is a target, then all target files are considered from the
131 # If "." is a target, then all target files are considered from the
132 # source.
132 # source.
133 if not self.targetprefixes or b'.' in self.targetprefixes:
133 if not self.targetprefixes or b'.' in self.targetprefixes:
134 return True
134 return True
135
135
136 filename = normalize(filename)
136 filename = normalize(filename)
137 for pre, suf in rpairs(filename):
137 for pre, suf in rpairs(filename):
138 # This check is imperfect since it doesn't account for the
138 # This check is imperfect since it doesn't account for the
139 # include/exclude list, but it should work in filemaps that don't
139 # include/exclude list, but it should work in filemaps that don't
140 # apply include/exclude to the same source directories they are
140 # apply include/exclude to the same source directories they are
141 # renaming.
141 # renaming.
142 if pre in self.targetprefixes:
142 if pre in self.targetprefixes:
143 return True
143 return True
144 return False
144 return False
145
145
146 def __call__(self, name):
146 def __call__(self, name):
147 if self.include:
147 if self.include:
148 inc = self.lookup(name, self.include)[0]
148 inc = self.lookup(name, self.include)[0]
149 else:
149 else:
150 inc = name
150 inc = name
151 if self.exclude:
151 if self.exclude:
152 exc = self.lookup(name, self.exclude)[0]
152 exc = self.lookup(name, self.exclude)[0]
153 else:
153 else:
154 exc = b''
154 exc = b''
155 if (not self.include and exc) or (len(inc) <= len(exc)):
155 if (not self.include and exc) or (len(inc) <= len(exc)):
156 return None
156 return None
157 newpre, pre, suf = self.lookup(name, self.rename)
157 newpre, pre, suf = self.lookup(name, self.rename)
158 if newpre:
158 if newpre:
159 if newpre == b'.':
159 if newpre == b'.':
160 return suf
160 return suf
161 if suf:
161 if suf:
162 if newpre.endswith(b'/'):
162 if newpre.endswith(b'/'):
163 return newpre + suf
163 return newpre + suf
164 return newpre + b'/' + suf
164 return newpre + b'/' + suf
165 return newpre
165 return newpre
166 return name
166 return name
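A toy model of the precedence implemented by __call__ above: the longer of the matched include/exclude prefixes wins, and the rename table then rewrites the matched prefix. This is a hedged sketch, not the real filemapper; the helper and the sample filemap entries are hypothetical:

def toy_map(name, include, exclude, rename):
    def longest(prefixes):
        hits = [p for p in prefixes if name == p or name.startswith(p + b'/')]
        return max(hits, key=len) if hits else b''
    inc = longest(include) if include else name
    exc = longest(exclude)
    if (not include and exc) or len(inc) <= len(exc):
        return None  # excluded, or not covered by an include list
    for pre in sorted(rename, key=len, reverse=True):
        if name == pre or name.startswith(pre + b'/'):
            return rename[pre] + name[len(pre):]
    return name

assert toy_map(b'src/main.py', [b'src'], [b'src/vendored'], {b'src': b'lib'}) == b'lib/main.py'
assert toy_map(b'src/vendored/x.py', [b'src'], [b'src/vendored'], {b'src': b'lib'}) is None
assert toy_map(b'docs/x.rst', [b'src'], [], {}) is None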
167
167
168 def active(self):
168 def active(self):
169 return bool(self.include or self.exclude or self.rename)
169 return bool(self.include or self.exclude or self.rename)
170
170
171
171
172 # This class does two additional things compared to a regular source:
172 # This class does two additional things compared to a regular source:
173 #
173 #
174 # - Filter and rename files. This is mostly wrapped by the filemapper
174 # - Filter and rename files. This is mostly wrapped by the filemapper
175 # class above. We hide the original filename in the revision that is
175 # class above. We hide the original filename in the revision that is
176 # returned by getchanges to be able to find things later in getfile.
176 # returned by getchanges to be able to find things later in getfile.
177 #
177 #
178 # - Return only revisions that matter for the files we're interested in.
178 # - Return only revisions that matter for the files we're interested in.
179 # This involves rewriting the parents of the original revision to
179 # This involves rewriting the parents of the original revision to
180 # create a graph that is restricted to those revisions.
180 # create a graph that is restricted to those revisions.
181 #
181 #
182 # This set of revisions includes not only revisions that directly
182 # This set of revisions includes not only revisions that directly
183 # touch files we're interested in, but also merges that merge two
183 # touch files we're interested in, but also merges that merge two
184 # or more interesting revisions.
184 # or more interesting revisions.
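A hedged sketch of the parent-rewriting idea described in the comment above: an unwanted revision collapses onto whatever its parent maps to, so only wanted revisions (and merges of them) survive in the restricted graph. The tiny linear DAG here is hypothetical:

parents = {b'a': [], b'b': [b'a'], b'c': [b'b'], b'd': [b'c']}
wanted = {b'a', b'c'}  # say only these touch files that survive the filemap
parentmap = {}
for rev in (b'a', b'b', b'c', b'd'):  # topological order
    if rev in wanted:
        parentmap[rev] = rev
    else:
        ps = parents[rev]
        parentmap[rev] = parentmap[ps[0]] if ps else b'SKIP'
# b collapses onto a, so c's restricted parent is a; d collapses onto c
assert parentmap == {b'a': b'a', b'b': b'a', b'c': b'c', b'd': b'c'}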
185
185
186
186
187 class filemap_source(common.converter_source):
187 class filemap_source(common.converter_source):
188 def __init__(self, ui, baseconverter, filemap):
188 def __init__(self, ui, baseconverter, filemap):
189 super(filemap_source, self).__init__(ui, baseconverter.repotype)
189 super(filemap_source, self).__init__(ui, baseconverter.repotype)
190 self.base = baseconverter
190 self.base = baseconverter
191 self.filemapper = filemapper(ui, filemap)
191 self.filemapper = filemapper(ui, filemap)
192 self.commits = {}
192 self.commits = {}
193 # if a revision rev has parent p in the original revision graph, then
193 # if a revision rev has parent p in the original revision graph, then
194 # rev will have parent self.parentmap[p] in the restricted graph.
194 # rev will have parent self.parentmap[p] in the restricted graph.
195 self.parentmap = {}
195 self.parentmap = {}
196 # self.wantedancestors[rev] is the set of all ancestors of rev that
196 # self.wantedancestors[rev] is the set of all ancestors of rev that
197 # are in the restricted graph.
197 # are in the restricted graph.
198 self.wantedancestors = {}
198 self.wantedancestors = {}
199 self.convertedorder = None
199 self.convertedorder = None
200 self._rebuilt = False
200 self._rebuilt = False
201 self.origparents = {}
201 self.origparents = {}
202 self.children = {}
202 self.children = {}
203 self.seenchildren = {}
203 self.seenchildren = {}
204 # experimental config: convert.ignoreancestorcheck
204 # experimental config: convert.ignoreancestorcheck
205 self.ignoreancestorcheck = self.ui.configbool(
205 self.ignoreancestorcheck = self.ui.configbool(
206 b'convert', b'ignoreancestorcheck'
206 b'convert', b'ignoreancestorcheck'
207 )
207 )
208
208
209 def before(self):
209 def before(self):
210 self.base.before()
210 self.base.before()
211
211
212 def after(self):
212 def after(self):
213 self.base.after()
213 self.base.after()
214
214
215 def setrevmap(self, revmap):
215 def setrevmap(self, revmap):
216 # rebuild our state to make things restartable
216 # rebuild our state to make things restartable
217 #
217 #
218 # To avoid calling getcommit for every revision that has already
218 # To avoid calling getcommit for every revision that has already
219 # been converted, we rebuild only the parentmap, delaying the
219 # been converted, we rebuild only the parentmap, delaying the
220 # rebuild of wantedancestors until we need it (i.e. until a
220 # rebuild of wantedancestors until we need it (i.e. until a
221 # merge).
221 # merge).
222 #
222 #
223 # We assume the order argument lists the revisions in
223 # We assume the order argument lists the revisions in
224 # topological order, so that we can infer which revisions were
224 # topological order, so that we can infer which revisions were
225 # wanted by previous runs.
225 # wanted by previous runs.
226 self._rebuilt = not revmap
226 self._rebuilt = not revmap
227 seen = {SKIPREV: SKIPREV}
227 seen = {SKIPREV: SKIPREV}
228 dummyset = set()
228 dummyset = set()
229 converted = []
229 converted = []
230 for rev in revmap.order:
230 for rev in revmap.order:
231 mapped = revmap[rev]
231 mapped = revmap[rev]
232 wanted = mapped not in seen
232 wanted = mapped not in seen
233 if wanted:
233 if wanted:
234 seen[mapped] = rev
234 seen[mapped] = rev
235 self.parentmap[rev] = rev
235 self.parentmap[rev] = rev
236 else:
236 else:
237 self.parentmap[rev] = seen[mapped]
237 self.parentmap[rev] = seen[mapped]
238 self.wantedancestors[rev] = dummyset
238 self.wantedancestors[rev] = dummyset
239 arg = seen[mapped]
239 arg = seen[mapped]
240 if arg == SKIPREV:
240 if arg == SKIPREV:
241 arg = None
241 arg = None
242 converted.append((rev, wanted, arg))
242 converted.append((rev, wanted, arg))
243 self.convertedorder = converted
243 self.convertedorder = converted
244 return self.base.setrevmap(revmap)
244 return self.base.setrevmap(revmap)
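The restart bookkeeping above can be pictured with a small standalone sketch: the first source revision mapped to a given sink revision was the wanted one, and later sources mapping to the same sink collapse onto it. The sample revmap entries are hypothetical:

order = [(b'r1', b's1'), (b'r2', b's1'), (b'r3', b's2')]
seen, parentmap = {}, {}
for rev, mapped in order:
    if mapped not in seen:
        seen[mapped] = rev
        parentmap[rev] = rev  # wanted: keeps its own identity
    else:
        parentmap[rev] = seen[mapped]  # collapsed onto the wanted rev
assert parentmap == {b'r1': b'r1', b'r2': b'r1', b'r3': b'r3'}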
245
245
246 def rebuild(self):
246 def rebuild(self):
247 if self._rebuilt:
247 if self._rebuilt:
248 return True
248 return True
249 self._rebuilt = True
249 self._rebuilt = True
250 self.parentmap.clear()
250 self.parentmap.clear()
251 self.wantedancestors.clear()
251 self.wantedancestors.clear()
252 self.seenchildren.clear()
252 self.seenchildren.clear()
253 for rev, wanted, arg in self.convertedorder:
253 for rev, wanted, arg in self.convertedorder:
254 if rev not in self.origparents:
254 if rev not in self.origparents:
255 try:
255 try:
256 self.origparents[rev] = self.getcommit(rev).parents
256 self.origparents[rev] = self.getcommit(rev).parents
257 except error.RepoLookupError:
257 except error.RepoLookupError:
258 self.ui.debug(b"unknown revmap source: %s\n" % rev)
258 self.ui.debug(b"unknown revmap source: %s\n" % rev)
259 continue
259 continue
260 if arg is not None:
260 if arg is not None:
261 self.children[arg] = self.children.get(arg, 0) + 1
261 self.children[arg] = self.children.get(arg, 0) + 1
262
262
263 for rev, wanted, arg in self.convertedorder:
263 for rev, wanted, arg in self.convertedorder:
264 try:
264 try:
265 parents = self.origparents[rev]
265 parents = self.origparents[rev]
266 except KeyError:
266 except KeyError:
267 continue # unknown revmap source
267 continue # unknown revmap source
268 if wanted:
268 if wanted:
269 self.mark_wanted(rev, parents)
269 self.mark_wanted(rev, parents)
270 else:
270 else:
271 self.mark_not_wanted(rev, arg)
271 self.mark_not_wanted(rev, arg)
272 self._discard(arg, *parents)
272 self._discard(arg, *parents)
273
273
274 return True
274 return True
275
275
276 def getheads(self):
276 def getheads(self):
277 return self.base.getheads()
277 return self.base.getheads()
278
278
279 def getcommit(self, rev):
279 def getcommit(self, rev):
280 # We want to save a reference to the commit objects to be able
280 # We want to save a reference to the commit objects to be able
281 # to rewrite their parents later on.
281 # to rewrite their parents later on.
282 c = self.commits[rev] = self.base.getcommit(rev)
282 c = self.commits[rev] = self.base.getcommit(rev)
283 for p in c.parents:
283 for p in c.parents:
284 self.children[p] = self.children.get(p, 0) + 1
284 self.children[p] = self.children.get(p, 0) + 1
285 return c
285 return c
286
286
287 def numcommits(self):
287 def numcommits(self):
288 return self.base.numcommits()
288 return self.base.numcommits()
289
289
290 def _cachedcommit(self, rev):
290 def _cachedcommit(self, rev):
291 if rev in self.commits:
291 if rev in self.commits:
292 return self.commits[rev]
292 return self.commits[rev]
293 return self.base.getcommit(rev)
293 return self.base.getcommit(rev)
294
294
295 def _discard(self, *revs):
295 def _discard(self, *revs):
296 for r in revs:
296 for r in revs:
297 if r is None:
297 if r is None:
298 continue
298 continue
299 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
299 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
300 if self.seenchildren[r] == self.children[r]:
300 if self.seenchildren[r] == self.children[r]:
301 self.wantedancestors.pop(r, None)
301 self.wantedancestors.pop(r, None)
302 self.parentmap.pop(r, None)
302 self.parentmap.pop(r, None)
303 del self.seenchildren[r]
303 del self.seenchildren[r]
304 if self._rebuilt:
304 if self._rebuilt:
305 del self.children[r]
305 del self.children[r]
306
306
307 def wanted(self, rev, i):
307 def wanted(self, rev, i):
308 # Return True if we're directly interested in rev.
308 # Return True if we're directly interested in rev.
309 #
309 #
310 # i is an index selecting one of the parents of rev (if rev
310 # i is an index selecting one of the parents of rev (if rev
311 # has no parents, i is None). getchangedfiles will give us
311 # has no parents, i is None). getchangedfiles will give us
312 # the list of files that are different in rev and in the parent
312 # the list of files that are different in rev and in the parent
313 # indicated by i. If we're interested in any of these files,
313 # indicated by i. If we're interested in any of these files,
314 # we're interested in rev.
314 # we're interested in rev.
315 try:
315 try:
316 files = self.base.getchangedfiles(rev, i)
316 files = self.base.getchangedfiles(rev, i)
317 except NotImplementedError:
317 except NotImplementedError:
318 raise error.Abort(_(b"source repository doesn't support --filemap"))
318 raise error.Abort(_(b"source repository doesn't support --filemap"))
319 for f in files:
319 for f in files:
320 if self.filemapper(f):
320 if self.filemapper(f):
321 return True
321 return True
322
322
323 # The include directive is documented to include nothing else (though
323 # The include directive is documented to include nothing else (though
324 # valid branch closes are included).
324 # valid branch closes are included).
325 if self.filemapper.include:
325 if self.filemapper.include:
326 return False
326 return False
327
327
328 # Allow empty commits in the source revision through. The getchanges()
328 # Allow empty commits in the source revision through. The getchanges()
329 # method doesn't even bother calling this if it determines that the
329 # method doesn't even bother calling this if it determines that the
330 # close marker is significant (i.e. all of the branch ancestors weren't
330 # close marker is significant (i.e. all of the branch ancestors weren't
331 # eliminated). Therefore if there *is* a close marker, getchanges()
331 # eliminated). Therefore if there *is* a close marker, getchanges()
332 # doesn't consider it significant, and this revision should be dropped.
332 # doesn't consider it significant, and this revision should be dropped.
333 return not files and b'close' not in self.commits[rev].extra
333 return not files and b'close' not in self.commits[rev].extra
334
334
335 def mark_not_wanted(self, rev, p):
335 def mark_not_wanted(self, rev, p):
336 # Mark rev as not interesting and update data structures.
336 # Mark rev as not interesting and update data structures.
337
337
338 if p is None:
338 if p is None:
339 # A root revision. Use SKIPREV to indicate that it doesn't
339 # A root revision. Use SKIPREV to indicate that it doesn't
340 # map to any revision in the restricted graph. Put SKIPREV
340 # map to any revision in the restricted graph. Put SKIPREV
341 # in the set of wanted ancestors to simplify code elsewhere
341 # in the set of wanted ancestors to simplify code elsewhere
342 self.parentmap[rev] = SKIPREV
342 self.parentmap[rev] = SKIPREV
343 self.wantedancestors[rev] = {SKIPREV}
343 self.wantedancestors[rev] = {SKIPREV}
344 return
344 return
345
345
346 # Reuse the data from our parent.
346 # Reuse the data from our parent.
347 self.parentmap[rev] = self.parentmap[p]
347 self.parentmap[rev] = self.parentmap[p]
348 self.wantedancestors[rev] = self.wantedancestors[p]
348 self.wantedancestors[rev] = self.wantedancestors[p]
349
349
350 def mark_wanted(self, rev, parents):
350 def mark_wanted(self, rev, parents):
351 # Mark rev as wanted and update data structures.
351 # Mark rev as wanted and update data structures.
352
352
353 # rev will be in the restricted graph, so children of rev in
353 # rev will be in the restricted graph, so children of rev in
354 # the original graph should still have rev as a parent in the
354 # the original graph should still have rev as a parent in the
355 # restricted graph.
355 # restricted graph.
356 self.parentmap[rev] = rev
356 self.parentmap[rev] = rev
357
357
358 # The set of wanted ancestors of rev is the union of the sets
358 # The set of wanted ancestors of rev is the union of the sets
359 # of wanted ancestors of its parents. Plus rev itself.
359 # of wanted ancestors of its parents. Plus rev itself.
360 wrev = set()
360 wrev = set()
361 for p in parents:
361 for p in parents:
362 if p in self.wantedancestors:
362 if p in self.wantedancestors:
363 wrev.update(self.wantedancestors[p])
363 wrev.update(self.wantedancestors[p])
364 else:
364 else:
365 self.ui.warn(
365 self.ui.warn(
366 _(b'warning: %s parent %s is missing\n') % (rev, p)
366 _(b'warning: %s parent %s is missing\n') % (rev, p)
367 )
367 )
368 wrev.add(rev)
368 wrev.add(rev)
369 self.wantedancestors[rev] = wrev
369 self.wantedancestors[rev] = wrev
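A minimal illustration of the set union performed by mark_wanted above (node names are hypothetical):

wantedancestors = {b'p1': {b'p1', b'x'}, b'p2': {b'p2'}}
rev, parents = b'm', (b'p1', b'p2')
wrev = set()
for p in parents:
    wrev.update(wantedancestors.get(p, set()))
wrev.add(rev)
wantedancestors[rev] = wrev
assert wantedancestors[b'm'] == {b'm', b'p1', b'p2', b'x'}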
370
370
371 def getchanges(self, rev, full):
371 def getchanges(self, rev, full):
372 parents = self.commits[rev].parents
372 parents = self.commits[rev].parents
373 if len(parents) > 1 and not self.ignoreancestorcheck:
373 if len(parents) > 1 and not self.ignoreancestorcheck:
374 self.rebuild()
374 self.rebuild()
375
375
376 # To decide whether we're interested in rev we:
376 # To decide whether we're interested in rev we:
377 #
377 #
378 # - calculate what parents rev will have if it turns out we're
378 # - calculate what parents rev will have if it turns out we're
379 # interested in it. If it's going to have more than 1 parent,
379 # interested in it. If it's going to have more than 1 parent,
380 # we're interested in it.
380 # we're interested in it.
381 #
381 #
382 # - otherwise, we'll compare it with the single parent we found.
382 # - otherwise, we'll compare it with the single parent we found.
383 # If any of the files we're interested in is different in the
383 # If any of the files we're interested in is different in the
384 # two revisions, we're interested in rev.
384 # two revisions, we're interested in rev.
385
385
386 # A parent p is interesting if its mapped version (self.parentmap[p]):
386 # A parent p is interesting if its mapped version (self.parentmap[p]):
387 # - is not SKIPREV
387 # - is not SKIPREV
388 # - is still not in the list of parents (we don't want duplicates)
388 # - is still not in the list of parents (we don't want duplicates)
389 # - is not an ancestor of the mapped versions of the other parents or
389 # - is not an ancestor of the mapped versions of the other parents or
390 # there is no parent on the same branch as the current revision.
390 # there is no parent on the same branch as the current revision.
391 mparents = []
391 mparents = []
392 knownparents = set()
392 knownparents = set()
393 branch = self.commits[rev].branch
393 branch = self.commits[rev].branch
394 hasbranchparent = False
394 hasbranchparent = False
395 for i, p1 in enumerate(parents):
395 for i, p1 in enumerate(parents):
396 mp1 = self.parentmap[p1]
396 mp1 = self.parentmap[p1]
397 if mp1 == SKIPREV or mp1 in knownparents:
397 if mp1 == SKIPREV or mp1 in knownparents:
398 continue
398 continue
399
399
400 isancestor = not self.ignoreancestorcheck and any(
400 isancestor = not self.ignoreancestorcheck and any(
401 p2
401 p2
402 for p2 in parents
402 for p2 in parents
403 if p1 != p2
403 if p1 != p2
404 and mp1 != self.parentmap[p2]
404 and mp1 != self.parentmap[p2]
405 and mp1 in self.wantedancestors[p2]
405 and mp1 in self.wantedancestors[p2]
406 )
406 )
407 if not isancestor and not hasbranchparent and len(parents) > 1:
407 if not isancestor and not hasbranchparent and len(parents) > 1:
408 # This could be expensive, avoid unnecessary calls.
408 # This could be expensive, avoid unnecessary calls.
409 if self._cachedcommit(p1).branch == branch:
409 if self._cachedcommit(p1).branch == branch:
410 hasbranchparent = True
410 hasbranchparent = True
411 mparents.append((p1, mp1, i, isancestor))
411 mparents.append((p1, mp1, i, isancestor))
412 knownparents.add(mp1)
412 knownparents.add(mp1)
413 # Discard parents ancestors of other parents if there is a
413 # Discard parents ancestors of other parents if there is a
414 # non-ancestor one on the same branch as the current revision.
414 # non-ancestor one on the same branch as the current revision.
415 if hasbranchparent:
415 if hasbranchparent:
416 mparents = [p for p in mparents if not p[3]]
416 mparents = [p for p in mparents if not p[3]]
417 wp = None
417 wp = None
418 if mparents:
418 if mparents:
419 wp = max(p[2] for p in mparents)
419 wp = max(p[2] for p in mparents)
420 mparents = [p[1] for p in mparents]
420 mparents = [p[1] for p in mparents]
421 elif parents:
421 elif parents:
422 wp = 0
422 wp = 0
423
423
424 self.origparents[rev] = parents
424 self.origparents[rev] = parents
425
425
426 closed = False
426 closed = False
427 if b'close' in self.commits[rev].extra:
427 if b'close' in self.commits[rev].extra:
428 # A branch closing revision is only useful if one of its
428 # A branch closing revision is only useful if one of its
429 # parents belongs to the branch being closed
429 # parents belongs to the branch being closed
430 pbranches = [self._cachedcommit(p).branch for p in mparents]
430 pbranches = [self._cachedcommit(p).branch for p in mparents]
431 if branch in pbranches:
431 if branch in pbranches:
432 closed = True
432 closed = True
433
433
434 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
434 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
435 # We don't want this revision.
435 # We don't want this revision.
436 # Update our state and tell the convert process to map this
436 # Update our state and tell the convert process to map this
437 # revision to the same revision its parent was mapped to.
437 # revision to the same revision its parent was mapped to.
438 p = None
438 p = None
439 if parents:
439 if parents:
440 p = parents[wp]
440 p = parents[wp]
441 self.mark_not_wanted(rev, p)
441 self.mark_not_wanted(rev, p)
442 self.convertedorder.append((rev, False, p))
442 self.convertedorder.append((rev, False, p))
443 self._discard(*parents)
443 self._discard(*parents)
444 return self.parentmap[rev]
444 return self.parentmap[rev]
445
445
446 # We want this revision.
446 # We want this revision.
447 # Rewrite the parents of the commit object
447 # Rewrite the parents of the commit object
448 self.commits[rev].parents = mparents
448 self.commits[rev].parents = mparents
449 self.mark_wanted(rev, parents)
449 self.mark_wanted(rev, parents)
450 self.convertedorder.append((rev, True, None))
450 self.convertedorder.append((rev, True, None))
451 self._discard(*parents)
451 self._discard(*parents)
452
452
453 # Get the real changes and do the filtering/mapping. To be
453 # Get the real changes and do the filtering/mapping. To be
454 # able to get the files later on in getfile, we hide the
454 # able to get the files later on in getfile, we hide the
455 # original filename in the rev part of the return value.
455 # original filename in the rev part of the return value.
456 changes, copies, cleanp2 = self.base.getchanges(rev, full)
456 changes, copies, cleanp2 = self.base.getchanges(rev, full)
457 files = {}
457 files = {}
458 ncleanp2 = set(cleanp2)
458 ncleanp2 = set(cleanp2)
459 for f, r in changes:
459 for f, r in changes:
460 newf = self.filemapper(f)
460 newf = self.filemapper(f)
461 if newf and (newf != f or newf not in files):
461 if newf and (newf != f or newf not in files):
462 files[newf] = (f, r)
462 files[newf] = (f, r)
463 if newf != f:
463 if newf != f:
464 ncleanp2.discard(f)
464 ncleanp2.discard(f)
465 files = sorted(files.items())
465 files = sorted(files.items())
466
466
467 ncopies = {}
467 ncopies = {}
468 for c in copies:
468 for c in copies:
469 newc = self.filemapper(c)
469 newc = self.filemapper(c)
470 if newc:
470 if newc:
471 newsource = self.filemapper(copies[c])
471 newsource = self.filemapper(copies[c])
472 if newsource:
472 if newsource:
473 ncopies[newc] = newsource
473 ncopies[newc] = newsource
474
474
475 return files, ncopies, ncleanp2
475 return files, ncopies, ncleanp2
476
476
477 def targetfilebelongstosource(self, targetfilename):
477 def targetfilebelongstosource(self, targetfilename):
478 return self.filemapper.istargetfile(targetfilename)
478 return self.filemapper.istargetfile(targetfilename)
479
479
480 def getfile(self, name, rev):
480 def getfile(self, name, rev):
481 realname, realrev = rev
481 realname, realrev = rev
482 return self.base.getfile(realname, realrev)
482 return self.base.getfile(realname, realrev)
483
483
484 def gettags(self):
484 def gettags(self):
485 return self.base.gettags()
485 return self.base.gettags()
486
486
487 def hasnativeorder(self):
487 def hasnativeorder(self):
488 return self.base.hasnativeorder()
488 return self.base.hasnativeorder()
489
489
490 def lookuprev(self, rev):
490 def lookuprev(self, rev):
491 return self.base.lookuprev(rev)
491 return self.base.lookuprev(rev)
492
492
493 def getbookmarks(self):
493 def getbookmarks(self):
494 return self.base.getbookmarks()
494 return self.base.getbookmarks()
495
495
496 def converted(self, rev, sinkrev):
496 def converted(self, rev, sinkrev):
497 self.base.converted(rev, sinkrev)
497 self.base.converted(rev, sinkrev)
@@ -1,732 +1,732 @@
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19
19
20 import os
20 import os
21 import re
21 import re
22 import time
22 import time
23
23
24 from mercurial.i18n import _
24 from mercurial.i18n import _
25 from mercurial.pycompat import open
25 from mercurial.pycompat import open
26 from mercurial.node import (
26 from mercurial.node import (
27 bin,
27 bin,
28 hex,
28 hex,
29 sha1nodeconstants,
29 sha1nodeconstants,
30 )
30 )
31 from mercurial import (
31 from mercurial import (
32 bookmarks,
32 bookmarks,
33 context,
33 context,
34 error,
34 error,
35 exchange,
35 exchange,
36 hg,
36 hg,
37 lock as lockmod,
37 lock as lockmod,
38 logcmdutil,
38 logcmdutil,
39 merge as mergemod,
39 merge as mergemod,
40 mergestate,
40 mergestate,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 util,
43 util,
44 )
44 )
45 from mercurial.utils import dateutil
45 from mercurial.utils import dateutil
46
46
47 stringio = util.stringio
47 stringio = util.stringio
48
48
49 from . import common
49 from . import common
50
50
51 mapfile = common.mapfile
51 mapfile = common.mapfile
52 NoRepo = common.NoRepo
52 NoRepo = common.NoRepo
53
53
54 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
54 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
55
55
56
56
57 class mercurial_sink(common.converter_sink):
57 class mercurial_sink(common.converter_sink):
58 def __init__(self, ui, repotype, path):
58 def __init__(self, ui, repotype, path):
59 common.converter_sink.__init__(self, ui, repotype, path)
59 common.converter_sink.__init__(self, ui, repotype, path)
60 self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
60 self.branchnames = ui.configbool(b'convert', b'hg.usebranchnames')
61 self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
61 self.clonebranches = ui.configbool(b'convert', b'hg.clonebranches')
62 self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
62 self.tagsbranch = ui.config(b'convert', b'hg.tagsbranch')
63 self.lastbranch = None
63 self.lastbranch = None
64 if os.path.isdir(path) and len(os.listdir(path)) > 0:
64 if os.path.isdir(path) and len(os.listdir(path)) > 0:
65 try:
65 try:
66 self.repo = hg.repository(self.ui, path)
66 self.repo = hg.repository(self.ui, path)
67 if not self.repo.local():
67 if not self.repo.local():
68 raise NoRepo(
68 raise NoRepo(
69 _(b'%s is not a local Mercurial repository') % path
69 _(b'%s is not a local Mercurial repository') % path
70 )
70 )
71 except error.RepoError as err:
71 except error.RepoError as err:
72 ui.traceback()
72 ui.traceback()
73 raise NoRepo(err.args[0])
73 raise NoRepo(err.args[0])
74 else:
74 else:
75 try:
75 try:
76 ui.status(_(b'initializing destination %s repository\n') % path)
76 ui.status(_(b'initializing destination %s repository\n') % path)
77 self.repo = hg.repository(self.ui, path, create=True)
77 self.repo = hg.repository(self.ui, path, create=True)
78 if not self.repo.local():
78 if not self.repo.local():
79 raise NoRepo(
79 raise NoRepo(
80 _(b'%s is not a local Mercurial repository') % path
80 _(b'%s is not a local Mercurial repository') % path
81 )
81 )
82 self.created.append(path)
82 self.created.append(path)
83 except error.RepoError:
83 except error.RepoError:
84 ui.traceback()
84 ui.traceback()
85 raise NoRepo(
85 raise NoRepo(
86 _(b"could not create hg repository %s as sink") % path
86 _(b"could not create hg repository %s as sink") % path
87 )
87 )
88 self.lock = None
88 self.lock = None
89 self.wlock = None
89 self.wlock = None
90 self.filemapmode = False
90 self.filemapmode = False
91 self.subrevmaps = {}
91 self.subrevmaps = {}
92
92
93 def before(self):
93 def before(self):
94 self.ui.debug(b'run hg sink pre-conversion action\n')
94 self.ui.debug(b'run hg sink pre-conversion action\n')
95 self.wlock = self.repo.wlock()
95 self.wlock = self.repo.wlock()
96 self.lock = self.repo.lock()
96 self.lock = self.repo.lock()
97
97
98 def after(self):
98 def after(self):
99 self.ui.debug(b'run hg sink post-conversion action\n')
99 self.ui.debug(b'run hg sink post-conversion action\n')
100 if self.lock:
100 if self.lock:
101 self.lock.release()
101 self.lock.release()
102 if self.wlock:
102 if self.wlock:
103 self.wlock.release()
103 self.wlock.release()
104
104
105 def revmapfile(self):
105 def revmapfile(self):
106 return self.repo.vfs.join(b"shamap")
106 return self.repo.vfs.join(b"shamap")
107
107
108 def authorfile(self):
108 def authorfile(self):
109 return self.repo.vfs.join(b"authormap")
109 return self.repo.vfs.join(b"authormap")
110
110
111 def setbranch(self, branch, pbranches):
111 def setbranch(self, branch, pbranches):
112 if not self.clonebranches:
112 if not self.clonebranches:
113 return
113 return
114
114
115 setbranch = branch != self.lastbranch
115 setbranch = branch != self.lastbranch
116 self.lastbranch = branch
116 self.lastbranch = branch
117 if not branch:
117 if not branch:
118 branch = b'default'
118 branch = b'default'
119 pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
119 pbranches = [(b[0], b[1] and b[1] or b'default') for b in pbranches]
120
120
121 branchpath = os.path.join(self.path, branch)
121 branchpath = os.path.join(self.path, branch)
122 if setbranch:
122 if setbranch:
123 self.after()
123 self.after()
124 try:
124 try:
125 self.repo = hg.repository(self.ui, branchpath)
125 self.repo = hg.repository(self.ui, branchpath)
126 except Exception:
126 except Exception:
127 self.repo = hg.repository(self.ui, branchpath, create=True)
127 self.repo = hg.repository(self.ui, branchpath, create=True)
128 self.before()
128 self.before()
129
129
130 # pbranches may bring revisions from other branches (merge parents)
130 # pbranches may bring revisions from other branches (merge parents)
131 # Make sure we have them, or pull them.
131 # Make sure we have them, or pull them.
132 missings = {}
132 missings = {}
133 for b in pbranches:
133 for b in pbranches:
134 try:
134 try:
135 self.repo.lookup(b[0])
135 self.repo.lookup(b[0])
136 except Exception:
136 except Exception:
137 missings.setdefault(b[1], []).append(b[0])
137 missings.setdefault(b[1], []).append(b[0])
138
138
139 if missings:
139 if missings:
140 self.after()
140 self.after()
141 for pbranch, heads in sorted(pycompat.iteritems(missings)):
141 for pbranch, heads in sorted(missings.items()):
142 pbranchpath = os.path.join(self.path, pbranch)
142 pbranchpath = os.path.join(self.path, pbranch)
143 prepo = hg.peer(self.ui, {}, pbranchpath)
143 prepo = hg.peer(self.ui, {}, pbranchpath)
144 self.ui.note(
144 self.ui.note(
145 _(b'pulling from %s into %s\n') % (pbranch, branch)
145 _(b'pulling from %s into %s\n') % (pbranch, branch)
146 )
146 )
147 exchange.pull(
147 exchange.pull(
148 self.repo, prepo, heads=[prepo.lookup(h) for h in heads]
148 self.repo, prepo, heads=[prepo.lookup(h) for h in heads]
149 )
149 )
150 self.before()
150 self.before()
151
151
152 def _rewritetags(self, source, revmap, data):
152 def _rewritetags(self, source, revmap, data):
153 fp = stringio()
153 fp = stringio()
154 for line in data.splitlines():
154 for line in data.splitlines():
155 s = line.split(b' ', 1)
155 s = line.split(b' ', 1)
156 if len(s) != 2:
156 if len(s) != 2:
157 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
157 self.ui.warn(_(b'invalid tag entry: "%s"\n') % line)
158 fp.write(b'%s\n' % line) # Bogus, but keep for hash stability
158 fp.write(b'%s\n' % line) # Bogus, but keep for hash stability
159 continue
159 continue
160 revid = revmap.get(source.lookuprev(s[0]))
160 revid = revmap.get(source.lookuprev(s[0]))
161 if not revid:
161 if not revid:
162 if s[0] == sha1nodeconstants.nullhex:
162 if s[0] == sha1nodeconstants.nullhex:
163 revid = s[0]
163 revid = s[0]
164 else:
164 else:
165 # missing, but keep for hash stability
165 # missing, but keep for hash stability
166 self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
166 self.ui.warn(_(b'missing tag entry: "%s"\n') % line)
167 fp.write(b'%s\n' % line)
167 fp.write(b'%s\n' % line)
168 continue
168 continue
169 fp.write(b'%s %s\n' % (revid, s[1]))
169 fp.write(b'%s %s\n' % (revid, s[1]))
170 return fp.getvalue()
170 return fp.getvalue()
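A hedged sketch of the rewrite above, skipping the source.lookuprev() indirection and the warnings the real code emits: each "<node> <tag>" line has its node mapped through revmap, and unmapped lines are kept verbatim for hash stability. The node values are hypothetical:

revmap = {b'aaa111': b'bbb222'}  # hypothetical source->sink node map
data = b'aaa111 v1.0\nzzz999 orphan\n'
out = []
for line in data.splitlines():
    node, tag = line.split(b' ', 1)
    out.append(b'%s %s\n' % (revmap.get(node, node), tag))
assert b''.join(out) == b'bbb222 v1.0\nzzz999 orphan\n'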
171
171
172 def _rewritesubstate(self, source, data):
172 def _rewritesubstate(self, source, data):
173 fp = stringio()
173 fp = stringio()
174 for line in data.splitlines():
174 for line in data.splitlines():
175 s = line.split(b' ', 1)
175 s = line.split(b' ', 1)
176 if len(s) != 2:
176 if len(s) != 2:
177 continue
177 continue
178
178
179 revid = s[0]
179 revid = s[0]
180 subpath = s[1]
180 subpath = s[1]
181 if revid != sha1nodeconstants.nullhex:
181 if revid != sha1nodeconstants.nullhex:
182 revmap = self.subrevmaps.get(subpath)
182 revmap = self.subrevmaps.get(subpath)
183 if revmap is None:
183 if revmap is None:
184 revmap = mapfile(
184 revmap = mapfile(
185 self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
185 self.ui, self.repo.wjoin(subpath, b'.hg/shamap')
186 )
186 )
187 self.subrevmaps[subpath] = revmap
187 self.subrevmaps[subpath] = revmap
188
188
189 # It is reasonable that one or more of the subrepos don't
189 # It is reasonable that one or more of the subrepos don't
190 # need to be converted, in which case they can be cloned
190 # need to be converted, in which case they can be cloned
191 # into place instead of converted. Therefore, only warn
191 # into place instead of converted. Therefore, only warn
192 # once.
192 # once.
193 msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
193 msg = _(b'no ".hgsubstate" updates will be made for "%s"\n')
194 if len(revmap) == 0:
194 if len(revmap) == 0:
195 sub = self.repo.wvfs.reljoin(subpath, b'.hg')
195 sub = self.repo.wvfs.reljoin(subpath, b'.hg')
196
196
197 if self.repo.wvfs.exists(sub):
197 if self.repo.wvfs.exists(sub):
198 self.ui.warn(msg % subpath)
198 self.ui.warn(msg % subpath)
199
199
200 newid = revmap.get(revid)
200 newid = revmap.get(revid)
201 if not newid:
201 if not newid:
202 if len(revmap) > 0:
202 if len(revmap) > 0:
203 self.ui.warn(
203 self.ui.warn(
204 _(b"%s is missing from %s/.hg/shamap\n")
204 _(b"%s is missing from %s/.hg/shamap\n")
205 % (revid, subpath)
205 % (revid, subpath)
206 )
206 )
207 else:
207 else:
208 revid = newid
208 revid = newid
209
209
210 fp.write(b'%s %s\n' % (revid, subpath))
210 fp.write(b'%s %s\n' % (revid, subpath))
211
211
212 return fp.getvalue()
212 return fp.getvalue()
213
213
214 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
214 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
215 """Calculates the files from p2 that we need to pull in when merging p1
215 """Calculates the files from p2 that we need to pull in when merging p1
216 and p2, given that the merge is coming from the given source.
216 and p2, given that the merge is coming from the given source.
217
217
218 This prevents us from losing files that only exist in the target p2 and
218 This prevents us from losing files that only exist in the target p2 and
219 that don't come from the source repo (like if you're merging multiple
219 that don't come from the source repo (like if you're merging multiple
220 repositories together).
220 repositories together).
221 """
221 """
222 anc = [p1ctx.ancestor(p2ctx)]
222 anc = [p1ctx.ancestor(p2ctx)]
223 # Calculate what files are coming from p2
223 # Calculate what files are coming from p2
224 # TODO: mresult.commitinfo might be able to get that info
224 # TODO: mresult.commitinfo might be able to get that info
225 mresult = mergemod.calculateupdates(
225 mresult = mergemod.calculateupdates(
226 self.repo,
226 self.repo,
227 p1ctx,
227 p1ctx,
228 p2ctx,
228 p2ctx,
229 anc,
229 anc,
230 branchmerge=True,
230 branchmerge=True,
231 force=True,
231 force=True,
232 acceptremote=False,
232 acceptremote=False,
233 followcopies=False,
233 followcopies=False,
234 )
234 )
235
235
236 for file, (action, info, msg) in mresult.filemap():
236 for file, (action, info, msg) in mresult.filemap():
237 if source.targetfilebelongstosource(file):
237 if source.targetfilebelongstosource(file):
238 # If the file belongs to the source repo, ignore the p2
238 # If the file belongs to the source repo, ignore the p2
239 # since it will be covered by the existing fileset.
239 # since it will be covered by the existing fileset.
240 continue
240 continue
241
241
242 # If the file requires actual merging, abort. We don't have enough
242 # If the file requires actual merging, abort. We don't have enough
243 # context to resolve merges correctly.
243 # context to resolve merges correctly.
244 if action in mergestate.CONVERT_MERGE_ACTIONS:
244 if action in mergestate.CONVERT_MERGE_ACTIONS:
245 raise error.Abort(
245 raise error.Abort(
246 _(
246 _(
247 b"unable to convert merge commit "
247 b"unable to convert merge commit "
248 b"since target parents do not merge cleanly (file "
248 b"since target parents do not merge cleanly (file "
249 b"%s, parents %s and %s)"
249 b"%s, parents %s and %s)"
250 )
250 )
251 % (file, p1ctx, p2ctx)
251 % (file, p1ctx, p2ctx)
252 )
252 )
253 elif action == mergestate.ACTION_KEEP:
253 elif action == mergestate.ACTION_KEEP:
254 # 'keep' means nothing changed from p1
254 # 'keep' means nothing changed from p1
255 continue
255 continue
256 else:
256 else:
257 # Any other change means we want to take the p2 version
257 # Any other change means we want to take the p2 version
258 yield file
258 yield file
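The per-file decision above reduces to a small classifier. A rough sketch, with hypothetical stand-ins for mergestate's action constants:

MERGE_ACTIONS, KEEP = {b'm'}, b'k'  # hypothetical stand-ins
def classify(action, belongs_to_source):
    if belongs_to_source:
        return 'skip'      # already covered by the source fileset
    if action in MERGE_ACTIONS:
        return 'abort'     # a real merge is needed; cannot resolve here
    if action == KEEP:
        return 'skip'      # unchanged from p1
    return 'take-p2'       # any other change takes the p2 version
assert classify(b'g', False) == 'take-p2'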
259
259
260 def putcommit(
260 def putcommit(
261 self, files, copies, parents, commit, source, revmap, full, cleanp2
261 self, files, copies, parents, commit, source, revmap, full, cleanp2
262 ):
262 ):
263 files = dict(files)
263 files = dict(files)
264
264
265 def getfilectx(repo, memctx, f):
265 def getfilectx(repo, memctx, f):
266 if p2ctx and f in p2files and f not in copies:
266 if p2ctx and f in p2files and f not in copies:
267 self.ui.debug(b'reusing %s from p2\n' % f)
267 self.ui.debug(b'reusing %s from p2\n' % f)
268 try:
268 try:
269 return p2ctx[f]
269 return p2ctx[f]
270 except error.ManifestLookupError:
270 except error.ManifestLookupError:
271 # If the file doesn't exist in p2, then we're syncing a
271 # If the file doesn't exist in p2, then we're syncing a
272 # delete, so just return None.
272 # delete, so just return None.
273 return None
273 return None
274 try:
274 try:
275 v = files[f]
275 v = files[f]
276 except KeyError:
276 except KeyError:
277 return None
277 return None
278 data, mode = source.getfile(f, v)
278 data, mode = source.getfile(f, v)
279 if data is None:
279 if data is None:
280 return None
280 return None
281 if f == b'.hgtags':
281 if f == b'.hgtags':
282 data = self._rewritetags(source, revmap, data)
282 data = self._rewritetags(source, revmap, data)
283 if f == b'.hgsubstate':
283 if f == b'.hgsubstate':
284 data = self._rewritesubstate(source, data)
284 data = self._rewritesubstate(source, data)
285 return context.memfilectx(
285 return context.memfilectx(
286 self.repo,
286 self.repo,
287 memctx,
287 memctx,
288 f,
288 f,
289 data,
289 data,
290 b'l' in mode,
290 b'l' in mode,
291 b'x' in mode,
291 b'x' in mode,
292 copies.get(f),
292 copies.get(f),
293 )
293 )
294
294
295 pl = []
295 pl = []
296 for p in parents:
296 for p in parents:
297 if p not in pl:
297 if p not in pl:
298 pl.append(p)
298 pl.append(p)
299 parents = pl
299 parents = pl
300 nparents = len(parents)
300 nparents = len(parents)
301 if self.filemapmode and nparents == 1:
301 if self.filemapmode and nparents == 1:
302 m1node = self.repo.changelog.read(bin(parents[0]))[0]
302 m1node = self.repo.changelog.read(bin(parents[0]))[0]
303 parent = parents[0]
303 parent = parents[0]
304
304
305 if len(parents) < 2:
305 if len(parents) < 2:
306 parents.append(self.repo.nullid)
306 parents.append(self.repo.nullid)
307 if len(parents) < 2:
307 if len(parents) < 2:
308 parents.append(self.repo.nullid)
308 parents.append(self.repo.nullid)
309 p2 = parents.pop(0)
309 p2 = parents.pop(0)
310
310
311 text = commit.desc
311 text = commit.desc
312
312
313 sha1s = re.findall(sha1re, text)
313 sha1s = re.findall(sha1re, text)
314 for sha1 in sha1s:
314 for sha1 in sha1s:
315 oldrev = source.lookuprev(sha1)
315 oldrev = source.lookuprev(sha1)
316 newrev = revmap.get(oldrev)
316 newrev = revmap.get(oldrev)
317 if newrev is not None:
317 if newrev is not None:
318 text = text.replace(sha1, newrev[: len(sha1)])
318 text = text.replace(sha1, newrev[: len(sha1)])
319
319
320 extra = commit.extra.copy()
320 extra = commit.extra.copy()
321
321
322 sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
322 sourcename = self.repo.ui.config(b'convert', b'hg.sourcename')
323 if sourcename:
323 if sourcename:
324 extra[b'convert_source'] = sourcename
324 extra[b'convert_source'] = sourcename
325
325
326 for label in (
326 for label in (
327 b'source',
327 b'source',
328 b'transplant_source',
328 b'transplant_source',
329 b'rebase_source',
329 b'rebase_source',
330 b'intermediate-source',
330 b'intermediate-source',
331 ):
331 ):
332 node = extra.get(label)
332 node = extra.get(label)
333
333
334 if node is None:
334 if node is None:
335 continue
335 continue
336
336
337 # Only transplant stores its reference in binary
337 # Only transplant stores its reference in binary
338 if label == b'transplant_source':
338 if label == b'transplant_source':
339 node = hex(node)
339 node = hex(node)
340
340
341 newrev = revmap.get(node)
341 newrev = revmap.get(node)
342 if newrev is not None:
342 if newrev is not None:
343 if label == b'transplant_source':
343 if label == b'transplant_source':
344 newrev = bin(newrev)
344 newrev = bin(newrev)
345
345
346 extra[label] = newrev
346 extra[label] = newrev
347
347
348 if self.branchnames and commit.branch:
348 if self.branchnames and commit.branch:
349 extra[b'branch'] = commit.branch
349 extra[b'branch'] = commit.branch
350 if commit.rev and commit.saverev:
350 if commit.rev and commit.saverev:
351 extra[b'convert_revision'] = commit.rev
351 extra[b'convert_revision'] = commit.rev
352
352
353 while parents:
353 while parents:
354 p1 = p2
354 p1 = p2
355 p2 = parents.pop(0)
355 p2 = parents.pop(0)
356 p1ctx = self.repo[p1]
356 p1ctx = self.repo[p1]
357 p2ctx = None
357 p2ctx = None
358 if p2 != self.repo.nullid:
358 if p2 != self.repo.nullid:
359 p2ctx = self.repo[p2]
359 p2ctx = self.repo[p2]
360 fileset = set(files)
360 fileset = set(files)
361 if full:
361 if full:
362 fileset.update(self.repo[p1])
362 fileset.update(self.repo[p1])
363 fileset.update(self.repo[p2])
363 fileset.update(self.repo[p2])
364
364
365 if p2ctx:
365 if p2ctx:
366 p2files = set(cleanp2)
366 p2files = set(cleanp2)
367 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
367 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
368 p2files.add(file)
368 p2files.add(file)
369 fileset.add(file)
369 fileset.add(file)
370
370
371 ctx = context.memctx(
371 ctx = context.memctx(
372 self.repo,
372 self.repo,
373 (p1, p2),
373 (p1, p2),
374 text,
374 text,
375 fileset,
375 fileset,
376 getfilectx,
376 getfilectx,
377 commit.author,
377 commit.author,
378 commit.date,
378 commit.date,
379 extra,
379 extra,
380 )
380 )
381
381
382 # We won't know if the conversion changes the node until after the
382 # We won't know if the conversion changes the node until after the
383 # commit, so copy the source's phase for now.
383 # commit, so copy the source's phase for now.
384 self.repo.ui.setconfig(
384 self.repo.ui.setconfig(
385 b'phases',
385 b'phases',
386 b'new-commit',
386 b'new-commit',
387 phases.phasenames[commit.phase],
387 phases.phasenames[commit.phase],
388 b'convert',
388 b'convert',
389 )
389 )
390
390
391 with self.repo.transaction(b"convert") as tr:
391 with self.repo.transaction(b"convert") as tr:
392 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
392 if self.repo.ui.config(b'convert', b'hg.preserve-hash'):
393 origctx = commit.ctx
393 origctx = commit.ctx
394 else:
394 else:
395 origctx = None
395 origctx = None
396 node = hex(self.repo.commitctx(ctx, origctx=origctx))
396 node = hex(self.repo.commitctx(ctx, origctx=origctx))
397
397
398 # If the node value has changed, but the phase is lower than
398 # If the node value has changed, but the phase is lower than
399 # draft, set it back to draft since it hasn't been exposed
399 # draft, set it back to draft since it hasn't been exposed
400 # anywhere.
400 # anywhere.
401 if commit.rev != node:
401 if commit.rev != node:
402 ctx = self.repo[node]
402 ctx = self.repo[node]
403 if ctx.phase() < phases.draft:
403 if ctx.phase() < phases.draft:
404 phases.registernew(
404 phases.registernew(
405 self.repo, tr, phases.draft, [ctx.rev()]
405 self.repo, tr, phases.draft, [ctx.rev()]
406 )
406 )
407
407
408 text = b"(octopus merge fixup)\n"
408 text = b"(octopus merge fixup)\n"
409 p2 = node
409 p2 = node
410
410
411 if self.filemapmode and nparents == 1:
411 if self.filemapmode and nparents == 1:
412 man = self.repo.manifestlog.getstorage(b'')
412 man = self.repo.manifestlog.getstorage(b'')
413 mnode = self.repo.changelog.read(bin(p2))[0]
413 mnode = self.repo.changelog.read(bin(p2))[0]
414 closed = b'close' in commit.extra
414 closed = b'close' in commit.extra
415 if not closed and not man.cmp(m1node, man.revision(mnode)):
415 if not closed and not man.cmp(m1node, man.revision(mnode)):
416 self.ui.status(_(b"filtering out empty revision\n"))
416 self.ui.status(_(b"filtering out empty revision\n"))
417 self.repo.rollback(force=True)
417 self.repo.rollback(force=True)
418 return parent
418 return parent
419 return p2
419 return p2
420
420
421 def puttags(self, tags):
421 def puttags(self, tags):
422 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
422 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
423 tagparent = tagparent or self.repo.nullid
423 tagparent = tagparent or self.repo.nullid
424
424
425 oldlines = set()
425 oldlines = set()
426 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
426 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
427 for h in heads:
427 for h in heads:
428 if b'.hgtags' in self.repo[h]:
428 if b'.hgtags' in self.repo[h]:
429 oldlines.update(
429 oldlines.update(
430 set(self.repo[h][b'.hgtags'].data().splitlines(True))
430 set(self.repo[h][b'.hgtags'].data().splitlines(True))
431 )
431 )
432 oldlines = sorted(list(oldlines))
432 oldlines = sorted(list(oldlines))
433
433
434 newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
434 newlines = sorted([(b"%s %s\n" % (tags[tag], tag)) for tag in tags])
435 if newlines == oldlines:
435 if newlines == oldlines:
436 return None, None
436 return None, None
437
437
438 # if the old and new tags match, then there is nothing to update
438 # if the old and new tags match, then there is nothing to update
439 oldtags = set()
439 oldtags = set()
440 newtags = set()
440 newtags = set()
441 for line in oldlines:
441 for line in oldlines:
442 s = line.strip().split(b' ', 1)
442 s = line.strip().split(b' ', 1)
443 if len(s) != 2:
443 if len(s) != 2:
444 continue
444 continue
445 oldtags.add(s[1])
445 oldtags.add(s[1])
446 for line in newlines:
446 for line in newlines:
447 s = line.strip().split(b' ', 1)
447 s = line.strip().split(b' ', 1)
448 if len(s) != 2:
448 if len(s) != 2:
449 continue
449 continue
450 if s[1] not in oldtags:
450 if s[1] not in oldtags:
451 newtags.add(s[1].strip())
451 newtags.add(s[1].strip())
452
452
453 if not newtags:
453 if not newtags:
454 return None, None
454 return None, None
455
455
456 data = b"".join(newlines)
456 data = b"".join(newlines)
457
457
458 def getfilectx(repo, memctx, f):
458 def getfilectx(repo, memctx, f):
459 return context.memfilectx(repo, memctx, f, data, False, False, None)
459 return context.memfilectx(repo, memctx, f, data, False, False, None)
460
460
461 self.ui.status(_(b"updating tags\n"))
461 self.ui.status(_(b"updating tags\n"))
462 date = b"%d 0" % int(time.mktime(time.gmtime()))
462 date = b"%d 0" % int(time.mktime(time.gmtime()))
463 extra = {b'branch': self.tagsbranch}
463 extra = {b'branch': self.tagsbranch}
464 ctx = context.memctx(
464 ctx = context.memctx(
465 self.repo,
465 self.repo,
466 (tagparent, None),
466 (tagparent, None),
467 b"update tags",
467 b"update tags",
468 [b".hgtags"],
468 [b".hgtags"],
469 getfilectx,
469 getfilectx,
470 b"convert-repo",
470 b"convert-repo",
471 date,
471 date,
472 extra,
472 extra,
473 )
473 )
474 node = self.repo.commitctx(ctx)
474 node = self.repo.commitctx(ctx)
475 return hex(node), hex(tagparent)
475 return hex(node), hex(tagparent)
476
476
477 def setfilemapmode(self, active):
477 def setfilemapmode(self, active):
478 self.filemapmode = active
478 self.filemapmode = active
479
479
480 def putbookmarks(self, updatedbookmark):
480 def putbookmarks(self, updatedbookmark):
481 if not len(updatedbookmark):
481 if not len(updatedbookmark):
482 return
482 return
483 wlock = lock = tr = None
483 wlock = lock = tr = None
484 try:
484 try:
485 wlock = self.repo.wlock()
485 wlock = self.repo.wlock()
486 lock = self.repo.lock()
486 lock = self.repo.lock()
487 tr = self.repo.transaction(b'bookmark')
487 tr = self.repo.transaction(b'bookmark')
488 self.ui.status(_(b"updating bookmarks\n"))
488 self.ui.status(_(b"updating bookmarks\n"))
489 destmarks = self.repo._bookmarks
489 destmarks = self.repo._bookmarks
490 changes = [
490 changes = [
491 (bookmark, bin(updatedbookmark[bookmark]))
491 (bookmark, bin(updatedbookmark[bookmark]))
492 for bookmark in updatedbookmark
492 for bookmark in updatedbookmark
493 ]
493 ]
494 destmarks.applychanges(self.repo, tr, changes)
494 destmarks.applychanges(self.repo, tr, changes)
495 tr.close()
495 tr.close()
496 finally:
496 finally:
497 lockmod.release(lock, wlock, tr)
497 lockmod.release(lock, wlock, tr)
498
498
499 def hascommitfrommap(self, rev):
499 def hascommitfrommap(self, rev):
500 # the exact semantics of clonebranches is unclear so we can't say no
500 # the exact semantics of clonebranches is unclear so we can't say no
501 return rev in self.repo or self.clonebranches
501 return rev in self.repo or self.clonebranches
502
502
503 def hascommitforsplicemap(self, rev):
503 def hascommitforsplicemap(self, rev):
504 if rev not in self.repo and self.clonebranches:
504 if rev not in self.repo and self.clonebranches:
505 raise error.Abort(
505 raise error.Abort(
506 _(
506 _(
507 b'revision %s not found in destination '
507 b'revision %s not found in destination '
508 b'repository (lookups with clonebranches=true '
508 b'repository (lookups with clonebranches=true '
509 b'are not implemented)'
509 b'are not implemented)'
510 )
510 )
511 % rev
511 % rev
512 )
512 )
513 return rev in self.repo
513 return rev in self.repo
514
514
515
515
516 class mercurial_source(common.converter_source):
516 class mercurial_source(common.converter_source):
517 def __init__(self, ui, repotype, path, revs=None):
517 def __init__(self, ui, repotype, path, revs=None):
518 common.converter_source.__init__(self, ui, repotype, path, revs)
518 common.converter_source.__init__(self, ui, repotype, path, revs)
519 self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
519 self.ignoreerrors = ui.configbool(b'convert', b'hg.ignoreerrors')
520 self.ignored = set()
520 self.ignored = set()
521 self.saverev = ui.configbool(b'convert', b'hg.saverev')
521 self.saverev = ui.configbool(b'convert', b'hg.saverev')
522 try:
522 try:
523 self.repo = hg.repository(self.ui, path)
523 self.repo = hg.repository(self.ui, path)
524 # try to provoke an exception if this isn't really a hg
524 # try to provoke an exception if this isn't really a hg
525 # repo, but some other bogus compatible-looking url
525 # repo, but some other bogus compatible-looking url
526 if not self.repo.local():
526 if not self.repo.local():
527 raise error.RepoError
527 raise error.RepoError
528 except error.RepoError:
528 except error.RepoError:
529 ui.traceback()
529 ui.traceback()
530 raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
530 raise NoRepo(_(b"%s is not a local Mercurial repository") % path)
531 self.lastrev = None
531 self.lastrev = None
532 self.lastctx = None
532 self.lastctx = None
533 self._changescache = None, None
533 self._changescache = None, None
534 self.convertfp = None
534 self.convertfp = None
535 # Restrict converted revisions to startrev descendants
535 # Restrict converted revisions to startrev descendants
536 startnode = ui.config(b'convert', b'hg.startrev')
536 startnode = ui.config(b'convert', b'hg.startrev')
537 hgrevs = ui.config(b'convert', b'hg.revs')
537 hgrevs = ui.config(b'convert', b'hg.revs')
538 if hgrevs is None:
538 if hgrevs is None:
539 if startnode is not None:
539 if startnode is not None:
540 try:
540 try:
541 startnode = self.repo.lookup(startnode)
541 startnode = self.repo.lookup(startnode)
542 except error.RepoError:
542 except error.RepoError:
543 raise error.Abort(
543 raise error.Abort(
544 _(b'%s is not a valid start revision') % startnode
544 _(b'%s is not a valid start revision') % startnode
545 )
545 )
546 startrev = self.repo.changelog.rev(startnode)
546 startrev = self.repo.changelog.rev(startnode)
547 children = {startnode: 1}
547 children = {startnode: 1}
548 for r in self.repo.changelog.descendants([startrev]):
548 for r in self.repo.changelog.descendants([startrev]):
549 children[self.repo.changelog.node(r)] = 1
549 children[self.repo.changelog.node(r)] = 1
550 self.keep = children.__contains__
550 self.keep = children.__contains__
551 else:
551 else:
552 self.keep = util.always
552 self.keep = util.always
553 if revs:
553 if revs:
554 self._heads = [self.repo.lookup(r) for r in revs]
554 self._heads = [self.repo.lookup(r) for r in revs]
555 else:
555 else:
556 self._heads = self.repo.heads()
556 self._heads = self.repo.heads()
557 else:
557 else:
558 if revs or startnode is not None:
558 if revs or startnode is not None:
559 raise error.Abort(
559 raise error.Abort(
560 _(
560 _(
561 b'hg.revs cannot be combined with '
561 b'hg.revs cannot be combined with '
562 b'hg.startrev or --rev'
562 b'hg.startrev or --rev'
563 )
563 )
564 )
564 )
565 nodes = set()
565 nodes = set()
566 parents = set()
566 parents = set()
567 for r in logcmdutil.revrange(self.repo, [hgrevs]):
567 for r in logcmdutil.revrange(self.repo, [hgrevs]):
568 ctx = self.repo[r]
568 ctx = self.repo[r]
569 nodes.add(ctx.node())
569 nodes.add(ctx.node())
570 parents.update(p.node() for p in ctx.parents())
570 parents.update(p.node() for p in ctx.parents())
571 self.keep = nodes.__contains__
571 self.keep = nodes.__contains__
572 self._heads = nodes - parents
572 self._heads = nodes - parents
573
573
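The `nodes - parents` step above computes heads without a graph walk: within a fixed revision set, a head is any node that no member of the set names as a parent. The same idea over plain values (hypothetical helper, for illustration)::

    def heads_of(nodes, parents_of):
        parents = set()
        for n in nodes:
            parents.update(parents_of(n))
        return set(nodes) - parents

    # heads_of({'a', 'b'}, {'a': [], 'b': ['a']}.get) -> {'b'}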
574 def _changectx(self, rev):
574 def _changectx(self, rev):
575 if self.lastrev != rev:
575 if self.lastrev != rev:
576 self.lastctx = self.repo[rev]
576 self.lastctx = self.repo[rev]
577 self.lastrev = rev
577 self.lastrev = rev
578 return self.lastctx
578 return self.lastctx
579
579
580 def _parents(self, ctx):
580 def _parents(self, ctx):
581 return [p for p in ctx.parents() if p and self.keep(p.node())]
581 return [p for p in ctx.parents() if p and self.keep(p.node())]
582
582
583 def getheads(self):
583 def getheads(self):
584 return [hex(h) for h in self._heads if self.keep(h)]
584 return [hex(h) for h in self._heads if self.keep(h)]
585
585
586 def getfile(self, name, rev):
586 def getfile(self, name, rev):
587 try:
587 try:
588 fctx = self._changectx(rev)[name]
588 fctx = self._changectx(rev)[name]
589 return fctx.data(), fctx.flags()
589 return fctx.data(), fctx.flags()
590 except error.LookupError:
590 except error.LookupError:
591 return None, None
591 return None, None
592
592
593 def _changedfiles(self, ctx1, ctx2):
593 def _changedfiles(self, ctx1, ctx2):
594 ma, r = [], []
594 ma, r = [], []
595 maappend = ma.append
595 maappend = ma.append
596 rappend = r.append
596 rappend = r.append
597 d = ctx1.manifest().diff(ctx2.manifest())
597 d = ctx1.manifest().diff(ctx2.manifest())
598 for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
598 for f, ((node1, flag1), (node2, flag2)) in d.items():
599 if node2 is None:
599 if node2 is None:
600 rappend(f)
600 rappend(f)
601 else:
601 else:
602 maappend(f)
602 maappend(f)
603 return ma, r
603 return ma, r
604
604
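`manifest().diff()` as used above maps every changed path to a pair of `(node, flags)` tuples, with `None` standing in on the side where the file does not exist; that is why `node2 is None` means removal. A plain-dict analogue of the shape (illustrative only, not the real manifest type)::

    def diff_manifests(m1, m2):
        changed = {}
        for f in set(m1) | set(m2):
            if m1.get(f) != m2.get(f):
                changed[f] = (m1.get(f), m2.get(f))
        return changed

    # diff_manifests({'a': 'n1'}, {'a': 'n2', 'b': 'n3'})
    # -> {'a': ('n1', 'n2'), 'b': (None, 'n3')}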
605 def getchanges(self, rev, full):
605 def getchanges(self, rev, full):
606 ctx = self._changectx(rev)
606 ctx = self._changectx(rev)
607 parents = self._parents(ctx)
607 parents = self._parents(ctx)
608 if full or not parents:
608 if full or not parents:
609 files = copyfiles = ctx.manifest()
609 files = copyfiles = ctx.manifest()
610 if parents:
610 if parents:
611 if self._changescache[0] == rev:
611 if self._changescache[0] == rev:
612 ma, r = self._changescache[1]
612 ma, r = self._changescache[1]
613 else:
613 else:
614 ma, r = self._changedfiles(parents[0], ctx)
614 ma, r = self._changedfiles(parents[0], ctx)
615 if not full:
615 if not full:
616 files = ma + r
616 files = ma + r
617 copyfiles = ma
617 copyfiles = ma
618 # _getcopies() is also run for roots and before filtering so missing
618 # _getcopies() is also run for roots and before filtering so missing
619 # revlogs are detected early
619 # revlogs are detected early
620 copies = self._getcopies(ctx, parents, copyfiles)
620 copies = self._getcopies(ctx, parents, copyfiles)
621 cleanp2 = set()
621 cleanp2 = set()
622 if len(parents) == 2:
622 if len(parents) == 2:
623 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
623 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
624 for f, value in pycompat.iteritems(d):
624 for f, value in d.items():
625 if value is None:
625 if value is None:
626 cleanp2.add(f)
626 cleanp2.add(f)
627 changes = [(f, rev) for f in files if f not in self.ignored]
627 changes = [(f, rev) for f in files if f not in self.ignored]
628 changes.sort()
628 changes.sort()
629 return changes, copies, cleanp2
629 return changes, copies, cleanp2
630
630
631 def _getcopies(self, ctx, parents, files):
631 def _getcopies(self, ctx, parents, files):
632 copies = {}
632 copies = {}
633 for name in files:
633 for name in files:
634 if name in self.ignored:
634 if name in self.ignored:
635 continue
635 continue
636 try:
636 try:
637 copysource = ctx.filectx(name).copysource()
637 copysource = ctx.filectx(name).copysource()
638 if copysource in self.ignored:
638 if copysource in self.ignored:
639 continue
639 continue
640 # Ignore copy sources not in parent revisions
640 # Ignore copy sources not in parent revisions
641 if not any(copysource in p for p in parents):
641 if not any(copysource in p for p in parents):
642 continue
642 continue
643 copies[name] = copysource
643 copies[name] = copysource
644 except TypeError:
644 except TypeError:
645 pass
645 pass
646 except error.LookupError as e:
646 except error.LookupError as e:
647 if not self.ignoreerrors:
647 if not self.ignoreerrors:
648 raise
648 raise
649 self.ignored.add(name)
649 self.ignored.add(name)
650 self.ui.warn(_(b'ignoring: %s\n') % e)
650 self.ui.warn(_(b'ignoring: %s\n') % e)
651 return copies
651 return copies
652
652
653 def getcommit(self, rev):
653 def getcommit(self, rev):
654 ctx = self._changectx(rev)
654 ctx = self._changectx(rev)
655 _parents = self._parents(ctx)
655 _parents = self._parents(ctx)
656 parents = [p.hex() for p in _parents]
656 parents = [p.hex() for p in _parents]
657 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
657 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
658 crev = rev
658 crev = rev
659
659
660 return common.commit(
660 return common.commit(
661 author=ctx.user(),
661 author=ctx.user(),
662 date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
662 date=dateutil.datestr(ctx.date(), b'%Y-%m-%d %H:%M:%S %1%2'),
663 desc=ctx.description(),
663 desc=ctx.description(),
664 rev=crev,
664 rev=crev,
665 parents=parents,
665 parents=parents,
666 optparents=optparents,
666 optparents=optparents,
667 branch=ctx.branch(),
667 branch=ctx.branch(),
668 extra=ctx.extra(),
668 extra=ctx.extra(),
669 sortkey=ctx.rev(),
669 sortkey=ctx.rev(),
670 saverev=self.saverev,
670 saverev=self.saverev,
671 phase=ctx.phase(),
671 phase=ctx.phase(),
672 ctx=ctx,
672 ctx=ctx,
673 )
673 )
674
674
675 def numcommits(self):
675 def numcommits(self):
676 return len(self.repo)
676 return len(self.repo)
677
677
678 def gettags(self):
678 def gettags(self):
679 # This will get written to .hgtags, filter non global tags out.
679 # This will get written to .hgtags, filter non global tags out.
680 tags = [
680 tags = [
681 t
681 t
682 for t in self.repo.tagslist()
682 for t in self.repo.tagslist()
683 if self.repo.tagtype(t[0]) == b'global'
683 if self.repo.tagtype(t[0]) == b'global'
684 ]
684 ]
685 return {name: hex(node) for name, node in tags if self.keep(node)}
685 return {name: hex(node) for name, node in tags if self.keep(node)}
686
686
687 def getchangedfiles(self, rev, i):
687 def getchangedfiles(self, rev, i):
688 ctx = self._changectx(rev)
688 ctx = self._changectx(rev)
689 parents = self._parents(ctx)
689 parents = self._parents(ctx)
690 if not parents and i is None:
690 if not parents and i is None:
691 i = 0
691 i = 0
692 ma, r = ctx.manifest().keys(), []
692 ma, r = ctx.manifest().keys(), []
693 else:
693 else:
694 i = i or 0
694 i = i or 0
695 ma, r = self._changedfiles(parents[i], ctx)
695 ma, r = self._changedfiles(parents[i], ctx)
696 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
696 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
697
697
698 if i == 0:
698 if i == 0:
699 self._changescache = (rev, (ma, r))
699 self._changescache = (rev, (ma, r))
700
700
701 return ma + r
701 return ma + r
702
702
703 def converted(self, rev, destrev):
703 def converted(self, rev, destrev):
704 if self.convertfp is None:
704 if self.convertfp is None:
705 self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
705 self.convertfp = open(self.repo.vfs.join(b'shamap'), b'ab')
706 self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
706 self.convertfp.write(util.tonativeeol(b'%s %s\n' % (destrev, rev)))
707 self.convertfp.flush()
707 self.convertfp.flush()
708
708
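`converted()` appends one `<destination-node> <source-revision>` line per converted changeset to the `shamap` file. Reading the map back is symmetric; a sketch (the function name is ours)::

    def read_shamap(path):
        mapping = {}
        with open(path, 'rb') as fp:
            for line in fp:
                dest, src = line.split(b' ', 1)
                mapping[src.strip()] = dest
        return mapping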
709 def before(self):
709 def before(self):
710 self.ui.debug(b'run hg source pre-conversion action\n')
710 self.ui.debug(b'run hg source pre-conversion action\n')
711
711
712 def after(self):
712 def after(self):
713 self.ui.debug(b'run hg source post-conversion action\n')
713 self.ui.debug(b'run hg source post-conversion action\n')
714
714
715 def hasnativeorder(self):
715 def hasnativeorder(self):
716 return True
716 return True
717
717
718 def hasnativeclose(self):
718 def hasnativeclose(self):
719 return True
719 return True
720
720
721 def lookuprev(self, rev):
721 def lookuprev(self, rev):
722 try:
722 try:
723 return hex(self.repo.lookup(rev))
723 return hex(self.repo.lookup(rev))
724 except (error.RepoError, error.LookupError):
724 except (error.RepoError, error.LookupError):
725 return None
725 return None
726
726
727 def getbookmarks(self):
727 def getbookmarks(self):
728 return bookmarks.listbookmarks(self.repo)
728 return bookmarks.listbookmarks(self.repo)
729
729
730 def checkrevformat(self, revstr, mapname=b'splicemap'):
730 def checkrevformat(self, revstr, mapname=b'splicemap'):
731 """Mercurial, revision string is a 40 byte hex"""
731 """Mercurial, revision string is a 40 byte hex"""
732 self.checkhexformat(revstr, mapname)
732 self.checkhexformat(revstr, mapname)
@@ -1,410 +1,410 @@
1 # monotone.py - monotone support for the convert extension
1 # monotone.py - monotone support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 # others
4 # others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os
9 import os
10 import re
10 import re
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.pycompat import open
13 from mercurial.pycompat import open
14 from mercurial import (
14 from mercurial import (
15 error,
15 error,
16 pycompat,
16 pycompat,
17 )
17 )
18 from mercurial.utils import dateutil
18 from mercurial.utils import dateutil
19
19
20 from . import common
20 from . import common
21
21
22
22
23 class monotone_source(common.converter_source, common.commandline):
23 class monotone_source(common.converter_source, common.commandline):
24 def __init__(self, ui, repotype, path=None, revs=None):
24 def __init__(self, ui, repotype, path=None, revs=None):
25 common.converter_source.__init__(self, ui, repotype, path, revs)
25 common.converter_source.__init__(self, ui, repotype, path, revs)
26 if revs and len(revs) > 1:
26 if revs and len(revs) > 1:
27 raise error.Abort(
27 raise error.Abort(
28 _(
28 _(
29 b'monotone source does not support specifying '
29 b'monotone source does not support specifying '
30 b'multiple revs'
30 b'multiple revs'
31 )
31 )
32 )
32 )
33 common.commandline.__init__(self, ui, b'mtn')
33 common.commandline.__init__(self, ui, b'mtn')
34
34
35 self.ui = ui
35 self.ui = ui
36 self.path = path
36 self.path = path
37 self.automatestdio = False
37 self.automatestdio = False
38 self.revs = revs
38 self.revs = revs
39
39
40 norepo = common.NoRepo(
40 norepo = common.NoRepo(
41 _(b"%s does not look like a monotone repository") % path
41 _(b"%s does not look like a monotone repository") % path
42 )
42 )
43 if not os.path.exists(os.path.join(path, b'_MTN')):
43 if not os.path.exists(os.path.join(path, b'_MTN')):
44 # Could be a monotone repository (SQLite db file)
44 # Could be a monotone repository (SQLite db file)
45 try:
45 try:
46 f = open(path, b'rb')
46 f = open(path, b'rb')
47 header = f.read(16)
47 header = f.read(16)
48 f.close()
48 f.close()
49 except IOError:
49 except IOError:
50 header = b''
50 header = b''
51 if header != b'SQLite format 3\x00':
51 if header != b'SQLite format 3\x00':
52 raise norepo
52 raise norepo
53
53
54 # regular expressions for parsing monotone output
54 # regular expressions for parsing monotone output
55 space = br'\s*'
55 space = br'\s*'
56 name = br'\s+"((?:\\"|[^"])*)"\s*'
56 name = br'\s+"((?:\\"|[^"])*)"\s*'
57 value = name
57 value = name
58 revision = br'\s+\[(\w+)\]\s*'
58 revision = br'\s+\[(\w+)\]\s*'
59 lines = br'(?:.|\n)+'
59 lines = br'(?:.|\n)+'
60
60
61 self.dir_re = re.compile(space + b"dir" + name)
61 self.dir_re = re.compile(space + b"dir" + name)
62 self.file_re = re.compile(
62 self.file_re = re.compile(
63 space + b"file" + name + b"content" + revision
63 space + b"file" + name + b"content" + revision
64 )
64 )
65 self.add_file_re = re.compile(
65 self.add_file_re = re.compile(
66 space + b"add_file" + name + b"content" + revision
66 space + b"add_file" + name + b"content" + revision
67 )
67 )
68 self.patch_re = re.compile(
68 self.patch_re = re.compile(
69 space + b"patch" + name + b"from" + revision + b"to" + revision
69 space + b"patch" + name + b"from" + revision + b"to" + revision
70 )
70 )
71 self.rename_re = re.compile(space + b"rename" + name + b"to" + name)
71 self.rename_re = re.compile(space + b"rename" + name + b"to" + name)
72 self.delete_re = re.compile(space + b"delete" + name)
72 self.delete_re = re.compile(space + b"delete" + name)
73 self.tag_re = re.compile(space + b"tag" + name + b"revision" + revision)
73 self.tag_re = re.compile(space + b"tag" + name + b"revision" + revision)
74 self.cert_re = re.compile(
74 self.cert_re = re.compile(
75 lines + space + b"name" + name + b"value" + value
75 lines + space + b"name" + name + b"value" + value
76 )
76 )
77
77
78 attr = space + b"file" + lines + space + b"attr" + space
78 attr = space + b"file" + lines + space + b"attr" + space
79 self.attr_execute_re = re.compile(
79 self.attr_execute_re = re.compile(
80 attr + b'"mtn:execute"' + space + b'"true"'
80 attr + b'"mtn:execute"' + space + b'"true"'
81 )
81 )
82
82
83 # cached data
83 # cached data
84 self.manifest_rev = None
84 self.manifest_rev = None
85 self.manifest = None
85 self.manifest = None
86 self.files = None
86 self.files = None
87 self.dirs = None
87 self.dirs = None
88
88
89 common.checktool(b'mtn', abort=False)
89 common.checktool(b'mtn', abort=False)
90
90
91 def mtnrun(self, *args, **kwargs):
91 def mtnrun(self, *args, **kwargs):
92 if self.automatestdio:
92 if self.automatestdio:
93 return self.mtnrunstdio(*args, **kwargs)
93 return self.mtnrunstdio(*args, **kwargs)
94 else:
94 else:
95 return self.mtnrunsingle(*args, **kwargs)
95 return self.mtnrunsingle(*args, **kwargs)
96
96
97 def mtnrunsingle(self, *args, **kwargs):
97 def mtnrunsingle(self, *args, **kwargs):
98 kwargs['d'] = self.path
98 kwargs['d'] = self.path
99 return self.run0(b'automate', *args, **kwargs)
99 return self.run0(b'automate', *args, **kwargs)
100
100
101 def mtnrunstdio(self, *args, **kwargs):
101 def mtnrunstdio(self, *args, **kwargs):
102 # Prepare the command in automate stdio format
102 # Prepare the command in automate stdio format
103 kwargs = pycompat.byteskwargs(kwargs)
103 kwargs = pycompat.byteskwargs(kwargs)
104 command = []
104 command = []
105 for k, v in pycompat.iteritems(kwargs):
105 for k, v in kwargs.items():
106 command.append(b"%d:%s" % (len(k), k))
106 command.append(b"%d:%s" % (len(k), k))
107 if v:
107 if v:
108 command.append(b"%d:%s" % (len(v), v))
108 command.append(b"%d:%s" % (len(v), v))
109 if command:
109 if command:
110 command.insert(0, b'o')
110 command.insert(0, b'o')
111 command.append(b'e')
111 command.append(b'e')
112
112
113 command.append(b'l')
113 command.append(b'l')
114 for arg in args:
114 for arg in args:
115 command.append(b"%d:%s" % (len(arg), arg))
115 command.append(b"%d:%s" % (len(arg), arg))
116 command.append(b'e')
116 command.append(b'e')
117 command = b''.join(command)
117 command = b''.join(command)
118
118
119 self.ui.debug(b"mtn: sending '%s'\n" % command)
119 self.ui.debug(b"mtn: sending '%s'\n" % command)
120 self.mtnwritefp.write(command)
120 self.mtnwritefp.write(command)
121 self.mtnwritefp.flush()
121 self.mtnwritefp.flush()
122
122
123 return self.mtnstdioreadcommandoutput(command)
123 return self.mtnstdioreadcommandoutput(command)
124
124
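The request assembled above is monotone's netstring-like "automate stdio" input framing: options as `o<len>:<key><len>:<value>...e`, then the command as `l<len>:<arg>...e`. A self-contained encoder mirroring the loop above, for illustration::

    def encode_automate_command(args, opts=None):
        out = []
        if opts:
            out.append(b'o')
            for k, v in opts.items():
                out.append(b'%d:%s' % (len(k), k))
                if v:
                    out.append(b'%d:%s' % (len(v), v))
            out.append(b'e')
        out.append(b'l')
        for arg in args:
            out.append(b'%d:%s' % (len(arg), arg))
        out.append(b'e')
        return b''.join(out)

    # encode_automate_command([b'get_revision', b'abc123'])
    # -> b'l12:get_revision6:abc123e'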
125 def mtnstdioreadpacket(self):
125 def mtnstdioreadpacket(self):
126 read = None
126 read = None
127 commandnbr = b''
127 commandnbr = b''
128 while read != b':':
128 while read != b':':
129 read = self.mtnreadfp.read(1)
129 read = self.mtnreadfp.read(1)
130 if not read:
130 if not read:
131 raise error.Abort(_(b'bad mtn packet - no end of commandnbr'))
131 raise error.Abort(_(b'bad mtn packet - no end of commandnbr'))
132 commandnbr += read
132 commandnbr += read
133 commandnbr = commandnbr[:-1]
133 commandnbr = commandnbr[:-1]
134
134
135 stream = self.mtnreadfp.read(1)
135 stream = self.mtnreadfp.read(1)
136 if stream not in b'mewptl':
136 if stream not in b'mewptl':
137 raise error.Abort(
137 raise error.Abort(
138 _(b'bad mtn packet - bad stream type %s') % stream
138 _(b'bad mtn packet - bad stream type %s') % stream
139 )
139 )
140
140
141 read = self.mtnreadfp.read(1)
141 read = self.mtnreadfp.read(1)
142 if read != b':':
142 if read != b':':
143 raise error.Abort(_(b'bad mtn packet - no divider before size'))
143 raise error.Abort(_(b'bad mtn packet - no divider before size'))
144
144
145 read = None
145 read = None
146 lengthstr = b''
146 lengthstr = b''
147 while read != b':':
147 while read != b':':
148 read = self.mtnreadfp.read(1)
148 read = self.mtnreadfp.read(1)
149 if not read:
149 if not read:
150 raise error.Abort(_(b'bad mtn packet - no end of packet size'))
150 raise error.Abort(_(b'bad mtn packet - no end of packet size'))
151 lengthstr += read
151 lengthstr += read
152 try:
152 try:
153 length = pycompat.long(lengthstr[:-1])
153 length = pycompat.long(lengthstr[:-1])
154 except (TypeError, ValueError):
154 except (TypeError, ValueError):
155 raise error.Abort(
155 raise error.Abort(
156 _(b'bad mtn packet - bad packet size %s') % lengthstr
156 _(b'bad mtn packet - bad packet size %s') % lengthstr
157 )
157 )
158
158
159 read = self.mtnreadfp.read(length)
159 read = self.mtnreadfp.read(length)
160 if len(read) != length:
160 if len(read) != length:
161 raise error.Abort(
161 raise error.Abort(
162 _(
162 _(
163 b"bad mtn packet - unable to read full packet "
163 b"bad mtn packet - unable to read full packet "
164 b"read %s of %s"
164 b"read %s of %s"
165 )
165 )
166 % (len(read), length)
166 % (len(read), length)
167 )
167 )
168
168
169 return (commandnbr, stream, length, read)
169 return (commandnbr, stream, length, read)
170
170
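Each reply packet decoded byte-by-byte above has the shape `<command-number>:<stream-char>:<payload-size>:<payload>`. A buffer-based parser showing the same framing (a sketch with no error handling, unlike the stream reader above)::

    def parse_packet(buf):
        nbr, stream, size, rest = buf.split(b':', 3)
        length = int(size)
        return nbr, stream, rest[:length], rest[length:]

    # parse_packet(b'0:m:5:helloMORE') -> (b'0', b'm', b'hello', b'MORE')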
171 def mtnstdioreadcommandoutput(self, command):
171 def mtnstdioreadcommandoutput(self, command):
172 retval = []
172 retval = []
173 while True:
173 while True:
174 commandnbr, stream, length, output = self.mtnstdioreadpacket()
174 commandnbr, stream, length, output = self.mtnstdioreadpacket()
175 self.ui.debug(
175 self.ui.debug(
176 b'mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)
176 b'mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)
177 )
177 )
178
178
179 if stream == b'l':
179 if stream == b'l':
180 # End of command
180 # End of command
181 if output != b'0':
181 if output != b'0':
182 raise error.Abort(
182 raise error.Abort(
183 _(b"mtn command '%s' returned %s") % (command, output)
183 _(b"mtn command '%s' returned %s") % (command, output)
184 )
184 )
185 break
185 break
186 elif stream in b'ew':
186 elif stream in b'ew':
187 # Error, warning output
187 # Error, warning output
188 self.ui.warn(_(b'%s error:\n') % self.command)
188 self.ui.warn(_(b'%s error:\n') % self.command)
189 self.ui.warn(output)
189 self.ui.warn(output)
190 elif stream == b'p':
190 elif stream == b'p':
191 # Progress messages
191 # Progress messages
192 self.ui.debug(b'mtn: ' + output)
192 self.ui.debug(b'mtn: ' + output)
193 elif stream == b'm':
193 elif stream == b'm':
194 # Main stream - command output
194 # Main stream - command output
195 retval.append(output)
195 retval.append(output)
196
196
197 return b''.join(retval)
197 return b''.join(retval)
198
198
199 def mtnloadmanifest(self, rev):
199 def mtnloadmanifest(self, rev):
200 if self.manifest_rev == rev:
200 if self.manifest_rev == rev:
201 return
201 return
202 self.manifest = self.mtnrun(b"get_manifest_of", rev).split(b"\n\n")
202 self.manifest = self.mtnrun(b"get_manifest_of", rev).split(b"\n\n")
203 self.manifest_rev = rev
203 self.manifest_rev = rev
204 self.files = {}
204 self.files = {}
205 self.dirs = {}
205 self.dirs = {}
206
206
207 for e in self.manifest:
207 for e in self.manifest:
208 m = self.file_re.match(e)
208 m = self.file_re.match(e)
209 if m:
209 if m:
210 attr = b""
210 attr = b""
211 name = m.group(1)
211 name = m.group(1)
212 node = m.group(2)
212 node = m.group(2)
213 if self.attr_execute_re.match(e):
213 if self.attr_execute_re.match(e):
214 attr += b"x"
214 attr += b"x"
215 self.files[name] = (node, attr)
215 self.files[name] = (node, attr)
216 m = self.dir_re.match(e)
216 m = self.dir_re.match(e)
217 if m:
217 if m:
218 self.dirs[m.group(1)] = True
218 self.dirs[m.group(1)] = True
219
219
220 def mtnisfile(self, name, rev):
220 def mtnisfile(self, name, rev):
221 # a non-file could be a directory or a deleted or renamed file
221 # a non-file could be a directory or a deleted or renamed file
222 self.mtnloadmanifest(rev)
222 self.mtnloadmanifest(rev)
223 return name in self.files
223 return name in self.files
224
224
225 def mtnisdir(self, name, rev):
225 def mtnisdir(self, name, rev):
226 self.mtnloadmanifest(rev)
226 self.mtnloadmanifest(rev)
227 return name in self.dirs
227 return name in self.dirs
228
228
229 def mtngetcerts(self, rev):
229 def mtngetcerts(self, rev):
230 certs = {
230 certs = {
231 b"author": b"<missing>",
231 b"author": b"<missing>",
232 b"date": b"<missing>",
232 b"date": b"<missing>",
233 b"changelog": b"<missing>",
233 b"changelog": b"<missing>",
234 b"branch": b"<missing>",
234 b"branch": b"<missing>",
235 }
235 }
236 certlist = self.mtnrun(b"certs", rev)
236 certlist = self.mtnrun(b"certs", rev)
237 # mtn < 0.45:
237 # mtn < 0.45:
238 # key "test@selenic.com"
238 # key "test@selenic.com"
239 # mtn >= 0.45:
239 # mtn >= 0.45:
240 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
240 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
241 certlist = re.split(br'\n\n {6}key ["\[]', certlist)
241 certlist = re.split(br'\n\n {6}key ["\[]', certlist)
242 for e in certlist:
242 for e in certlist:
243 m = self.cert_re.match(e)
243 m = self.cert_re.match(e)
244 if m:
244 if m:
245 name, value = m.groups()
245 name, value = m.groups()
246 value = value.replace(br'\"', b'"')
246 value = value.replace(br'\"', b'"')
247 value = value.replace(br'\\', b'\\')
247 value = value.replace(br'\\', b'\\')
248 certs[name] = value
248 certs[name] = value
249 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
249 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
250 # and all times are stored in UTC
250 # and all times are stored in UTC
251 certs[b"date"] = certs[b"date"].split(b'.')[0] + b" UTC"
251 certs[b"date"] = certs[b"date"].split(b'.')[0] + b" UTC"
252 return certs
252 return certs
253
253
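The `date` cert is normalized above by truncating subseconds and tagging the value as UTC so one fixed format string can parse it. For illustration, the same parse with the standard library alone::

    import time

    def parse_mtn_date(raw):
        # b'2005-02-05T09:39:12.364306' -> UTC struct_time
        return time.strptime(raw.split(b'.')[0].decode('ascii'),
                             '%Y-%m-%dT%H:%M:%S')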
254 # implement the converter_source interface:
254 # implement the converter_source interface:
255
255
256 def getheads(self):
256 def getheads(self):
257 if not self.revs:
257 if not self.revs:
258 return self.mtnrun(b"leaves").splitlines()
258 return self.mtnrun(b"leaves").splitlines()
259 else:
259 else:
260 return self.revs
260 return self.revs
261
261
262 def getchanges(self, rev, full):
262 def getchanges(self, rev, full):
263 if full:
263 if full:
264 raise error.Abort(
264 raise error.Abort(
265 _(b"convert from monotone does not support --full")
265 _(b"convert from monotone does not support --full")
266 )
266 )
267 revision = self.mtnrun(b"get_revision", rev).split(b"\n\n")
267 revision = self.mtnrun(b"get_revision", rev).split(b"\n\n")
268 files = {}
268 files = {}
269 ignoremove = {}
269 ignoremove = {}
270 renameddirs = []
270 renameddirs = []
271 copies = {}
271 copies = {}
272 for e in revision:
272 for e in revision:
273 m = self.add_file_re.match(e)
273 m = self.add_file_re.match(e)
274 if m:
274 if m:
275 files[m.group(1)] = rev
275 files[m.group(1)] = rev
276 ignoremove[m.group(1)] = rev
276 ignoremove[m.group(1)] = rev
277 m = self.patch_re.match(e)
277 m = self.patch_re.match(e)
278 if m:
278 if m:
279 files[m.group(1)] = rev
279 files[m.group(1)] = rev
280 # Delete/rename is handled later when the convert engine
280 # Delete/rename is handled later when the convert engine
281 # discovers an IOError exception from getfile,
281 # discovers an IOError exception from getfile,
282 # but only if we add the "from" file to the list of changes.
282 # but only if we add the "from" file to the list of changes.
283 m = self.delete_re.match(e)
283 m = self.delete_re.match(e)
284 if m:
284 if m:
285 files[m.group(1)] = rev
285 files[m.group(1)] = rev
286 m = self.rename_re.match(e)
286 m = self.rename_re.match(e)
287 if m:
287 if m:
288 toname = m.group(2)
288 toname = m.group(2)
289 fromname = m.group(1)
289 fromname = m.group(1)
290 if self.mtnisfile(toname, rev):
290 if self.mtnisfile(toname, rev):
291 ignoremove[toname] = 1
291 ignoremove[toname] = 1
292 copies[toname] = fromname
292 copies[toname] = fromname
293 files[toname] = rev
293 files[toname] = rev
294 files[fromname] = rev
294 files[fromname] = rev
295 elif self.mtnisdir(toname, rev):
295 elif self.mtnisdir(toname, rev):
296 renameddirs.append((fromname, toname))
296 renameddirs.append((fromname, toname))
297
297
298 # Directory renames can be handled only once we have recorded
298 # Directory renames can be handled only once we have recorded
299 # all new files
299 # all new files
300 for fromdir, todir in renameddirs:
300 for fromdir, todir in renameddirs:
301 renamed = {}
301 renamed = {}
302 for tofile in self.files:
302 for tofile in self.files:
303 if tofile in ignoremove:
303 if tofile in ignoremove:
304 continue
304 continue
305 if tofile.startswith(todir + b'/'):
305 if tofile.startswith(todir + b'/'):
306 renamed[tofile] = fromdir + tofile[len(todir) :]
306 renamed[tofile] = fromdir + tofile[len(todir) :]
307 # Avoid chained moves like:
307 # Avoid chained moves like:
308 # d1(/a) => d3/d1(/a)
308 # d1(/a) => d3/d1(/a)
309 # d2 => d3
309 # d2 => d3
310 ignoremove[tofile] = 1
310 ignoremove[tofile] = 1
311 for tofile, fromfile in renamed.items():
311 for tofile, fromfile in renamed.items():
312 self.ui.debug(
312 self.ui.debug(
313 b"copying file in renamed directory from '%s' to '%s'"
313 b"copying file in renamed directory from '%s' to '%s'"
314 % (fromfile, tofile),
314 % (fromfile, tofile),
315 b'\n',
315 b'\n',
316 )
316 )
317 files[tofile] = rev
317 files[tofile] = rev
318 copies[tofile] = fromfile
318 copies[tofile] = fromfile
319 for fromfile in renamed.values():
319 for fromfile in renamed.values():
320 files[fromfile] = rev
320 files[fromfile] = rev
321
321
322 return (files.items(), copies, set())
322 return (files.items(), copies, set())
323
323
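The directory-rename pass above expands one `rename <fromdir> to <todir>` stanza into per-file copy records for everything tracked under the new directory. The core mapping, extracted as a standalone sketch::

    def expand_dir_rename(tracked, fromdir, todir):
        copies = {}
        prefix = todir + b'/'
        for tofile in tracked:
            if tofile.startswith(prefix):
                copies[tofile] = fromdir + tofile[len(todir):]
        return copies

    # expand_dir_rename([b'd3/d1/a'], b'd1', b'd3/d1') -> {b'd3/d1/a': b'd1/a'}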
324 def getfile(self, name, rev):
324 def getfile(self, name, rev):
325 if not self.mtnisfile(name, rev):
325 if not self.mtnisfile(name, rev):
326 return None, None
326 return None, None
327 try:
327 try:
328 data = self.mtnrun(b"get_file_of", name, r=rev)
328 data = self.mtnrun(b"get_file_of", name, r=rev)
329 except Exception:
329 except Exception:
330 return None, None
330 return None, None
331 self.mtnloadmanifest(rev)
331 self.mtnloadmanifest(rev)
332 node, attr = self.files.get(name, (None, b""))
332 node, attr = self.files.get(name, (None, b""))
333 return data, attr
333 return data, attr
334
334
335 def getcommit(self, rev):
335 def getcommit(self, rev):
336 extra = {}
336 extra = {}
337 certs = self.mtngetcerts(rev)
337 certs = self.mtngetcerts(rev)
338 if certs.get(b'suspend') == certs[b"branch"]:
338 if certs.get(b'suspend') == certs[b"branch"]:
339 extra[b'close'] = b'1'
339 extra[b'close'] = b'1'
340 dateformat = b"%Y-%m-%dT%H:%M:%S"
340 dateformat = b"%Y-%m-%dT%H:%M:%S"
341 return common.commit(
341 return common.commit(
342 author=certs[b"author"],
342 author=certs[b"author"],
343 date=dateutil.datestr(dateutil.strdate(certs[b"date"], dateformat)),
343 date=dateutil.datestr(dateutil.strdate(certs[b"date"], dateformat)),
344 desc=certs[b"changelog"],
344 desc=certs[b"changelog"],
345 rev=rev,
345 rev=rev,
346 parents=self.mtnrun(b"parents", rev).splitlines(),
346 parents=self.mtnrun(b"parents", rev).splitlines(),
347 branch=certs[b"branch"],
347 branch=certs[b"branch"],
348 extra=extra,
348 extra=extra,
349 )
349 )
350
350
351 def gettags(self):
351 def gettags(self):
352 tags = {}
352 tags = {}
353 for e in self.mtnrun(b"tags").split(b"\n\n"):
353 for e in self.mtnrun(b"tags").split(b"\n\n"):
354 m = self.tag_re.match(e)
354 m = self.tag_re.match(e)
355 if m:
355 if m:
356 tags[m.group(1)] = m.group(2)
356 tags[m.group(1)] = m.group(2)
357 return tags
357 return tags
358
358
359 def getchangedfiles(self, rev, i):
359 def getchangedfiles(self, rev, i):
360 # This function is only needed to support --filemap
360 # This function is only needed to support --filemap
361 # ... and we don't support that
361 # ... and we don't support that
362 raise NotImplementedError
362 raise NotImplementedError
363
363
364 def before(self):
364 def before(self):
365 # Check if we have a new enough version to use automate stdio
365 # Check if we have a new enough version to use automate stdio
366 try:
366 try:
367 versionstr = self.mtnrunsingle(b"interface_version")
367 versionstr = self.mtnrunsingle(b"interface_version")
368 version = float(versionstr)
368 version = float(versionstr)
369 except Exception:
369 except Exception:
370 raise error.Abort(
370 raise error.Abort(
371 _(b"unable to determine mtn automate interface version")
371 _(b"unable to determine mtn automate interface version")
372 )
372 )
373
373
374 if version >= 12.0:
374 if version >= 12.0:
375 self.automatestdio = True
375 self.automatestdio = True
376 self.ui.debug(
376 self.ui.debug(
377 b"mtn automate version %f - using automate stdio\n" % version
377 b"mtn automate version %f - using automate stdio\n" % version
378 )
378 )
379
379
380 # launch the long-running automate stdio process
380 # launch the long-running automate stdio process
381 self.mtnwritefp, self.mtnreadfp = self._run2(
381 self.mtnwritefp, self.mtnreadfp = self._run2(
382 b'automate', b'stdio', b'-d', self.path
382 b'automate', b'stdio', b'-d', self.path
383 )
383 )
384 # read the headers
384 # read the headers
385 read = self.mtnreadfp.readline()
385 read = self.mtnreadfp.readline()
386 if read != b'format-version: 2\n':
386 if read != b'format-version: 2\n':
387 raise error.Abort(
387 raise error.Abort(
388 _(b'mtn automate stdio header unexpected: %s') % read
388 _(b'mtn automate stdio header unexpected: %s') % read
389 )
389 )
390 while read != b'\n':
390 while read != b'\n':
391 read = self.mtnreadfp.readline()
391 read = self.mtnreadfp.readline()
392 if not read:
392 if not read:
393 raise error.Abort(
393 raise error.Abort(
394 _(
394 _(
395 b"failed to reach end of mtn automate "
395 b"failed to reach end of mtn automate "
396 b"stdio headers"
396 b"stdio headers"
397 )
397 )
398 )
398 )
399 else:
399 else:
400 self.ui.debug(
400 self.ui.debug(
401 b"mtn automate version %s - not using automate stdio "
401 b"mtn automate version %s - not using automate stdio "
402 b"(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version
402 b"(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version
403 )
403 )
404
404
405 def after(self):
405 def after(self):
406 if self.automatestdio:
406 if self.automatestdio:
407 self.mtnwritefp.close()
407 self.mtnwritefp.close()
408 self.mtnwritefp = None
408 self.mtnwritefp = None
409 self.mtnreadfp.close()
409 self.mtnreadfp.close()
410 self.mtnreadfp = None
410 self.mtnreadfp = None
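The stanza regexes near the top of this file are assembled from shared fragments (`space`, `name`, `revision`). A trimmed standalone rebuild of just the rename matcher, to show how the capture groups line up::

    import re

    space = br'\s*'
    name = br'\s+"((?:\\"|[^"])*)"\s*'
    rename_re = re.compile(space + b"rename" + name + b"to" + name)

    m = rename_re.match(b'rename "old/path" to "new/path"')
    # m.groups() -> (b'old/path', b'new/path')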
@@ -1,1740 +1,1740 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import codecs
5 import codecs
6 import locale
6 import locale
7 import os
7 import os
8 import pickle
8 import pickle
9 import re
9 import re
10 import xml.dom.minidom
10 import xml.dom.minidom
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial.pycompat import open
13 from mercurial.pycompat import open
14 from mercurial import (
14 from mercurial import (
15 encoding,
15 encoding,
16 error,
16 error,
17 pycompat,
17 pycompat,
18 util,
18 util,
19 vfs as vfsmod,
19 vfs as vfsmod,
20 )
20 )
21 from mercurial.utils import (
21 from mercurial.utils import (
22 dateutil,
22 dateutil,
23 procutil,
23 procutil,
24 stringutil,
24 stringutil,
25 )
25 )
26
26
27 from . import common
27 from . import common
28
28
29 stringio = util.stringio
29 stringio = util.stringio
30 propertycache = util.propertycache
30 propertycache = util.propertycache
31 urlerr = util.urlerr
31 urlerr = util.urlerr
32 urlreq = util.urlreq
32 urlreq = util.urlreq
33
33
34 commandline = common.commandline
34 commandline = common.commandline
35 commit = common.commit
35 commit = common.commit
36 converter_sink = common.converter_sink
36 converter_sink = common.converter_sink
37 converter_source = common.converter_source
37 converter_source = common.converter_source
38 decodeargs = common.decodeargs
38 decodeargs = common.decodeargs
39 encodeargs = common.encodeargs
39 encodeargs = common.encodeargs
40 makedatetimestamp = common.makedatetimestamp
40 makedatetimestamp = common.makedatetimestamp
41 mapfile = common.mapfile
41 mapfile = common.mapfile
42 MissingTool = common.MissingTool
42 MissingTool = common.MissingTool
43 NoRepo = common.NoRepo
43 NoRepo = common.NoRepo
44
44
45 # Subversion stuff. Works best with very recent Python SVN bindings
45 # Subversion stuff. Works best with very recent Python SVN bindings
46 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
46 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
47 # these bindings.
47 # these bindings.
48
48
49 try:
49 try:
50 import svn
50 import svn
51 import svn.client
51 import svn.client
52 import svn.core
52 import svn.core
53 import svn.ra
53 import svn.ra
54 import svn.delta
54 import svn.delta
55 from . import transport
55 from . import transport
56 import warnings
56 import warnings
57
57
58 warnings.filterwarnings(
58 warnings.filterwarnings(
59 'ignore', module='svn.core', category=DeprecationWarning
59 'ignore', module='svn.core', category=DeprecationWarning
60 )
60 )
61 svn.core.SubversionException # trigger import to catch error
61 svn.core.SubversionException # trigger import to catch error
62
62
63 except ImportError:
63 except ImportError:
64 svn = None
64 svn = None
65
65
66
66
67 # In Subversion, paths and URLs are Unicode (encoded as UTF-8), which
67 # In Subversion, paths and URLs are Unicode (encoded as UTF-8), which
68 # Subversion converts from / to native strings when interfacing with the OS.
68 # Subversion converts from / to native strings when interfacing with the OS.
69 # When passing paths and URLs to Subversion, we have to recode them such that
69 # When passing paths and URLs to Subversion, we have to recode them such that
70 # they round-trip with what Subversion is doing.
70 # they round-trip with what Subversion is doing.
71
71
72 fsencoding = None
72 fsencoding = None
73
73
74
74
75 def init_fsencoding():
75 def init_fsencoding():
76 global fsencoding, fsencoding_is_utf8
76 global fsencoding, fsencoding_is_utf8
77 if fsencoding is not None:
77 if fsencoding is not None:
78 return
78 return
79 if pycompat.iswindows:
79 if pycompat.iswindows:
80 # On Windows, filenames are Unicode, but we store them using the MBCS
80 # On Windows, filenames are Unicode, but we store them using the MBCS
81 # encoding.
81 # encoding.
82 fsencoding = 'mbcs'
82 fsencoding = 'mbcs'
83 else:
83 else:
84 # This is the encoding used to convert UTF-8 back to natively-encoded
84 # This is the encoding used to convert UTF-8 back to natively-encoded
85 # strings in Subversion 1.14.0 or earlier with APR 1.7.0 or earlier.
85 # strings in Subversion 1.14.0 or earlier with APR 1.7.0 or earlier.
86 with util.with_lc_ctype():
86 with util.with_lc_ctype():
87 fsencoding = locale.nl_langinfo(locale.CODESET) or 'ISO-8859-1'
87 fsencoding = locale.nl_langinfo(locale.CODESET) or 'ISO-8859-1'
88 fsencoding = codecs.lookup(fsencoding).name
88 fsencoding = codecs.lookup(fsencoding).name
89 fsencoding_is_utf8 = fsencoding == codecs.lookup('utf-8').name
89 fsencoding_is_utf8 = fsencoding == codecs.lookup('utf-8').name
90
90
91
91
92 def fs2svn(s):
92 def fs2svn(s):
93 if fsencoding_is_utf8:
93 if fsencoding_is_utf8:
94 return s
94 return s
95 else:
95 else:
96 return s.decode(fsencoding).encode('utf-8')
96 return s.decode(fsencoding).encode('utf-8')
97
97
98
98
99 def formatsvndate(date):
99 def formatsvndate(date):
100 return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z')
100 return dateutil.datestr(date, b'%Y-%m-%dT%H:%M:%S.000000Z')
101
101
102
102
103 def parsesvndate(s):
103 def parsesvndate(s):
104 # Example SVN datetime. Includes microseconds.
104 # Example SVN datetime. Includes microseconds.
105 # ISO-8601 conformant
105 # ISO-8601 conformant
106 # '2007-01-04T17:35:00.902377Z'
106 # '2007-01-04T17:35:00.902377Z'
107 return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S'])
107 return dateutil.parsedate(s[:19] + b' UTC', [b'%Y-%m-%dT%H:%M:%S'])
108
108
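formatsvndate() and parsesvndate() are near-inverses: parsing keeps only the first 19 characters, so a round trip zeroes the subsecond digits. A usage sketch, assuming this module's helpers are importable::

    stamp = parsesvndate(b'2007-01-04T17:35:00.902377Z')
    # stamp is a (unixtime, tz-offset) pair; the offset is 0, i.e. UTC
    assert formatsvndate(stamp) == b'2007-01-04T17:35:00.000000Z'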
109
109
110 class SvnPathNotFound(Exception):
110 class SvnPathNotFound(Exception):
111 pass
111 pass
112
112
113
113
114 def revsplit(rev):
114 def revsplit(rev):
115 """Parse a revision string and return (uuid, path, revnum).
115 """Parse a revision string and return (uuid, path, revnum).
116 >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
116 >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
117 ... b'/proj%20B/mytrunk/mytrunk@1')
117 ... b'/proj%20B/mytrunk/mytrunk@1')
118 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
118 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
119 >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
119 >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
120 ('', '', 1)
120 ('', '', 1)
121 >>> revsplit(b'@7')
121 >>> revsplit(b'@7')
122 ('', '', 7)
122 ('', '', 7)
123 >>> revsplit(b'7')
123 >>> revsplit(b'7')
124 ('', '', 0)
124 ('', '', 0)
125 >>> revsplit(b'bad')
125 >>> revsplit(b'bad')
126 ('', '', 0)
126 ('', '', 0)
127 """
127 """
128 parts = rev.rsplit(b'@', 1)
128 parts = rev.rsplit(b'@', 1)
129 revnum = 0
129 revnum = 0
130 if len(parts) > 1:
130 if len(parts) > 1:
131 revnum = int(parts[1])
131 revnum = int(parts[1])
132 parts = parts[0].split(b'/', 1)
132 parts = parts[0].split(b'/', 1)
133 uuid = b''
133 uuid = b''
134 mod = b''
134 mod = b''
135 if len(parts) > 1 and parts[0].startswith(b'svn:'):
135 if len(parts) > 1 and parts[0].startswith(b'svn:'):
136 uuid = parts[0][4:]
136 uuid = parts[0][4:]
137 mod = b'/' + parts[1]
137 mod = b'/' + parts[1]
138 return uuid, mod, revnum
138 return uuid, mod, revnum
139
139
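revsplit() decomposes the `svn:<uuid><module>@<revnum>` identifiers that convert records for Subversion revisions; composing one is the mirror image (the helper name is ours, not part of this module)::

    def joinsvnrev(uuid, module, revnum):
        return b'svn:%s%s@%d' % (uuid, module, revnum)

    # joinsvnrev(b'a2147622-4a9f-4db4-a8d3-13562ff547b2', b'/trunk', 1)
    # -> b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/trunk@1'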
140
140
141 def quote(s):
141 def quote(s):
142 # As of svn 1.7, many svn calls expect "canonical" paths. In
142 # As of svn 1.7, many svn calls expect "canonical" paths. In
143 # theory, we should call svn.core.*canonicalize() on all paths
143 # theory, we should call svn.core.*canonicalize() on all paths
144 # before passing them to the API. Instead, we assume the base url
144 # before passing them to the API. Instead, we assume the base url
145 # is canonical and copy the behaviour of svn URL encoding function
145 # is canonical and copy the behaviour of svn URL encoding function
146 # so we can extend it safely with new components. The "safe"
146 # so we can extend it safely with new components. The "safe"
147 # characters were taken from the "svn_uri__char_validity" table in
147 # characters were taken from the "svn_uri__char_validity" table in
148 # libsvn_subr/path.c.
148 # libsvn_subr/path.c.
149 return urlreq.quote(s, b"!$&'()*+,-./:=@_~")
149 return urlreq.quote(s, b"!$&'()*+,-./:=@_~")
150
150
151
151
152 def geturl(path):
152 def geturl(path):
153 """Convert path or URL to a SVN URL, encoded in UTF-8.
153 """Convert path or URL to a SVN URL, encoded in UTF-8.
154
154
155 This can raise UnicodeDecodeError if the path or URL can't be converted to
155 This can raise UnicodeDecodeError if the path or URL can't be converted to
156 unicode using `fsencoding`.
156 unicode using `fsencoding`.
157 """
157 """
158 try:
158 try:
159 return svn.client.url_from_path(
159 return svn.client.url_from_path(
160 svn.core.svn_path_canonicalize(fs2svn(path))
160 svn.core.svn_path_canonicalize(fs2svn(path))
161 )
161 )
162 except svn.core.SubversionException:
162 except svn.core.SubversionException:
163 # svn.client.url_from_path() fails with local repositories
163 # svn.client.url_from_path() fails with local repositories
164 pass
164 pass
165 if os.path.isdir(path):
165 if os.path.isdir(path):
166 path = os.path.normpath(util.abspath(path))
166 path = os.path.normpath(util.abspath(path))
167 if pycompat.iswindows:
167 if pycompat.iswindows:
168 path = b'/' + util.normpath(path)
168 path = b'/' + util.normpath(path)
169 # Module URL is later compared with the repository URL returned
169 # Module URL is later compared with the repository URL returned
170 # by svn API, which is UTF-8.
170 # by svn API, which is UTF-8.
171 path = fs2svn(path)
171 path = fs2svn(path)
172 path = b'file://%s' % quote(path)
172 path = b'file://%s' % quote(path)
173 return svn.core.svn_path_canonicalize(path)
173 return svn.core.svn_path_canonicalize(path)
174
174
175
175
176 def optrev(number):
176 def optrev(number):
177 optrev = svn.core.svn_opt_revision_t()
177 optrev = svn.core.svn_opt_revision_t()
178 optrev.kind = svn.core.svn_opt_revision_number
178 optrev.kind = svn.core.svn_opt_revision_number
179 optrev.value.number = number
179 optrev.value.number = number
180 return optrev
180 return optrev
181
181
182
182
183 class changedpath(object):
183 class changedpath(object):
184 def __init__(self, p):
184 def __init__(self, p):
185 self.copyfrom_path = p.copyfrom_path
185 self.copyfrom_path = p.copyfrom_path
186 self.copyfrom_rev = p.copyfrom_rev
186 self.copyfrom_rev = p.copyfrom_rev
187 self.action = p.action
187 self.action = p.action
188
188
189
189
190 def get_log_child(
190 def get_log_child(
191 fp,
191 fp,
192 url,
192 url,
193 paths,
193 paths,
194 start,
194 start,
195 end,
195 end,
196 limit=0,
196 limit=0,
197 discover_changed_paths=True,
197 discover_changed_paths=True,
198 strict_node_history=False,
198 strict_node_history=False,
199 ):
199 ):
200 protocol = -1
200 protocol = -1
201
201
202 def receiver(orig_paths, revnum, author, date, message, pool):
202 def receiver(orig_paths, revnum, author, date, message, pool):
203 paths = {}
203 paths = {}
204 if orig_paths is not None:
204 if orig_paths is not None:
205 for k, v in pycompat.iteritems(orig_paths):
205 for k, v in orig_paths.items():
206 paths[k] = changedpath(v)
206 paths[k] = changedpath(v)
207 pickle.dump((paths, revnum, author, date, message), fp, protocol)
207 pickle.dump((paths, revnum, author, date, message), fp, protocol)
208
208
209 try:
209 try:
210 # Use an ra of our own so that our parent can consume
210 # Use an ra of our own so that our parent can consume
211 # our results without confusing the server.
211 # our results without confusing the server.
212 t = transport.SvnRaTransport(url=url)
212 t = transport.SvnRaTransport(url=url)
213 svn.ra.get_log(
213 svn.ra.get_log(
214 t.ra,
214 t.ra,
215 paths,
215 paths,
216 start,
216 start,
217 end,
217 end,
218 limit,
218 limit,
219 discover_changed_paths,
219 discover_changed_paths,
220 strict_node_history,
220 strict_node_history,
221 receiver,
221 receiver,
222 )
222 )
223 except IOError:
223 except IOError:
224 # Caller may interrupt the iteration
224 # Caller may interrupt the iteration
225 pickle.dump(None, fp, protocol)
225 pickle.dump(None, fp, protocol)
226 except Exception as inst:
226 except Exception as inst:
227 pickle.dump(stringutil.forcebytestr(inst), fp, protocol)
227 pickle.dump(stringutil.forcebytestr(inst), fp, protocol)
228 else:
228 else:
229 pickle.dump(None, fp, protocol)
229 pickle.dump(None, fp, protocol)
230 fp.flush()
230 fp.flush()
231 # With a large history, the cleanup process goes crazy and suddenly
231 # With a large history, the cleanup process goes crazy and suddenly
232 # consumes a *huge* amount of memory. Since the output file has been
232 # consumes a *huge* amount of memory. Since the output file has been
233 # closed, there is no need for clean termination.
233 # closed, there is no need for clean termination.
234 os._exit(0)
234 os._exit(0)
235
235
236
236
237 def debugsvnlog(ui, **opts):
237 def debugsvnlog(ui, **opts):
238 """Fetch SVN log in a subprocess and channel them back to parent to
238 """Fetch SVN log in a subprocess and channel them back to parent to
239 avoid memory collection issues.
239 avoid memory collection issues.
240 """
240 """
241 with util.with_lc_ctype():
241 with util.with_lc_ctype():
242 if svn is None:
242 if svn is None:
243 raise error.Abort(
243 raise error.Abort(
244 _(b'debugsvnlog could not load Subversion python bindings')
244 _(b'debugsvnlog could not load Subversion python bindings')
245 )
245 )
246
246
247 args = decodeargs(ui.fin.read())
247 args = decodeargs(ui.fin.read())
248 get_log_child(ui.fout, *args)
248 get_log_child(ui.fout, *args)
249
249
250
250
251 class logstream(object):
251 class logstream(object):
252 """Interruptible revision log iterator."""
252 """Interruptible revision log iterator."""
253
253
254 def __init__(self, stdout):
254 def __init__(self, stdout):
255 self._stdout = stdout
255 self._stdout = stdout
256
256
257 def __iter__(self):
257 def __iter__(self):
258 while True:
258 while True:
259 try:
259 try:
260 entry = pickle.load(self._stdout)
260 entry = pickle.load(self._stdout)
261 except EOFError:
261 except EOFError:
262 raise error.Abort(
262 raise error.Abort(
263 _(
263 _(
264 b'Mercurial failed to run itself; check that'
264 b'Mercurial failed to run itself; check that'
265 b' the hg executable is in PATH'
265 b' the hg executable is in PATH'
266 )
266 )
267 )
267 )
268 try:
268 try:
269 orig_paths, revnum, author, date, message = entry
269 orig_paths, revnum, author, date, message = entry
270 except (TypeError, ValueError):
270 except (TypeError, ValueError):
271 if entry is None:
271 if entry is None:
272 break
272 break
273 raise error.Abort(_(b"log stream exception '%s'") % entry)
273 raise error.Abort(_(b"log stream exception '%s'") % entry)
274 yield entry
274 yield entry
275
275
276 def close(self):
276 def close(self):
277 if self._stdout:
277 if self._stdout:
278 self._stdout.close()
278 self._stdout.close()
279 self._stdout = None
279 self._stdout = None
280
280
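The child process dumps `(paths, revnum, author, date, message)` tuples followed by a `None` sentinel (or an error string), and logstream above unpickles until it hits the sentinel. The protocol reduced to its core (sketch)::

    import pickle

    def iter_pickled(fp):
        # yield entries until the None sentinel written by the child
        while True:
            entry = pickle.load(fp)
            if entry is None:
                return
            yield entry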
281
281
282 class directlogstream(list):
282 class directlogstream(list):
283 """Direct revision log iterator.
283 """Direct revision log iterator.
284 This can be used for debugging and development but it will probably leak
284 This can be used for debugging and development but it will probably leak
285 memory and is not suitable for real conversions."""
285 memory and is not suitable for real conversions."""
286
286
287 def __init__(
287 def __init__(
288 self,
288 self,
289 url,
289 url,
290 paths,
290 paths,
291 start,
291 start,
292 end,
292 end,
293 limit=0,
293 limit=0,
294 discover_changed_paths=True,
294 discover_changed_paths=True,
295 strict_node_history=False,
295 strict_node_history=False,
296 ):
296 ):
297 def receiver(orig_paths, revnum, author, date, message, pool):
297 def receiver(orig_paths, revnum, author, date, message, pool):
298 paths = {}
298 paths = {}
299 if orig_paths is not None:
299 if orig_paths is not None:
300 for k, v in pycompat.iteritems(orig_paths):
300 for k, v in orig_paths.items():
301 paths[k] = changedpath(v)
301 paths[k] = changedpath(v)
302 self.append((paths, revnum, author, date, message))
302 self.append((paths, revnum, author, date, message))
303
303
304 # Use an ra of our own so that our parent can consume
304 # Use an ra of our own so that our parent can consume
305 # our results without confusing the server.
305 # our results without confusing the server.
306 t = transport.SvnRaTransport(url=url)
306 t = transport.SvnRaTransport(url=url)
307 svn.ra.get_log(
307 svn.ra.get_log(
308 t.ra,
308 t.ra,
309 paths,
309 paths,
310 start,
310 start,
311 end,
311 end,
312 limit,
312 limit,
313 discover_changed_paths,
313 discover_changed_paths,
314 strict_node_history,
314 strict_node_history,
315 receiver,
315 receiver,
316 )
316 )
317
317
318 def close(self):
318 def close(self):
319 pass
319 pass
320
320
321
321
322 # Check to see if the given path is a local Subversion repo. Verify this by
322 # Check to see if the given path is a local Subversion repo. Verify this by
323 # looking for several svn-specific files and directories in the given
323 # looking for several svn-specific files and directories in the given
324 # directory.
324 # directory.
325 def filecheck(ui, path, proto):
325 def filecheck(ui, path, proto):
326 for x in (b'locks', b'hooks', b'format', b'db'):
326 for x in (b'locks', b'hooks', b'format', b'db'):
327 if not os.path.exists(os.path.join(path, x)):
327 if not os.path.exists(os.path.join(path, x)):
328 return False
328 return False
329 return True
329 return True
330
330
331
331
332 # Check to see if a given path is the root of an svn repo over http. We verify
332 # Check to see if a given path is the root of an svn repo over http. We verify
333 # this by requesting a version-controlled URL we know can't exist and looking
333 # this by requesting a version-controlled URL we know can't exist and looking
334 # for the svn-specific "not found" XML.
334 # for the svn-specific "not found" XML.
335 def httpcheck(ui, path, proto):
335 def httpcheck(ui, path, proto):
336 try:
336 try:
337 opener = urlreq.buildopener()
337 opener = urlreq.buildopener()
338 rsp = opener.open(
338 rsp = opener.open(
339 pycompat.strurl(b'%s://%s/!svn/ver/0/.svn' % (proto, path)), b'rb'
339 pycompat.strurl(b'%s://%s/!svn/ver/0/.svn' % (proto, path)), b'rb'
340 )
340 )
341 data = rsp.read()
341 data = rsp.read()
342 except urlerr.httperror as inst:
342 except urlerr.httperror as inst:
343 if inst.code != 404:
343 if inst.code != 404:
344 # Except for 404 we cannot know for sure this is not an svn repo
344 # Except for 404 we cannot know for sure this is not an svn repo
345 ui.warn(
345 ui.warn(
346 _(
346 _(
347 b'svn: cannot probe remote repository, assume it could '
347 b'svn: cannot probe remote repository, assume it could '
348 b'be a subversion repository. Use --source-type if you '
348 b'be a subversion repository. Use --source-type if you '
349 b'know better.\n'
349 b'know better.\n'
350 )
350 )
351 )
351 )
352 return True
352 return True
353 data = inst.fp.read()
353 data = inst.fp.read()
354 except Exception:
354 except Exception:
355 # Could be urlerr.urlerror if the URL is invalid or anything else.
355 # Could be urlerr.urlerror if the URL is invalid or anything else.
356 return False
356 return False
357 return b'<m:human-readable errcode="160013">' in data
357 return b'<m:human-readable errcode="160013">' in data
358
358
359
359
360 protomap = {
360 protomap = {
361 b'http': httpcheck,
361 b'http': httpcheck,
362 b'https': httpcheck,
362 b'https': httpcheck,
363 b'file': filecheck,
363 b'file': filecheck,
364 }
364 }
365
365
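protomap keys the probe function off the URL scheme; the dispatch performed by issvnurl() below boils down to the following (a trimmed illustration, not the real function)::

    def probe(ui, url):
        proto, path = url.split(b'://', 1)
        check = protomap.get(proto)
        return check is not None and check(ui, path, proto)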
366
366
367 class NonUtf8PercentEncodedBytes(Exception):
367 class NonUtf8PercentEncodedBytes(Exception):
368 pass
368 pass
369
369
370
370
# Subversion paths are Unicode. Since the percent-decoding is done on
# UTF-8-encoded strings, percent-encoded bytes are interpreted as UTF-8.
def url2pathname_like_subversion(unicodepath):
    if pycompat.ispy3:
        # On Python 3, we have to pass unicode to urlreq.url2pathname().
        # Percent-decoded bytes get decoded using UTF-8 and the 'replace'
        # error handler.
        unicodepath = urlreq.url2pathname(unicodepath)
        if u'\N{REPLACEMENT CHARACTER}' in unicodepath:
            raise NonUtf8PercentEncodedBytes
        else:
            return unicodepath
    else:
        # If we passed unicode on Python 2, it would be converted using the
        # latin-1 encoding. Therefore, we pass UTF-8-encoded bytes.
        unicodepath = urlreq.url2pathname(unicodepath.encode('utf-8'))
        try:
            return unicodepath.decode('utf-8')
        except UnicodeDecodeError:
            raise NonUtf8PercentEncodedBytes


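# A minimal standalone illustration (Python 3 stdlib only, POSIX behaviour;
# the paths are made up, and this helper is not used by the converter) of
# the rule implemented above: percent-escapes decode as UTF-8, and byte
# sequences that are not valid UTF-8 are rejected instead of silently
# replaced.
def _decode_percent_path_sketch(unicodepath):
    from urllib.request import url2pathname

    decoded = url2pathname(unicodepath)  # UTF-8 + 'replace' error handler
    if u'\N{REPLACEMENT CHARACTER}' in decoded:
        raise NonUtf8PercentEncodedBytes
    return decoded


# _decode_percent_path_sketch('/tmp/caf%C3%A9') -> '/tmp/café'
# _decode_percent_path_sketch('/tmp/%FF') raises NonUtf8PercentEncodedBytes

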
def issvnurl(ui, url):
    try:
        proto, path = url.split(b'://', 1)
        if proto == b'file':
            if (
                pycompat.iswindows
                and path[:1] == b'/'
                and path[1:2].isalpha()
                and path[2:6].lower() == b'%3a/'
            ):
                path = path[:2] + b':/' + path[6:]
            try:
                unicodepath = path.decode(fsencoding)
            except UnicodeDecodeError:
                ui.warn(
                    _(
                        b'Subversion requires that file URLs can be converted '
                        b'to Unicode using the current locale encoding (%s)\n'
                    )
                    % pycompat.sysbytes(fsencoding)
                )
                return False
            try:
                unicodepath = url2pathname_like_subversion(unicodepath)
            except NonUtf8PercentEncodedBytes:
                ui.warn(
                    _(
                        b'Subversion does not support non-UTF-8 '
                        b'percent-encoded bytes in file URLs\n'
                    )
                )
                return False
            # Below, we approximate how Subversion checks the path. On Unix,
            # we should therefore convert the path to bytes using
            # `fsencoding` (like Subversion does). On Windows, the right
            # thing would actually be to leave the path as unicode. For now,
            # we restrict the path to MBCS.
            path = unicodepath.encode(fsencoding)
    except ValueError:
        proto = b'file'
        path = util.abspath(url)
        try:
            path.decode(fsencoding)
        except UnicodeDecodeError:
            ui.warn(
                _(
                    b'Subversion requires that paths can be converted to '
                    b'Unicode using the current locale encoding (%s)\n'
                )
                % pycompat.sysbytes(fsencoding)
            )
            return False
    if proto == b'file':
        path = util.pconvert(path)
    elif proto in (b'http', b'https'):
        if not encoding.isasciistr(path):
            ui.warn(
                _(
                    b"Subversion sources don't support non-ASCII characters in "
                    b"HTTP(S) URLs. Please percent-encode them.\n"
                )
            )
            return False
    check = protomap.get(proto, lambda *args: False)
    while b'/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit(b'/', 1)[0]
    return False


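# Sketch of the ancestor walk that ends issvnurl() above (the path is
# illustrative; this helper is not used by the converter). The URL may
# point inside a repository, so every prefix is probed via protomap until
# a check succeeds at the repository root.
def _walk_candidates_sketch(path=b'host/svn/repo/trunk/src'):
    candidates = []
    while b'/' in path:
        candidates.append(path)  # probed via check(ui, path, proto) above
        path = path.rsplit(b'/', 1)[0]
    return candidates  # ends with b'host/svn/repo' and b'host/svn'

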
# SVN conversion code stolen from bzr-svn and tailor
#
# Subversion looks like a versioned filesystem, but branch structures
# are defined by conventions and not enforced by the tool. First,
# we define the potential branches (modules) as "trunk" and "branches"
# child directories. Revisions are then identified by their
# module and revision number (and a repository identifier).
#
# The revision graph is really a tree (or a forest). By default, a
# revision's parent is the previous revision in the same module. If the
# module directory is copied/moved from another module then the
# revision is the module root and its parent is the source revision in
# the parent module. A revision has at most one parent.
#
class svn_source(converter_source):
    def __init__(self, ui, repotype, url, revs=None):
        super(svn_source, self).__init__(ui, repotype, url, revs=revs)

        init_fsencoding()
        if not (
            url.startswith(b'svn://')
            or url.startswith(b'svn+ssh://')
            or (
                os.path.exists(url)
                and os.path.exists(os.path.join(url, b'.svn'))
            )
            or issvnurl(ui, url)
        ):
            raise NoRepo(
                _(b"%s does not look like a Subversion repository") % url
            )
        if svn is None:
            raise MissingTool(_(b'could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(
                    _(
                        b'Subversion python bindings %d.%d found, '
                        b'1.4 or later required'
                    )
                    % version
                )
        except AttributeError:
            raise MissingTool(
                _(
                    b'Subversion python bindings are too old, 1.4 '
                    b'or later required'
                )
            )

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind(b'@')
            if at >= 0:
                latest = int(url[at + 1 :])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = b'UTF-8'  # Subversion is always nominal UTF-8
        try:
            with util.with_lc_ctype():
                self.transport = transport.SvnRaTransport(url=self.url)
                self.ra = self.transport.ra
                self.ctx = self.transport.client
                self.baseurl = svn.ra.get_repos_root(self.ra)
                # Module is either empty or a repository path starting with
                # a slash and not ending with a slash.
                self.module = urlreq.unquote(self.url[len(self.baseurl) :])
                self.prevmodule = None
                self.rootmodule = self.module
                self.commits = {}
                self.paths = {}
                self.uuid = svn.ra.get_uuid(self.ra)
        except svn.core.SubversionException:
            ui.traceback()
            svnversion = b'%d.%d.%d' % (
                svn.core.SVN_VER_MAJOR,
                svn.core.SVN_VER_MINOR,
                svn.core.SVN_VER_MICRO,
            )
            raise NoRepo(
                _(
                    b"%s does not look like a Subversion repository "
                    b"to libsvn version %s"
                )
                % (self.url, svnversion)
            )

        if revs:
            if len(revs) > 1:
                raise error.Abort(
                    _(
                        b'subversion source does not support '
                        b'specifying multiple revisions'
                    )
                )
            try:
                latest = int(revs[0])
            except ValueError:
                raise error.Abort(
                    _(b'svn: revision %s is not an integer') % revs[0]
                )

        trunkcfg = self.ui.config(b'convert', b'svn.trunk')
        if trunkcfg is None:
            trunkcfg = b'trunk'
        self.trunkname = trunkcfg.strip(b'/')
        self.startrev = self.ui.config(b'convert', b'svn.startrev')
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise error.Abort(
                _(b'svn: start revision %s is not an integer') % self.startrev
            )

        try:
            with util.with_lc_ctype():
                self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise error.Abort(
                _(b'no revision found in module %s') % self.module
            )
        self.last_changed = self.revnum(self.head)

        self._changescache = (None, None)

        if os.path.exists(os.path.join(url, b'.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None

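    # Illustrative sketch, not used by the converter: how the file://path@rev
    # suffix handling in __init__ above splits a URL (the path is made up).
    @staticmethod
    def _split_at_rev_sketch(url=b'file:///srv/svn/repo@1024'):
        latest = None
        at = url.rfind(b'@')
        if at >= 0:
            latest = int(url[at + 1 :])
            url = url[:at]
        return url, latest  # -> (b'file:///srv/svn/repo', 1024)
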
    def before(self):
        self.with_lc_ctype = util.with_lc_ctype()
        self.with_lc_ctype.__enter__()

    def after(self):
        self.with_lc_ctype.__exit__(None, None, None)

    def setrevmap(self, revmap):
        lastrevs = {}
        for revid in revmap:
            uuid, module, revnum = revsplit(revid)
            lastrevnum = lastrevs.setdefault(module, revnum)
            if revnum > lastrevnum:
                lastrevs[module] = revnum
        self.lastrevs = lastrevs

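    # Illustrative sketch (the uuid is made up; not used by the converter)
    # of the bookkeeping above: per module, only the highest revision
    # already converted is kept, to serve later as a fetch stop point.
    @staticmethod
    def _setrevmap_sketch():
        uuid = b'13f79535-47bb-0310-9956-ffa450edef68'
        revmap = {
            b'svn:%s/trunk@5' % uuid: None,
            b'svn:%s/trunk@9' % uuid: None,
        }
        lastrevs = {}
        for revid in revmap:
            _u, module, revnum = revsplit(revid)
            if revnum > lastrevs.setdefault(module, revnum):
                lastrevs[module] = revnum
        return lastrevs  # -> {b'/trunk': 9}
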
    def exists(self, path, optrev):
        try:
            svn.client.ls(
                self.url.rstrip(b'/') + b'/' + quote(path),
                optrev,
                False,
                self.ctx,
            )
            return True
        except svn.core.SubversionException:
            return False

    def getheads(self):
        def isdir(path, revnum):
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            cfgpath = self.ui.config(b'convert', b'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == b'':
                return None
            path = (cfgpath or name).strip(b'/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == b'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise error.Abort(
                        _(b'expected %s to be at %r, but not found')
                        % (name, path)
                    )
                return None
            self.ui.note(
                _(b'found %s at %r\n') % (name, pycompat.bytestr(path))
            )
            return path

        rev = optrev(self.last_changed)
        oldmodule = b''
        trunk = getcfgpath(b'trunk', rev)
        self.tags = getcfgpath(b'tags', rev)
        branches = getcfgpath(b'branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or b''
            self.module += b'/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise error.Abort(
                    _(b'no revision found in module %s') % self.module
                )

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = b'%s/%s' % (oldmodule, (self.tags or b'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip(b'/')
            branchnames = svn.client.ls(
                rpath + b'/' + quote(branches), rev, False, self.ctx
            )
            for branch in sorted(branchnames):
                module = b'%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_(b'ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(
                    _(b'found branch %s at %d\n')
                    % (branch, self.revnum(brevid))
                )
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise error.Abort(
                    _(
                        b'svn: start revision is not supported '
                        b'with more than one branch'
                    )
                )
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise error.Abort(
                    _(b'svn: no revision found after start revision %d')
                    % self.startrev
                )

        return self.heads

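    # The layout paths consumed by getcfgpath() above come from the
    # [convert] section of the configuration. A hypothetical hgrc excerpt
    # (values are illustrative; empty values disable detection):
    #
    #   [convert]
    #   svn.trunk = trunk
    #   svn.branches = branches
    #   svn.tags = tags
    #   svn.startrev = 0
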
    def _getchanges(self, rev, full):
        (paths, parents) = self.paths[rev]
        copies = {}
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        if full or not parents:
            # Perform a full checkout on roots
            uuid, module, revnum = revsplit(rev)
            entries = svn.client.ls(
                self.baseurl + quote(module), optrev(revnum), True, self.ctx
            )
            files = [
                n
-                for n, e in pycompat.iteritems(entries)
+                for n, e in entries.items()
                if e.kind == svn.core.svn_node_file
            ]
            self.removed = set()

        files.sort()
        files = pycompat.ziplist(files, [rev] * len(files))
        return (files, copies)

    def getchanges(self, rev, full):
        # reuse cache from getchangedfiles
        if self._changescache[0] == rev and not full:
            (files, copies) = self._changescache[1]
        else:
            (files, copies) = self._getchanges(rev, full)
            # caller caches the result, so free it here to release memory
            del self.paths[rev]
        return (files, copies, set())

    def getchangedfiles(self, rev, i):
        # called from filemap - cache computed values for reuse in getchanges
        (files, copies) = self._getchanges(rev, False)
        self._changescache = (rev, (files, copies))
        return [f[0] for f in files]

    def getcommit(self, rev):
        if rev not in self.commits:
            uuid, module, revnum = revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            #   revision graph backward traversal. Cache all of them
            #   down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            #   isolated branches' parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
            if rev not in self.commits:
                raise error.Abort(_(b'svn: revision %s not found') % revnum)
        revcommit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return revcommit

    def checkrevformat(self, revstr, mapname=b'splicemap'):
        """fails if revision format does not match the correct format"""
        if not re.match(
            br'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
            br'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
            br'{12,12}(.*)@[0-9]+$',
            revstr,
        ):
            raise error.Abort(
                _(b'%s entry %s is not a valid revision identifier')
                % (mapname, revstr)
            )

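    # Illustrative check of the splicemap format validated above (the uuid
    # is made up; not used by the converter): a uuid, a path, then
    # @revision.
    @staticmethod
    def _checkrevformat_sketch():
        pat = (
            br'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
            br'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
            br'{12,12}(.*)@[0-9]+$'
        )
        good = b'svn:13f79535-47bb-0310-9956-ffa450edef68/tags/v1@7'
        assert re.match(pat, good)
        assert not re.match(pat, good.rsplit(b'@', 1)[0])  # missing @rev
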
    def numcommits(self):
        return int(self.head.rsplit(b'@', 1)[1]) - self.startrev

    def gettags(self):
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # every time a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                if not origpaths:
                    origpaths = []
                copies = [
                    (e.copyfrom_path, e.copyfrom_rev, p)
-                    for p, e in pycompat.iteritems(origpaths)
+                    for p, e in origpaths.items()
                    if e.copyfrom_path
                ]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + b'/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest) :]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here /tags/tag.1 is discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = {
                    p: e.copyfrom_path
-                    for p, e in pycompat.iteritems(origpaths)
+                    for p, e in origpaths.items()
                    if e.action == b'A' and e.copyfrom_path
                }
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if not dest.startswith(
                            destroot + b'/'
                        ) or source.startswith(addeds[destroot] + b'/'):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [
                        p
                        for p in pendings
                        if p[2] != badroot
                        and not p[2].startswith(badroot + b'/')
                    ]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split(b'/')[-1]
                    if source.startswith(srctagspath):
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags

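    # Sketch (with illustrative paths; not used by the converter) of why
    # gettags() sorts its (source, sourcerev, dest) copy entries with
    # reverse=True: deeper sources sort first, so specific copies are
    # handled before the enclosing directory copy, and a copy of the tags
    # directory itself, if present, lands at the end where it is popped.
    @staticmethod
    def _copies_order_sketch():
        copies = [
            (b'/trunk', 10, b'/tags/1.0'),
            (b'/trunk/doc', 12, b'/tags/1.0/doc'),
        ]
        copies.sort(reverse=True)
        assert copies[0][0] == b'/trunk/doc'  # most specific first
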
    def converted(self, rev, destrev):
        if not self.wc:
            return
        if self.convertfp is None:
            self.convertfp = open(
                os.path.join(self.wc, b'.svn', b'hg-shamap'), b'ab'
            )
        self.convertfp.write(
            util.tonativeeol(b'%s %d\n' % (destrev, self.revnum(rev)))
        )
        self.convertfp.flush()

    def revid(self, revnum, module=None):
        return b'svn:%s%s@%d' % (self.uuid, module or self.module, revnum)

    def revnum(self, rev):
        return int(rev.split(b'@')[-1])

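    # Round-trip sketch of the two helpers above (the uuid is made up; not
    # used by the converter): revid() builds b'svn:<uuid><module>@<revnum>'
    # and revnum() extracts the numeric part again.
    @staticmethod
    def _revid_roundtrip_sketch():
        uuid = b'13f79535-47bb-0310-9956-ffa450edef68'
        rid = b'svn:%s%s@%d' % (uuid, b'/trunk', 1042)
        assert int(rid.split(b'@')[-1]) == 1042
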
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """

        def findchanges(path, start, stop=None):
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    if stop is not None and revnum <= stop:
                        break

                    for p in paths:
                        if not path.startswith(p) or not paths[p].copyfrom_path:
                            continue
                        newpath = paths[p].copyfrom_path + path[len(p) :]
                        self.ui.debug(
                            b"branch renamed from %s to %s at %d\n"
                            % (path, newpath, revnum)
                        )
                        path = newpath
                        break
                if not paths:
                    revnum = None
                return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug(b'ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            prevmodule = self.reparent(b'')
            dirent = svn.ra.stat(self.ra, path.strip(b'/'), stop)
            self.reparent(prevmodule)
        except svn.core.SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(
                _(b'%s not found up to revision %d') % (path, stop)
            )

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revisions when
            # synchronizing only a subtree, for instance. The created_rev
            # of such empty revisions still has its original value
            # despite all changes having disappeared, and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug(b'ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug(b'ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)

    def reparent(self, module):
        """Reparent the svn transport and return the previous parent."""
        if self.prevmodule == module:
            return module
        svnurl = self.baseurl + quote(module)
        prevmodule = self.prevmodule
        if prevmodule is None:
            prevmodule = b''
        self.ui.debug(b"reparent to %s\n" % svnurl)
        svn.ra.reparent(self.ra, svnurl)
        self.prevmodule = module
        return prevmodule

    def expandpaths(self, rev, paths, parents):
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        progress = self.ui.makeprogress(
            _(b'scanning paths'), unit=_(b'paths'), total=len(paths)
        )
        for i, (path, ent) in enumerate(paths):
            progress.update(i, item=path)
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug(
                    b"copied to %s from %s@%d\n"
                    % (entrypath, copyfrom_path, ent.copyfrom_rev)
                )
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0:  # gone, but had better be a deleted *file*
                self.ui.debug(b"gone from %d\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + b"/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    oroot = parentpath.strip(b'/')
                    nroot = path.strip(b'/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath(b"/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug(
                        b'unknown path in revision %d: %s\n' % (revnum, path)
                    )
            elif kind == svn.core.svn_node_dir:
                if ent.action == b'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == b'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath(b"/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath(b"/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug(
                    b"mark %s came from %s:%d\n"
                    % (path, copyfrompath, ent.copyfrom_rev)
                )
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath(b"/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath) :]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        progress.complete()
        changed.update(removed)
        return (list(changed), removed, copies)

    def _fetch_revisions(self, from_revnum, to_revnum):
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug(
                b"parsing revision %d (%d changes)\n"
                % (revnum, len(orig_paths))
            )

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
-            orig_paths = sorted(pycompat.iteritems(orig_paths))
+            orig_paths = sorted(orig_paths.items())
            root_paths = [
                (p, e) for p, e in orig_paths if self.module.startswith(p)
            ]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path) :]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _(b'found parent of branch %s at %d: %s\n')
                                % (self.module, prevnum, prevmodule)
                            )
            else:
                self.ui.debug(b"no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            date = parsesvndate(date)
            if self.ui.configbool(b'convert', b'localtimezone'):
                date = makedatetimestamp(date[0])

            if message:
                log = self.recode(message)
            else:
                log = b''

            if author:
                author = self.recode(author)
            else:
                author = b''

            try:
                branch = self.module.split(b"/")[-1]
                if branch == self.trunkname:
                    branch = None
            except IndexError:
                branch = None

            cset = commit(
                author=author,
                date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
                desc=log,
                parents=parents,
                branch=branch,
                rev=rev,
            )

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(
            _(b'fetching revision log for "%s" from %d to %d\n')
            % (self.module, from_revnum, to_revnum)
        )

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug(b'revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(
                        paths, revnum, author, date, message
                    )
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except svn.core.SubversionException as xxx_todo_changeme:
            (inst, num) = xxx_todo_changeme.args
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise error.Abort(
                    _(b'svn: branch has no revision %s') % to_revnum
                )
            raise

    def getfile(self, file, rev):
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            return None, None
        try:
            new_module, revnum = revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = stringio()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_file() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitly.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = (b"svn:executable" in info) and b'x' or b''
            mode = (b"svn:special" in info) and b'l' or mode
        except svn.core.SubversionException as e:
            notfound = (
                svn.core.SVN_ERR_FS_NOT_FOUND,
                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND,
            )
            if e.apr_err in notfound:  # File not found
                return None, None
            raise
        if mode == b'l':
            link_prefix = b"link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix) :]
        return data, mode

1294 def _iterfiles(self, path, revnum):
1294 def _iterfiles(self, path, revnum):
1295 """Enumerate all files in path at revnum, recursively."""
1295 """Enumerate all files in path at revnum, recursively."""
1296 path = path.strip(b'/')
1296 path = path.strip(b'/')
1297 pool = svn.core.Pool()
1297 pool = svn.core.Pool()
1298 rpath = b'/'.join([self.baseurl, quote(path)]).strip(b'/')
1298 rpath = b'/'.join([self.baseurl, quote(path)]).strip(b'/')
1299 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1299 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1300 if path:
1300 if path:
1301 path += b'/'
1301 path += b'/'
1302 return (
1302 return (
1303 (path + p)
1303 (path + p)
1304 for p, e in pycompat.iteritems(entries)
1304 for p, e in entries.items()
1305 if e.kind == svn.core.svn_node_file
1305 if e.kind == svn.core.svn_node_file
1306 )
1306 )
1307
1307
1308 def getrelpath(self, path, module=None):
1308 def getrelpath(self, path, module=None):
1309 if module is None:
1309 if module is None:
1310 module = self.module
1310 module = self.module
1311 # Given the repository url of this wc, say
1311 # Given the repository url of this wc, say
1312 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1312 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1313 # extract the "entry" portion (a relative path) from what
1313 # extract the "entry" portion (a relative path) from what
1314 # svn log --xml says, i.e.
1314 # svn log --xml says, i.e.
1315 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1315 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1316 # that is to say "tests/PloneTestCase.py"
1316 # that is to say "tests/PloneTestCase.py"
1317 if path.startswith(module):
1317 if path.startswith(module):
1318 relative = path.rstrip(b'/')[len(module) :]
1318 relative = path.rstrip(b'/')[len(module) :]
1319 if relative.startswith(b'/'):
1319 if relative.startswith(b'/'):
1320 return relative[1:]
1320 return relative[1:]
1321 elif relative == b'':
1321 elif relative == b'':
1322 return relative
1322 return relative
1323
1323
1324 # The path is outside our tracked tree...
1324 # The path is outside our tracked tree...
1325 self.ui.debug(
1325 self.ui.debug(
1326 b'%r is not under %r, ignoring\n'
1326 b'%r is not under %r, ignoring\n'
1327 % (pycompat.bytestr(path), pycompat.bytestr(module))
1327 % (pycompat.bytestr(path), pycompat.bytestr(module))
1328 )
1328 )
1329 return None
1329 return None
1330
1330
1331 def _checkpath(self, path, revnum, module=None):
1331 def _checkpath(self, path, revnum, module=None):
1332 if module is not None:
1332 if module is not None:
1333 prevmodule = self.reparent(b'')
1333 prevmodule = self.reparent(b'')
1334 path = module + b'/' + path
1334 path = module + b'/' + path
1335 try:
1335 try:
1336 # ra.check_path does not like leading slashes very much; they lead
1336 # ra.check_path does not like leading slashes very much; they lead
1337 # to PROPFIND subversion errors
1337 # to PROPFIND subversion errors
1338 return svn.ra.check_path(self.ra, path.strip(b'/'), revnum)
1338 return svn.ra.check_path(self.ra, path.strip(b'/'), revnum)
1339 finally:
1339 finally:
1340 if module is not None:
1340 if module is not None:
1341 self.reparent(prevmodule)
1341 self.reparent(prevmodule)
1342
1342
1343 def _getlog(
1343 def _getlog(
1344 self,
1344 self,
1345 paths,
1345 paths,
1346 start,
1346 start,
1347 end,
1347 end,
1348 limit=0,
1348 limit=0,
1349 discover_changed_paths=True,
1349 discover_changed_paths=True,
1350 strict_node_history=False,
1350 strict_node_history=False,
1351 ):
1351 ):
1352 # Normalize path names, svn >= 1.5 only wants paths relative to
1352 # Normalize path names, svn >= 1.5 only wants paths relative to
1353 # supplied URL
1353 # supplied URL
1354 relpaths = []
1354 relpaths = []
1355 for p in paths:
1355 for p in paths:
1356 if not p.startswith(b'/'):
1356 if not p.startswith(b'/'):
1357 p = self.module + b'/' + p
1357 p = self.module + b'/' + p
1358 relpaths.append(p.strip(b'/'))
1358 relpaths.append(p.strip(b'/'))
1359 args = [
1359 args = [
1360 self.baseurl,
1360 self.baseurl,
1361 relpaths,
1361 relpaths,
1362 start,
1362 start,
1363 end,
1363 end,
1364 limit,
1364 limit,
1365 discover_changed_paths,
1365 discover_changed_paths,
1366 strict_node_history,
1366 strict_node_history,
1367 ]
1367 ]
1368 # developer config: convert.svn.debugsvnlog
1368 # developer config: convert.svn.debugsvnlog
1369 if not self.ui.configbool(b'convert', b'svn.debugsvnlog'):
1369 if not self.ui.configbool(b'convert', b'svn.debugsvnlog'):
1370 return directlogstream(*args)
1370 return directlogstream(*args)
1371 arg = encodeargs(args)
1371 arg = encodeargs(args)
1372 hgexe = procutil.hgexecutable()
1372 hgexe = procutil.hgexecutable()
1373 cmd = b'%s debugsvnlog' % procutil.shellquote(hgexe)
1373 cmd = b'%s debugsvnlog' % procutil.shellquote(hgexe)
1374 stdin, stdout = procutil.popen2(cmd)
1374 stdin, stdout = procutil.popen2(cmd)
1375 stdin.write(arg)
1375 stdin.write(arg)
1376 try:
1376 try:
1377 stdin.close()
1377 stdin.close()
1378 except IOError:
1378 except IOError:
1379 raise error.Abort(
1379 raise error.Abort(
1380 _(
1380 _(
1381 b'Mercurial failed to run itself, check'
1381 b'Mercurial failed to run itself, check'
1382 b' hg executable is in PATH'
1382 b' hg executable is in PATH'
1383 )
1383 )
1384 )
1384 )
1385 return logstream(stdout)
1385 return logstream(stdout)
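# Editorial note (not in the original source): by default the log is
# fetched in a child 'hg debugsvnlog' process; the arguments travel via
# encodeargs() on stdin and the entries stream back through logstream()
# on stdout, keeping the Subversion bindings' log callbacks out of the
# main process.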
1386
1386
1387
1387
1388 pre_revprop_change_template = b'''#!/bin/sh
1388 pre_revprop_change_template = b'''#!/bin/sh
1389
1389
1390 REPOS="$1"
1390 REPOS="$1"
1391 REV="$2"
1391 REV="$2"
1392 USER="$3"
1392 USER="$3"
1393 PROPNAME="$4"
1393 PROPNAME="$4"
1394 ACTION="$5"
1394 ACTION="$5"
1395
1395
1396 %(rules)s
1396 %(rules)s
1397
1397
1398 echo "Changing prohibited revision property" >&2
1398 echo "Changing prohibited revision property" >&2
1399 exit 1
1399 exit 1
1400 '''
1400 '''
1401
1401
1402
1402
1403 def gen_pre_revprop_change_hook(prop_actions_allowed):
1403 def gen_pre_revprop_change_hook(prop_actions_allowed):
1404 rules = []
1404 rules = []
1405 for action, propname in prop_actions_allowed:
1405 for action, propname in prop_actions_allowed:
1406 rules.append(
1406 rules.append(
1407 (
1407 (
1408 b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; '
1408 b'if [ "$ACTION" = "%s" -a "$PROPNAME" = "%s" ]; '
1409 b'then exit 0; fi'
1409 b'then exit 0; fi'
1410 )
1410 )
1411 % (action, propname)
1411 % (action, propname)
1412 )
1412 )
1413 return pre_revprop_change_template % {b'rules': b'\n'.join(rules)}
1413 return pre_revprop_change_template % {b'rules': b'\n'.join(rules)}
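# Editorial example (not in the original source): with
# prop_actions_allowed = [(b'M', b'svn:log')], the rendered hook contains
#   if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
# before the catch-all "exit 1", so only whitelisted action/property
# pairs are accepted.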
1414
1414
1415
1415
1416 class svn_sink(converter_sink, commandline):
1416 class svn_sink(converter_sink, commandline):
1417 commit_re = re.compile(br'Committed revision (\d+).', re.M)
1417 commit_re = re.compile(br'Committed revision (\d+).', re.M)
1418 uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
1418 uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
1419
1419
1420 def prerun(self):
1420 def prerun(self):
1421 if self.wc:
1421 if self.wc:
1422 os.chdir(self.wc)
1422 os.chdir(self.wc)
1423
1423
1424 def postrun(self):
1424 def postrun(self):
1425 if self.wc:
1425 if self.wc:
1426 os.chdir(self.cwd)
1426 os.chdir(self.cwd)
1427
1427
1428 def join(self, name):
1428 def join(self, name):
1429 return os.path.join(self.wc, b'.svn', name)
1429 return os.path.join(self.wc, b'.svn', name)
1430
1430
1431 def revmapfile(self):
1431 def revmapfile(self):
1432 return self.join(b'hg-shamap')
1432 return self.join(b'hg-shamap')
1433
1433
1434 def authorfile(self):
1434 def authorfile(self):
1435 return self.join(b'hg-authormap')
1435 return self.join(b'hg-authormap')
1436
1436
1437 def __init__(self, ui, repotype, path):
1437 def __init__(self, ui, repotype, path):
1438
1438
1439 converter_sink.__init__(self, ui, repotype, path)
1439 converter_sink.__init__(self, ui, repotype, path)
1440 commandline.__init__(self, ui, b'svn')
1440 commandline.__init__(self, ui, b'svn')
1441 self.delete = []
1441 self.delete = []
1442 self.setexec = []
1442 self.setexec = []
1443 self.delexec = []
1443 self.delexec = []
1444 self.copies = []
1444 self.copies = []
1445 self.wc = None
1445 self.wc = None
1446 self.cwd = encoding.getcwd()
1446 self.cwd = encoding.getcwd()
1447
1447
1448 created = False
1448 created = False
1449 if os.path.isfile(os.path.join(path, b'.svn', b'entries')):
1449 if os.path.isfile(os.path.join(path, b'.svn', b'entries')):
1450 self.wc = os.path.realpath(path)
1450 self.wc = os.path.realpath(path)
1451 self.run0(b'update')
1451 self.run0(b'update')
1452 else:
1452 else:
1453 if not re.search(br'^(file|http|https|svn|svn\+ssh)://', path):
1453 if not re.search(br'^(file|http|https|svn|svn\+ssh)://', path):
1454 path = os.path.realpath(path)
1454 path = os.path.realpath(path)
1455 if os.path.isdir(os.path.dirname(path)):
1455 if os.path.isdir(os.path.dirname(path)):
1456 if not os.path.exists(
1456 if not os.path.exists(
1457 os.path.join(path, b'db', b'fs-type')
1457 os.path.join(path, b'db', b'fs-type')
1458 ):
1458 ):
1459 ui.status(
1459 ui.status(
1460 _(b"initializing svn repository '%s'\n")
1460 _(b"initializing svn repository '%s'\n")
1461 % os.path.basename(path)
1461 % os.path.basename(path)
1462 )
1462 )
1463 commandline(ui, b'svnadmin').run0(b'create', path)
1463 commandline(ui, b'svnadmin').run0(b'create', path)
1464 created = path
1464 created = path
1465 path = util.normpath(path)
1465 path = util.normpath(path)
1466 if not path.startswith(b'/'):
1466 if not path.startswith(b'/'):
1467 path = b'/' + path
1467 path = b'/' + path
1468 path = b'file://' + path
1468 path = b'file://' + path
1469
1469
1470 wcpath = os.path.join(
1470 wcpath = os.path.join(
1471 encoding.getcwd(), os.path.basename(path) + b'-wc'
1471 encoding.getcwd(), os.path.basename(path) + b'-wc'
1472 )
1472 )
1473 ui.status(
1473 ui.status(
1474 _(b"initializing svn working copy '%s'\n")
1474 _(b"initializing svn working copy '%s'\n")
1475 % os.path.basename(wcpath)
1475 % os.path.basename(wcpath)
1476 )
1476 )
1477 self.run0(b'checkout', path, wcpath)
1477 self.run0(b'checkout', path, wcpath)
1478
1478
1479 self.wc = wcpath
1479 self.wc = wcpath
1480 self.opener = vfsmod.vfs(self.wc)
1480 self.opener = vfsmod.vfs(self.wc)
1481 self.wopener = vfsmod.vfs(self.wc)
1481 self.wopener = vfsmod.vfs(self.wc)
1482 self.childmap = mapfile(ui, self.join(b'hg-childmap'))
1482 self.childmap = mapfile(ui, self.join(b'hg-childmap'))
1483 if util.checkexec(self.wc):
1483 if util.checkexec(self.wc):
1484 self.is_exec = util.isexec
1484 self.is_exec = util.isexec
1485 else:
1485 else:
1486 self.is_exec = None
1486 self.is_exec = None
1487
1487
1488 if created:
1488 if created:
1489 prop_actions_allowed = [
1489 prop_actions_allowed = [
1490 (b'M', b'svn:log'),
1490 (b'M', b'svn:log'),
1491 (b'A', b'hg:convert-branch'),
1491 (b'A', b'hg:convert-branch'),
1492 (b'A', b'hg:convert-rev'),
1492 (b'A', b'hg:convert-rev'),
1493 ]
1493 ]
1494
1494
1495 if self.ui.configbool(
1495 if self.ui.configbool(
1496 b'convert', b'svn.dangerous-set-commit-dates'
1496 b'convert', b'svn.dangerous-set-commit-dates'
1497 ):
1497 ):
1498 prop_actions_allowed.append((b'M', b'svn:date'))
1498 prop_actions_allowed.append((b'M', b'svn:date'))
1499
1499
1500 hook = os.path.join(created, b'hooks', b'pre-revprop-change')
1500 hook = os.path.join(created, b'hooks', b'pre-revprop-change')
1501 fp = open(hook, b'wb')
1501 fp = open(hook, b'wb')
1502 fp.write(gen_pre_revprop_change_hook(prop_actions_allowed))
1502 fp.write(gen_pre_revprop_change_hook(prop_actions_allowed))
1503 fp.close()
1503 fp.close()
1504 util.setflags(hook, False, True)
1504 util.setflags(hook, False, True)
1505
1505
1506 output = self.run0(b'info')
1506 output = self.run0(b'info')
1507 self.uuid = self.uuid_re.search(output).group(1).strip()
1507 self.uuid = self.uuid_re.search(output).group(1).strip()
1508
1508
1509 def wjoin(self, *names):
1509 def wjoin(self, *names):
1510 return os.path.join(self.wc, *names)
1510 return os.path.join(self.wc, *names)
1511
1511
1512 @propertycache
1512 @propertycache
1513 def manifest(self):
1513 def manifest(self):
1514 # As of svn 1.7, the "add" command fails when receiving
1514 # As of svn 1.7, the "add" command fails when receiving
1515 # already tracked entries, so we have to track and filter them
1515 # already tracked entries, so we have to track and filter them
1516 # ourselves.
1516 # ourselves.
1517 m = set()
1517 m = set()
1518 output = self.run0(b'ls', recursive=True, xml=True)
1518 output = self.run0(b'ls', recursive=True, xml=True)
1519 doc = xml.dom.minidom.parseString(output)
1519 doc = xml.dom.minidom.parseString(output)
1520 for e in doc.getElementsByTagName('entry'):
1520 for e in doc.getElementsByTagName('entry'):
1521 for n in e.childNodes:
1521 for n in e.childNodes:
1522 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1522 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1523 continue
1523 continue
1524 name = ''.join(
1524 name = ''.join(
1525 c.data for c in n.childNodes if c.nodeType == c.TEXT_NODE
1525 c.data for c in n.childNodes if c.nodeType == c.TEXT_NODE
1526 )
1526 )
1527 # Entries are compared with names coming from
1527 # Entries are compared with names coming from
1528 # mercurial, so bytes with undefined encoding. Our
1528 # mercurial, so bytes with undefined encoding. Our
1529 # best bet is to assume they are in local
1529 # best bet is to assume they are in local
1530 # encoding. They will be passed to command line calls
1530 # encoding. They will be passed to command line calls
1531 # later anyway, so they better be.
1531 # later anyway, so they better be.
1532 m.add(encoding.unitolocal(name))
1532 m.add(encoding.unitolocal(name))
1533 break
1533 break
1534 return m
1534 return m
1535
1535
1536 def putfile(self, filename, flags, data):
1536 def putfile(self, filename, flags, data):
1537 if b'l' in flags:
1537 if b'l' in flags:
1538 self.wopener.symlink(data, filename)
1538 self.wopener.symlink(data, filename)
1539 else:
1539 else:
1540 try:
1540 try:
1541 if os.path.islink(self.wjoin(filename)):
1541 if os.path.islink(self.wjoin(filename)):
1542 os.unlink(filename)
1542 os.unlink(filename)
1543 except OSError:
1543 except OSError:
1544 pass
1544 pass
1545
1545
1546 if self.is_exec:
1546 if self.is_exec:
1547 # We need to check executability of the file before the change,
1547 # We need to check executability of the file before the change,
1548 # because `vfs.write` is able to reset exec bit.
1548 # because `vfs.write` is able to reset exec bit.
1549 wasexec = False
1549 wasexec = False
1550 if os.path.exists(self.wjoin(filename)):
1550 if os.path.exists(self.wjoin(filename)):
1551 wasexec = self.is_exec(self.wjoin(filename))
1551 wasexec = self.is_exec(self.wjoin(filename))
1552
1552
1553 self.wopener.write(filename, data)
1553 self.wopener.write(filename, data)
1554
1554
1555 if self.is_exec:
1555 if self.is_exec:
1556 if wasexec:
1556 if wasexec:
1557 if b'x' not in flags:
1557 if b'x' not in flags:
1558 self.delexec.append(filename)
1558 self.delexec.append(filename)
1559 else:
1559 else:
1560 if b'x' in flags:
1560 if b'x' in flags:
1561 self.setexec.append(filename)
1561 self.setexec.append(filename)
1562 util.setflags(self.wjoin(filename), False, b'x' in flags)
1562 util.setflags(self.wjoin(filename), False, b'x' in flags)
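# Editorial note (not in the original source): setexec/delexec only
# queue the flag changes here; the matching 'svn propset/propdel
# svn:executable' calls are batched later in putcommit().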
1563
1563
1564 def _copyfile(self, source, dest):
1564 def _copyfile(self, source, dest):
1565 # SVN's copy command pukes if the destination file exists, but
1565 # SVN's copy command pukes if the destination file exists, but
1566 # our copyfile method expects to record a copy that has
1566 # our copyfile method expects to record a copy that has
1567 # already occurred. Cross the semantic gap.
1567 # already occurred. Cross the semantic gap.
1568 wdest = self.wjoin(dest)
1568 wdest = self.wjoin(dest)
1569 exists = os.path.lexists(wdest)
1569 exists = os.path.lexists(wdest)
1570 if exists:
1570 if exists:
1571 fd, tempname = pycompat.mkstemp(
1571 fd, tempname = pycompat.mkstemp(
1572 prefix=b'hg-copy-', dir=os.path.dirname(wdest)
1572 prefix=b'hg-copy-', dir=os.path.dirname(wdest)
1573 )
1573 )
1574 os.close(fd)
1574 os.close(fd)
1575 os.unlink(tempname)
1575 os.unlink(tempname)
1576 os.rename(wdest, tempname)
1576 os.rename(wdest, tempname)
1577 try:
1577 try:
1578 self.run0(b'copy', source, dest)
1578 self.run0(b'copy', source, dest)
1579 finally:
1579 finally:
1580 self.manifest.add(dest)
1580 self.manifest.add(dest)
1581 if exists:
1581 if exists:
1582 try:
1582 try:
1583 os.unlink(wdest)
1583 os.unlink(wdest)
1584 except OSError:
1584 except OSError:
1585 pass
1585 pass
1586 os.rename(tempname, wdest)
1586 os.rename(tempname, wdest)
1587
1587
1588 def dirs_of(self, files):
1588 def dirs_of(self, files):
1589 dirs = set()
1589 dirs = set()
1590 for f in files:
1590 for f in files:
1591 if os.path.isdir(self.wjoin(f)):
1591 if os.path.isdir(self.wjoin(f)):
1592 dirs.add(f)
1592 dirs.add(f)
1593 i = len(f)
1593 i = len(f)
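# Editorial note (not in the original source): iter(callable, sentinel)
# keeps calling f.rfind(b'/', 0, i) until it returns -1, yielding the
# index of each b'/' from right to left, so every ancestor directory
# prefix of f is added.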
1594 for i in iter(lambda: f.rfind(b'/', 0, i), -1):
1594 for i in iter(lambda: f.rfind(b'/', 0, i), -1):
1595 dirs.add(f[:i])
1595 dirs.add(f[:i])
1596 return dirs
1596 return dirs
1597
1597
1598 def add_dirs(self, files):
1598 def add_dirs(self, files):
1599 add_dirs = [
1599 add_dirs = [
1600 d for d in sorted(self.dirs_of(files)) if d not in self.manifest
1600 d for d in sorted(self.dirs_of(files)) if d not in self.manifest
1601 ]
1601 ]
1602 if add_dirs:
1602 if add_dirs:
1603 self.manifest.update(add_dirs)
1603 self.manifest.update(add_dirs)
1604 self.xargs(add_dirs, b'add', non_recursive=True, quiet=True)
1604 self.xargs(add_dirs, b'add', non_recursive=True, quiet=True)
1605 return add_dirs
1605 return add_dirs
1606
1606
1607 def add_files(self, files):
1607 def add_files(self, files):
1608 files = [f for f in files if f not in self.manifest]
1608 files = [f for f in files if f not in self.manifest]
1609 if files:
1609 if files:
1610 self.manifest.update(files)
1610 self.manifest.update(files)
1611 self.xargs(files, b'add', quiet=True)
1611 self.xargs(files, b'add', quiet=True)
1612 return files
1612 return files
1613
1613
1614 def addchild(self, parent, child):
1614 def addchild(self, parent, child):
1615 self.childmap[parent] = child
1615 self.childmap[parent] = child
1616
1616
1617 def revid(self, rev):
1617 def revid(self, rev):
1618 return b"svn:%s@%s" % (self.uuid, rev)
1618 return b"svn:%s@%s" % (self.uuid, rev)
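# Editorial example (not in the original source): revids look like
# b'svn:af82cc90-c2d2-43cd-b1aa-c8a78449440a@42' -- the repository UUID
# plus a revision number; the UUID shown here is made up.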
1619
1619
1620 def putcommit(
1620 def putcommit(
1621 self, files, copies, parents, commit, source, revmap, full, cleanp2
1621 self, files, copies, parents, commit, source, revmap, full, cleanp2
1622 ):
1622 ):
1623 for parent in parents:
1623 for parent in parents:
1624 try:
1624 try:
1625 return self.revid(self.childmap[parent])
1625 return self.revid(self.childmap[parent])
1626 except KeyError:
1626 except KeyError:
1627 pass
1627 pass
1628
1628
1629 # Apply changes to working copy
1629 # Apply changes to working copy
1630 for f, v in files:
1630 for f, v in files:
1631 data, mode = source.getfile(f, v)
1631 data, mode = source.getfile(f, v)
1632 if data is None:
1632 if data is None:
1633 self.delete.append(f)
1633 self.delete.append(f)
1634 else:
1634 else:
1635 self.putfile(f, mode, data)
1635 self.putfile(f, mode, data)
1636 if f in copies:
1636 if f in copies:
1637 self.copies.append([copies[f], f])
1637 self.copies.append([copies[f], f])
1638 if full:
1638 if full:
1639 self.delete.extend(sorted(self.manifest.difference(files)))
1639 self.delete.extend(sorted(self.manifest.difference(files)))
1640 files = [f[0] for f in files]
1640 files = [f[0] for f in files]
1641
1641
1642 entries = set(self.delete)
1642 entries = set(self.delete)
1643 files = frozenset(files)
1643 files = frozenset(files)
1644 entries.update(self.add_dirs(files.difference(entries)))
1644 entries.update(self.add_dirs(files.difference(entries)))
1645 if self.copies:
1645 if self.copies:
1646 for s, d in self.copies:
1646 for s, d in self.copies:
1647 self._copyfile(s, d)
1647 self._copyfile(s, d)
1648 self.copies = []
1648 self.copies = []
1649 if self.delete:
1649 if self.delete:
1650 self.xargs(self.delete, b'delete')
1650 self.xargs(self.delete, b'delete')
1651 for f in self.delete:
1651 for f in self.delete:
1652 self.manifest.remove(f)
1652 self.manifest.remove(f)
1653 self.delete = []
1653 self.delete = []
1654 entries.update(self.add_files(files.difference(entries)))
1654 entries.update(self.add_files(files.difference(entries)))
1655 if self.delexec:
1655 if self.delexec:
1656 self.xargs(self.delexec, b'propdel', b'svn:executable')
1656 self.xargs(self.delexec, b'propdel', b'svn:executable')
1657 self.delexec = []
1657 self.delexec = []
1658 if self.setexec:
1658 if self.setexec:
1659 self.xargs(self.setexec, b'propset', b'svn:executable', b'*')
1659 self.xargs(self.setexec, b'propset', b'svn:executable', b'*')
1660 self.setexec = []
1660 self.setexec = []
1661
1661
1662 fd, messagefile = pycompat.mkstemp(prefix=b'hg-convert-')
1662 fd, messagefile = pycompat.mkstemp(prefix=b'hg-convert-')
1663 fp = os.fdopen(fd, 'wb')
1663 fp = os.fdopen(fd, 'wb')
1664 fp.write(util.tonativeeol(commit.desc))
1664 fp.write(util.tonativeeol(commit.desc))
1665 fp.close()
1665 fp.close()
1666 try:
1666 try:
1667 output = self.run0(
1667 output = self.run0(
1668 b'commit',
1668 b'commit',
1669 username=stringutil.shortuser(commit.author),
1669 username=stringutil.shortuser(commit.author),
1670 file=messagefile,
1670 file=messagefile,
1671 encoding=b'utf-8',
1671 encoding=b'utf-8',
1672 )
1672 )
1673 try:
1673 try:
1674 rev = self.commit_re.search(output).group(1)
1674 rev = self.commit_re.search(output).group(1)
1675 except AttributeError:
1675 except AttributeError:
1676 if not files:
1676 if not files:
1677 return parents[0] if parents else b'None'
1677 return parents[0] if parents else b'None'
1678 self.ui.warn(_(b'unexpected svn output:\n'))
1678 self.ui.warn(_(b'unexpected svn output:\n'))
1679 self.ui.warn(output)
1679 self.ui.warn(output)
1680 raise error.Abort(_(b'unable to cope with svn output'))
1680 raise error.Abort(_(b'unable to cope with svn output'))
1681 if commit.rev:
1681 if commit.rev:
1682 self.run(
1682 self.run(
1683 b'propset',
1683 b'propset',
1684 b'hg:convert-rev',
1684 b'hg:convert-rev',
1685 commit.rev,
1685 commit.rev,
1686 revprop=True,
1686 revprop=True,
1687 revision=rev,
1687 revision=rev,
1688 )
1688 )
1689 if commit.branch and commit.branch != b'default':
1689 if commit.branch and commit.branch != b'default':
1690 self.run(
1690 self.run(
1691 b'propset',
1691 b'propset',
1692 b'hg:convert-branch',
1692 b'hg:convert-branch',
1693 commit.branch,
1693 commit.branch,
1694 revprop=True,
1694 revprop=True,
1695 revision=rev,
1695 revision=rev,
1696 )
1696 )
1697
1697
1698 if self.ui.configbool(
1698 if self.ui.configbool(
1699 b'convert', b'svn.dangerous-set-commit-dates'
1699 b'convert', b'svn.dangerous-set-commit-dates'
1700 ):
1700 ):
1701 # Subversion always uses UTC to represent date and time
1701 # Subversion always uses UTC to represent date and time
1702 date = dateutil.parsedate(commit.date)
1702 date = dateutil.parsedate(commit.date)
1703 date = (date[0], 0)
1703 date = (date[0], 0)
1704
1704
1705 # The only way to set the date and time of an svn commit is to use propset after the commit is done
1705 # The only way to set the date and time of an svn commit is to use propset after the commit is done
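# Editorial aside (not in the original source): the call below is
# roughly equivalent to running
#   svn propset --revprop -r <rev> svn:date <UTC timestamp>
# by hand, which the generated pre-revprop-change hook must allow.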
1706 self.run(
1706 self.run(
1707 b'propset',
1707 b'propset',
1708 b'svn:date',
1708 b'svn:date',
1709 formatsvndate(date),
1709 formatsvndate(date),
1710 revprop=True,
1710 revprop=True,
1711 revision=rev,
1711 revision=rev,
1712 )
1712 )
1713
1713
1714 for parent in parents:
1714 for parent in parents:
1715 self.addchild(parent, rev)
1715 self.addchild(parent, rev)
1716 return self.revid(rev)
1716 return self.revid(rev)
1717 finally:
1717 finally:
1718 os.unlink(messagefile)
1718 os.unlink(messagefile)
1719
1719
1720 def puttags(self, tags):
1720 def puttags(self, tags):
1721 self.ui.warn(_(b'writing Subversion tags is not yet implemented\n'))
1721 self.ui.warn(_(b'writing Subversion tags is not yet implemented\n'))
1722 return None, None
1722 return None, None
1723
1723
1724 def hascommitfrommap(self, rev):
1724 def hascommitfrommap(self, rev):
1725 # We trust that revisions referenced in a map are still present
1725 # We trust that revisions referenced in a map are still present
1726 # TODO: implement something better if necessary and feasible
1726 # TODO: implement something better if necessary and feasible
1727 return True
1727 return True
1728
1728
1729 def hascommitforsplicemap(self, rev):
1729 def hascommitforsplicemap(self, rev):
1730 # This is not correct as one can convert to an existing subversion
1730 # This is not correct as one can convert to an existing subversion
1731 # repository and childmap would not list all revisions. Too bad.
1731 # repository and childmap would not list all revisions. Too bad.
1732 if rev in self.childmap:
1732 if rev in self.childmap:
1733 return True
1733 return True
1734 raise error.Abort(
1734 raise error.Abort(
1735 _(
1735 _(
1736 b'splice map revision %s not found in subversion '
1736 b'splice map revision %s not found in subversion '
1737 b'child map (revision lookups are not implemented)'
1737 b'child map (revision lookups are not implemented)'
1738 )
1738 )
1739 % rev
1739 % rev
1740 )
1740 )
@@ -1,479 +1,479 @@
1 """automatically manage newlines in repository files
1 """automatically manage newlines in repository files
2
2
3 This extension allows you to manage the type of line endings (CRLF or
3 This extension allows you to manage the type of line endings (CRLF or
4 LF) that are used in the repository and in the local working
4 LF) that are used in the repository and in the local working
5 directory. That way you can get CRLF line endings on Windows and LF on
5 directory. That way you can get CRLF line endings on Windows and LF on
6 Unix/Mac, thereby letting everybody use their OS native line endings.
6 Unix/Mac, thereby letting everybody use their OS native line endings.
7
7
8 The extension reads its configuration from a versioned ``.hgeol``
8 The extension reads its configuration from a versioned ``.hgeol``
9 configuration file found in the root of the working directory. The
9 configuration file found in the root of the working directory. The
10 ``.hgeol`` file uses the same syntax as all other Mercurial
10 ``.hgeol`` file uses the same syntax as all other Mercurial
11 configuration files. It uses two sections, ``[patterns]`` and
11 configuration files. It uses two sections, ``[patterns]`` and
12 ``[repository]``.
12 ``[repository]``.
13
13
14 The ``[patterns]`` section specifies how line endings should be
14 The ``[patterns]`` section specifies how line endings should be
15 converted between the working directory and the repository. The format is
15 converted between the working directory and the repository. The format is
16 specified by a file pattern. The first match is used, so put more
16 specified by a file pattern. The first match is used, so put more
17 specific patterns first. The available line endings are ``LF``,
17 specific patterns first. The available line endings are ``LF``,
18 ``CRLF``, and ``BIN``.
18 ``CRLF``, and ``BIN``.
19
19
20 Files with the declared format of ``CRLF`` or ``LF`` are always
20 Files with the declared format of ``CRLF`` or ``LF`` are always
21 checked out and stored in the repository in that format and files
21 checked out and stored in the repository in that format and files
22 declared to be binary (``BIN``) are left unchanged. Additionally,
22 declared to be binary (``BIN``) are left unchanged. Additionally,
23 ``native`` is an alias for checking out in the platform's default line
23 ``native`` is an alias for checking out in the platform's default line
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
26 default behavior; it is only needed if you need to override a later,
26 default behavior; it is only needed if you need to override a later,
27 more general pattern.
27 more general pattern.
28
28
29 The optional ``[repository]`` section specifies the line endings to
29 The optional ``[repository]`` section specifies the line endings to
30 use for files stored in the repository. It has a single setting,
30 use for files stored in the repository. It has a single setting,
31 ``native``, which determines the storage line endings for files
31 ``native``, which determines the storage line endings for files
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
35 will be converted to ``LF`` when stored in the repository. Files
35 will be converted to ``LF`` when stored in the repository. Files
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
37 are always stored as-is in the repository.
37 are always stored as-is in the repository.
38
38
39 Example versioned ``.hgeol`` file::
39 Example versioned ``.hgeol`` file::
40
40
41 [patterns]
41 [patterns]
42 **.py = native
42 **.py = native
43 **.vcproj = CRLF
43 **.vcproj = CRLF
44 **.txt = native
44 **.txt = native
45 Makefile = LF
45 Makefile = LF
46 **.jpg = BIN
46 **.jpg = BIN
47
47
48 [repository]
48 [repository]
49 native = LF
49 native = LF
50
50
51 .. note::
51 .. note::
52
52
53 The rules will first apply when files are touched in the working
53 The rules will first apply when files are touched in the working
54 directory, e.g. by updating to null and back to tip to touch all files.
54 directory, e.g. by updating to null and back to tip to touch all files.
55
55
56 The extension uses an optional ``[eol]`` section read from both the
56 The extension uses an optional ``[eol]`` section read from both the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
58 latter overriding the former. You can use that section to control the
58 latter overriding the former. You can use that section to control the
59 overall behavior. There are three settings:
59 overall behavior. There are three settings:
60
60
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
62 ``CRLF`` to override the default interpretation of ``native`` for
62 ``CRLF`` to override the default interpretation of ``native`` for
63 checkout. This can be used with :hg:`archive` on Unix, say, to
63 checkout. This can be used with :hg:`archive` on Unix, say, to
64 generate an archive where files have line endings for Windows.
64 generate an archive where files have line endings for Windows.
65
65
66 - ``eol.only-consistent`` (default True) can be set to False to make
66 - ``eol.only-consistent`` (default True) can be set to False to make
67 the extension convert files with inconsistent EOLs. Inconsistent
67 the extension convert files with inconsistent EOLs. Inconsistent
68 means that there is both ``CRLF`` and ``LF`` present in the file.
68 means that there is both ``CRLF`` and ``LF`` present in the file.
69 Such files are normally not touched under the assumption that they
69 Such files are normally not touched under the assumption that they
70 have mixed EOLs on purpose.
70 have mixed EOLs on purpose.
71
71
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
73 ensure that converted files end with an EOL character (either ``\\n``
73 ensure that converted files end with an EOL character (either ``\\n``
74 or ``\\r\\n`` as per the configured patterns).
74 or ``\\r\\n`` as per the configured patterns).
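For example, an ``[eol]`` section overriding all three settings might
look like this (an illustrative sketch, not part of the original help
text)::

  [eol]
  native = CRLF
  only-consistent = False
  fix-trailing-newline = True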
75
75
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
77 like the deprecated win32text extension does. This means that you can
77 like the deprecated win32text extension does. This means that you can
78 disable win32text and enable eol and your filters will still work. You
78 disable win32text and enable eol and your filters will still work. You
79 only need these filters until you have prepared a ``.hgeol`` file.
79 only need these filters until you have prepared a ``.hgeol`` file.
80
80
81 The ``win32text.forbid*`` hooks provided by the win32text extension
81 The ``win32text.forbid*`` hooks provided by the win32text extension
82 have been unified into a single hook named ``eol.checkheadshook``. The
82 have been unified into a single hook named ``eol.checkheadshook``. The
83 hook will look up the expected line endings from the ``.hgeol`` file,
83 hook will look up the expected line endings from the ``.hgeol`` file,
84 which means you must migrate to a ``.hgeol`` file before using
84 which means you must migrate to a ``.hgeol`` file before using
85 the hook. ``eol.checkheadshook`` only checks heads; invalid
85 the hook. ``eol.checkheadshook`` only checks heads; invalid
86 intermediate revisions may still be pushed. To forbid them completely, use the
86 intermediate revisions may still be pushed. To forbid them completely, use the
87 ``eol.checkallhook`` hook. These hooks are best used as
87 ``eol.checkallhook`` hook. These hooks are best used as
88 ``pretxnchangegroup`` hooks.
88 ``pretxnchangegroup`` hooks.
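A minimal wiring in ``hgrc`` might look like this (a sketch, assuming
the extension is shipped as ``hgext.eol`` as in core Mercurial)::

  [hooks]
  pretxnchangegroup.eol = python:hgext.eol.checkallhook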
89
89
90 See :hg:`help patterns` for more information about the glob patterns
90 See :hg:`help patterns` for more information about the glob patterns
91 used.
91 used.
92 """
92 """
93
93
94
94
95 import os
95 import os
96 import re
96 import re
97 from mercurial.i18n import _
97 from mercurial.i18n import _
98 from mercurial import (
98 from mercurial import (
99 config,
99 config,
100 error as errormod,
100 error as errormod,
101 extensions,
101 extensions,
102 match,
102 match,
103 pycompat,
103 pycompat,
104 registrar,
104 registrar,
105 scmutil,
105 scmutil,
106 util,
106 util,
107 )
107 )
108 from mercurial.utils import stringutil
108 from mercurial.utils import stringutil
109
109
110 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
110 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
111 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
111 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
112 # be specifying the version(s) of Mercurial they are tested with, or
112 # be specifying the version(s) of Mercurial they are tested with, or
113 # leave the attribute unspecified.
113 # leave the attribute unspecified.
114 testedwith = b'ships-with-hg-core'
114 testedwith = b'ships-with-hg-core'
115
115
116 configtable = {}
116 configtable = {}
117 configitem = registrar.configitem(configtable)
117 configitem = registrar.configitem(configtable)
118
118
119 configitem(
119 configitem(
120 b'eol',
120 b'eol',
121 b'fix-trailing-newline',
121 b'fix-trailing-newline',
122 default=False,
122 default=False,
123 )
123 )
124 configitem(
124 configitem(
125 b'eol',
125 b'eol',
126 b'native',
126 b'native',
127 default=pycompat.oslinesep,
127 default=pycompat.oslinesep,
128 )
128 )
129 configitem(
129 configitem(
130 b'eol',
130 b'eol',
131 b'only-consistent',
131 b'only-consistent',
132 default=True,
132 default=True,
133 )
133 )
134
134
135 # Matches a lone LF, i.e., one that is not part of CRLF.
135 # Matches a lone LF, i.e., one that is not part of CRLF.
136 singlelf = re.compile(b'(^|[^\r])\n')
136 singlelf = re.compile(b'(^|[^\r])\n')
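# Editorial example (not in the original source):
# singlelf.search(b'a\r\nb') is None, while singlelf.search(b'a\nb\r\n')
# matches the bare b'\n' after b'a'. The pattern also consumes the
# preceding character, which is harmless since callers only test whether
# a match exists.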
137
137
138
138
139 def inconsistenteol(data):
139 def inconsistenteol(data):
140 return b'\r\n' in data and singlelf.search(data)
140 return b'\r\n' in data and singlelf.search(data)
141
141
142
142
143 def tolf(s, params, ui, **kwargs):
143 def tolf(s, params, ui, **kwargs):
144 """Filter to convert to LF EOLs."""
144 """Filter to convert to LF EOLs."""
145 if stringutil.binary(s):
145 if stringutil.binary(s):
146 return s
146 return s
147 if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
147 if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
148 return s
148 return s
149 if (
149 if (
150 ui.configbool(b'eol', b'fix-trailing-newline')
150 ui.configbool(b'eol', b'fix-trailing-newline')
151 and s
151 and s
152 and not s.endswith(b'\n')
152 and not s.endswith(b'\n')
153 ):
153 ):
154 s = s + b'\n'
154 s = s + b'\n'
155 return util.tolf(s)
155 return util.tolf(s)
156
156
157
157
158 def tocrlf(s, params, ui, **kwargs):
158 def tocrlf(s, params, ui, **kwargs):
159 """Filter to convert to CRLF EOLs."""
159 """Filter to convert to CRLF EOLs."""
160 if stringutil.binary(s):
160 if stringutil.binary(s):
161 return s
161 return s
162 if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
162 if ui.configbool(b'eol', b'only-consistent') and inconsistenteol(s):
163 return s
163 return s
164 if (
164 if (
165 ui.configbool(b'eol', b'fix-trailing-newline')
165 ui.configbool(b'eol', b'fix-trailing-newline')
166 and s
166 and s
167 and not s.endswith(b'\n')
167 and not s.endswith(b'\n')
168 ):
168 ):
169 s = s + b'\n'
169 s = s + b'\n'
170 return util.tocrlf(s)
170 return util.tocrlf(s)
171
171
172
172
173 def isbinary(s, params, ui, **kwargs):
173 def isbinary(s, params, ui, **kwargs):
174 """Filter to do nothing with the file."""
174 """Filter to do nothing with the file."""
175 return s
175 return s
176
176
177
177
178 filters = {
178 filters = {
179 b'to-lf': tolf,
179 b'to-lf': tolf,
180 b'to-crlf': tocrlf,
180 b'to-crlf': tocrlf,
181 b'is-binary': isbinary,
181 b'is-binary': isbinary,
182 # The following provide backwards compatibility with win32text
182 # The following provide backwards compatibility with win32text
183 b'cleverencode:': tolf,
183 b'cleverencode:': tolf,
184 b'cleverdecode:': tocrlf,
184 b'cleverdecode:': tocrlf,
185 }
185 }
186
186
187
187
188 class eolfile(object):
188 class eolfile(object):
189 def __init__(self, ui, root, data):
189 def __init__(self, ui, root, data):
190 self._decode = {
190 self._decode = {
191 b'LF': b'to-lf',
191 b'LF': b'to-lf',
192 b'CRLF': b'to-crlf',
192 b'CRLF': b'to-crlf',
193 b'BIN': b'is-binary',
193 b'BIN': b'is-binary',
194 }
194 }
195 self._encode = {
195 self._encode = {
196 b'LF': b'to-lf',
196 b'LF': b'to-lf',
197 b'CRLF': b'to-crlf',
197 b'CRLF': b'to-crlf',
198 b'BIN': b'is-binary',
198 b'BIN': b'is-binary',
199 }
199 }
200
200
201 self.cfg = config.config()
201 self.cfg = config.config()
202 # Our files should not be touched. The pattern must be
202 # Our files should not be touched. The pattern must be
203 # inserted first to override a '** = native' pattern.
203 # inserted first to override a '** = native' pattern.
204 self.cfg.set(b'patterns', b'.hg*', b'BIN', b'eol')
204 self.cfg.set(b'patterns', b'.hg*', b'BIN', b'eol')
205 # We can then parse the user's patterns.
205 # We can then parse the user's patterns.
206 self.cfg.parse(b'.hgeol', data)
206 self.cfg.parse(b'.hgeol', data)
207
207
208 isrepolf = self.cfg.get(b'repository', b'native') != b'CRLF'
208 isrepolf = self.cfg.get(b'repository', b'native') != b'CRLF'
209 self._encode[b'NATIVE'] = isrepolf and b'to-lf' or b'to-crlf'
209 self._encode[b'NATIVE'] = isrepolf and b'to-lf' or b'to-crlf'
210 iswdlf = ui.config(b'eol', b'native') in (b'LF', b'\n')
210 iswdlf = ui.config(b'eol', b'native') in (b'LF', b'\n')
211 self._decode[b'NATIVE'] = iswdlf and b'to-lf' or b'to-crlf'
211 self._decode[b'NATIVE'] = iswdlf and b'to-lf' or b'to-crlf'
212
212
213 include = []
213 include = []
214 exclude = []
214 exclude = []
215 self.patterns = []
215 self.patterns = []
216 for pattern, style in self.cfg.items(b'patterns'):
216 for pattern, style in self.cfg.items(b'patterns'):
217 key = style.upper()
217 key = style.upper()
218 if key == b'BIN':
218 if key == b'BIN':
219 exclude.append(pattern)
219 exclude.append(pattern)
220 else:
220 else:
221 include.append(pattern)
221 include.append(pattern)
222 m = match.match(root, b'', [pattern])
222 m = match.match(root, b'', [pattern])
223 self.patterns.append((pattern, key, m))
223 self.patterns.append((pattern, key, m))
224 # This will match the files for which we need to care
224 # This will match the files for which we need to care
225 # about inconsistent newlines.
225 # about inconsistent newlines.
226 self.match = match.match(root, b'', [], include, exclude)
226 self.match = match.match(root, b'', [], include, exclude)
227
227
228 def copytoui(self, ui):
228 def copytoui(self, ui):
229 newpatterns = {pattern for pattern, key, m in self.patterns}
229 newpatterns = {pattern for pattern, key, m in self.patterns}
230 for section in (b'decode', b'encode'):
230 for section in (b'decode', b'encode'):
231 for oldpattern, _filter in ui.configitems(section):
231 for oldpattern, _filter in ui.configitems(section):
232 if oldpattern not in newpatterns:
232 if oldpattern not in newpatterns:
233 if ui.configsource(section, oldpattern) == b'eol':
233 if ui.configsource(section, oldpattern) == b'eol':
234 ui.setconfig(section, oldpattern, b'!', b'eol')
234 ui.setconfig(section, oldpattern, b'!', b'eol')
235 for pattern, key, m in self.patterns:
235 for pattern, key, m in self.patterns:
236 try:
236 try:
237 ui.setconfig(b'decode', pattern, self._decode[key], b'eol')
237 ui.setconfig(b'decode', pattern, self._decode[key], b'eol')
238 ui.setconfig(b'encode', pattern, self._encode[key], b'eol')
238 ui.setconfig(b'encode', pattern, self._encode[key], b'eol')
239 except KeyError:
239 except KeyError:
240 ui.warn(
240 ui.warn(
241 _(b"ignoring unknown EOL style '%s' from %s\n")
241 _(b"ignoring unknown EOL style '%s' from %s\n")
242 % (key, self.cfg.source(b'patterns', pattern))
242 % (key, self.cfg.source(b'patterns', pattern))
243 )
243 )
244 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
244 # eol.only-consistent can be specified in ~/.hgrc or .hgeol
245 for k, v in self.cfg.items(b'eol'):
245 for k, v in self.cfg.items(b'eol'):
246 ui.setconfig(b'eol', k, v, b'eol')
246 ui.setconfig(b'eol', k, v, b'eol')
247
247
248 def checkrev(self, repo, ctx, files):
248 def checkrev(self, repo, ctx, files):
249 failed = []
249 failed = []
250 for f in files or ctx.files():
250 for f in files or ctx.files():
251 if f not in ctx:
251 if f not in ctx:
252 continue
252 continue
253 for pattern, key, m in self.patterns:
253 for pattern, key, m in self.patterns:
254 if not m(f):
254 if not m(f):
255 continue
255 continue
256 target = self._encode[key]
256 target = self._encode[key]
257 data = ctx[f].data()
257 data = ctx[f].data()
258 if (
258 if (
259 target == b"to-lf"
259 target == b"to-lf"
260 and b"\r\n" in data
260 and b"\r\n" in data
261 or target == b"to-crlf"
261 or target == b"to-crlf"
262 and singlelf.search(data)
262 and singlelf.search(data)
263 ):
263 ):
264 failed.append((f, target, bytes(ctx)))
264 failed.append((f, target, bytes(ctx)))
265 break
265 break
266 return failed
266 return failed
267
267
268
268
269 def parseeol(ui, repo, nodes):
269 def parseeol(ui, repo, nodes):
270 try:
270 try:
271 for node in nodes:
271 for node in nodes:
272 try:
272 try:
273 if node is None:
273 if node is None:
274 # Cannot use workingctx.data() since it would load
274 # Cannot use workingctx.data() since it would load
275 # and cache the filters before we configure them.
275 # and cache the filters before we configure them.
276 data = repo.wvfs(b'.hgeol').read()
276 data = repo.wvfs(b'.hgeol').read()
277 else:
277 else:
278 data = repo[node][b'.hgeol'].data()
278 data = repo[node][b'.hgeol'].data()
279 return eolfile(ui, repo.root, data)
279 return eolfile(ui, repo.root, data)
280 except (IOError, LookupError):
280 except (IOError, LookupError):
281 pass
281 pass
282 except errormod.ConfigError as inst:
282 except errormod.ConfigError as inst:
283 ui.warn(
283 ui.warn(
284 _(
284 _(
285 b"warning: ignoring .hgeol file due to parse error "
285 b"warning: ignoring .hgeol file due to parse error "
286 b"at %s: %s\n"
286 b"at %s: %s\n"
287 )
287 )
288 % (inst.location, inst.message)
288 % (inst.location, inst.message)
289 )
289 )
290 return None
290 return None
291
291
292
292
293 def ensureenabled(ui):
293 def ensureenabled(ui):
294 """make sure the extension is enabled when used as hook
294 """make sure the extension is enabled when used as hook
295
295
296 When eol is used through hooks, the extension is never formally loaded and
296 When eol is used through hooks, the extension is never formally loaded and
297 enabled. This has some side effects, for example the config declaration is
297 enabled. This has some side effects, for example the config declaration is
298 never loaded. This function ensures the extension is enabled when running
298 never loaded. This function ensures the extension is enabled when running
299 hooks.
299 hooks.
300 """
300 """
301 if b'eol' in ui._knownconfig:
301 if b'eol' in ui._knownconfig:
302 return
302 return
303 ui.setconfig(b'extensions', b'eol', b'', source=b'internal')
303 ui.setconfig(b'extensions', b'eol', b'', source=b'internal')
304 extensions.loadall(ui, [b'eol'])
304 extensions.loadall(ui, [b'eol'])
305
305
306
306
307 def _checkhook(ui, repo, node, headsonly):
307 def _checkhook(ui, repo, node, headsonly):
308 # Get revisions to check and touched files at the same time
308 # Get revisions to check and touched files at the same time
309 ensureenabled(ui)
309 ensureenabled(ui)
310 files = set()
310 files = set()
311 revs = set()
311 revs = set()
312 for rev in pycompat.xrange(repo[node].rev(), len(repo)):
312 for rev in pycompat.xrange(repo[node].rev(), len(repo)):
313 revs.add(rev)
313 revs.add(rev)
314 if headsonly:
314 if headsonly:
315 ctx = repo[rev]
315 ctx = repo[rev]
316 files.update(ctx.files())
316 files.update(ctx.files())
317 for pctx in ctx.parents():
317 for pctx in ctx.parents():
318 revs.discard(pctx.rev())
318 revs.discard(pctx.rev())
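# Editorial note (not in the original source): discarding every parent
# leaves only the heads of the incoming range in 'revs', while 'files'
# still collects everything touched, so head revisions get checked
# against all changed files.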
319 failed = []
319 failed = []
320 for rev in revs:
320 for rev in revs:
321 ctx = repo[rev]
321 ctx = repo[rev]
322 eol = parseeol(ui, repo, [ctx.node()])
322 eol = parseeol(ui, repo, [ctx.node()])
323 if eol:
323 if eol:
324 failed.extend(eol.checkrev(repo, ctx, files))
324 failed.extend(eol.checkrev(repo, ctx, files))
325
325
326 if failed:
326 if failed:
327 eols = {b'to-lf': b'CRLF', b'to-crlf': b'LF'}
327 eols = {b'to-lf': b'CRLF', b'to-crlf': b'LF'}
328 msgs = []
328 msgs = []
329 for f, target, node in sorted(failed):
329 for f, target, node in sorted(failed):
330 msgs.append(
330 msgs.append(
331 _(b" %s in %s should not have %s line endings")
331 _(b" %s in %s should not have %s line endings")
332 % (f, node, eols[target])
332 % (f, node, eols[target])
333 )
333 )
334 raise errormod.Abort(
334 raise errormod.Abort(
335 _(b"end-of-line check failed:\n") + b"\n".join(msgs)
335 _(b"end-of-line check failed:\n") + b"\n".join(msgs)
336 )
336 )
337
337
338
338
339 def checkallhook(ui, repo, node, hooktype, **kwargs):
339 def checkallhook(ui, repo, node, hooktype, **kwargs):
340 """verify that files have expected EOLs"""
340 """verify that files have expected EOLs"""
341 _checkhook(ui, repo, node, False)
341 _checkhook(ui, repo, node, False)
342
342
343
343
344 def checkheadshook(ui, repo, node, hooktype, **kwargs):
344 def checkheadshook(ui, repo, node, hooktype, **kwargs):
345 """verify that files have expected EOLs"""
345 """verify that files have expected EOLs"""
346 _checkhook(ui, repo, node, True)
346 _checkhook(ui, repo, node, True)
347
347
348
348
349 # "checkheadshook" used to be called "hook"
349 # "checkheadshook" used to be called "hook"
350 hook = checkheadshook
350 hook = checkheadshook
351
351
352
352
353 def preupdate(ui, repo, hooktype, parent1, parent2):
353 def preupdate(ui, repo, hooktype, parent1, parent2):
354 p1node = scmutil.resolvehexnodeidprefix(repo, parent1)
354 p1node = scmutil.resolvehexnodeidprefix(repo, parent1)
355 repo.loadeol([p1node])
355 repo.loadeol([p1node])
356 return False
356 return False
357
357
358
358
359 def uisetup(ui):
359 def uisetup(ui):
360 ui.setconfig(b'hooks', b'preupdate.eol', preupdate, b'eol')
360 ui.setconfig(b'hooks', b'preupdate.eol', preupdate, b'eol')
361
361
362
362
363 def extsetup(ui):
363 def extsetup(ui):
364 try:
364 try:
365 extensions.find(b'win32text')
365 extensions.find(b'win32text')
366 ui.warn(
366 ui.warn(
367 _(
367 _(
368 b"the eol extension is incompatible with the "
368 b"the eol extension is incompatible with the "
369 b"win32text extension\n"
369 b"win32text extension\n"
370 )
370 )
371 )
371 )
372 except KeyError:
372 except KeyError:
373 pass
373 pass
374
374
375
375
376 def reposetup(ui, repo):
376 def reposetup(ui, repo):
377 uisetup(repo.ui)
377 uisetup(repo.ui)
378
378
379 if not repo.local():
379 if not repo.local():
380 return
380 return
381 for name, fn in pycompat.iteritems(filters):
381 for name, fn in filters.items():
382 repo.adddatafilter(name, fn)
382 repo.adddatafilter(name, fn)
383
383
384 ui.setconfig(b'patch', b'eol', b'auto', b'eol')
384 ui.setconfig(b'patch', b'eol', b'auto', b'eol')
385
385
386 class eolrepo(repo.__class__):
386 class eolrepo(repo.__class__):
387 def loadeol(self, nodes):
387 def loadeol(self, nodes):
388 eol = parseeol(self.ui, self, nodes)
388 eol = parseeol(self.ui, self, nodes)
389 if eol is None:
389 if eol is None:
390 return None
390 return None
391 eol.copytoui(self.ui)
391 eol.copytoui(self.ui)
392 return eol.match
392 return eol.match
393
393
394 def _hgcleardirstate(self):
394 def _hgcleardirstate(self):
395 self._eolmatch = self.loadeol([None])
395 self._eolmatch = self.loadeol([None])
396 if not self._eolmatch:
396 if not self._eolmatch:
397 self._eolmatch = util.never
397 self._eolmatch = util.never
398 return
398 return
399
399
400 oldeol = None
400 oldeol = None
401 try:
401 try:
402 cachemtime = os.path.getmtime(self.vfs.join(b"eol.cache"))
402 cachemtime = os.path.getmtime(self.vfs.join(b"eol.cache"))
403 except OSError:
403 except OSError:
404 cachemtime = 0
404 cachemtime = 0
405 else:
405 else:
406 olddata = self.vfs.read(b"eol.cache")
406 olddata = self.vfs.read(b"eol.cache")
407 if olddata:
407 if olddata:
408 oldeol = eolfile(self.ui, self.root, olddata)
408 oldeol = eolfile(self.ui, self.root, olddata)
409
409
410 try:
410 try:
411 eolmtime = os.path.getmtime(self.wjoin(b".hgeol"))
411 eolmtime = os.path.getmtime(self.wjoin(b".hgeol"))
412 except OSError:
412 except OSError:
413 eolmtime = 0
413 eolmtime = 0
414
414
415 if eolmtime >= cachemtime and eolmtime > 0:
415 if eolmtime >= cachemtime and eolmtime > 0:
416 self.ui.debug(b"eol: detected change in .hgeol\n")
416 self.ui.debug(b"eol: detected change in .hgeol\n")
417
417
418 hgeoldata = self.wvfs.read(b'.hgeol')
418 hgeoldata = self.wvfs.read(b'.hgeol')
419 neweol = eolfile(self.ui, self.root, hgeoldata)
419 neweol = eolfile(self.ui, self.root, hgeoldata)
420
420
421 wlock = None
421 wlock = None
422 try:
422 try:
423 wlock = self.wlock()
423 wlock = self.wlock()
424 for f in self.dirstate:
424 for f in self.dirstate:
425 if not self.dirstate.get_entry(f).maybe_clean:
425 if not self.dirstate.get_entry(f).maybe_clean:
426 continue
426 continue
427 if oldeol is not None:
427 if oldeol is not None:
428 if not oldeol.match(f) and not neweol.match(f):
428 if not oldeol.match(f) and not neweol.match(f):
429 continue
429 continue
430 oldkey = None
430 oldkey = None
431 for pattern, key, m in oldeol.patterns:
431 for pattern, key, m in oldeol.patterns:
432 if m(f):
432 if m(f):
433 oldkey = key
433 oldkey = key
434 break
434 break
435 newkey = None
435 newkey = None
436 for pattern, key, m in neweol.patterns:
436 for pattern, key, m in neweol.patterns:
437 if m(f):
437 if m(f):
438 newkey = key
438 newkey = key
439 break
439 break
440 if oldkey == newkey:
440 if oldkey == newkey:
441 continue
441 continue
442 # all normal files need to be looked at again since
442 # all normal files need to be looked at again since
443 # the new .hgeol file specifies a different filter
443 # the new .hgeol file specifies a different filter
444 self.dirstate.set_possibly_dirty(f)
444 self.dirstate.set_possibly_dirty(f)
445 # Write the cache to update mtime and cache .hgeol
445 # Write the cache to update mtime and cache .hgeol
446 with self.vfs(b"eol.cache", b"w") as f:
446 with self.vfs(b"eol.cache", b"w") as f:
447 f.write(hgeoldata)
447 f.write(hgeoldata)
448 except errormod.LockUnavailable:
448 except errormod.LockUnavailable:
449 # If we cannot lock the repository and clear the
449 # If we cannot lock the repository and clear the
450 # dirstate, then a commit might not see all files
450 # dirstate, then a commit might not see all files
451 # as modified. But if we cannot lock the
451 # as modified. But if we cannot lock the
452 # repository, then we can also not make a commit,
452 # repository, then we can also not make a commit,
453 # so ignore the error.
453 # so ignore the error.
454 pass
454 pass
455 finally:
455 finally:
456 if wlock is not None:
456 if wlock is not None:
457 wlock.release()
457 wlock.release()
458
458
459 def commitctx(self, ctx, error=False, origctx=None):
459 def commitctx(self, ctx, error=False, origctx=None):
460 for f in sorted(ctx.added() + ctx.modified()):
460 for f in sorted(ctx.added() + ctx.modified()):
461 if not self._eolmatch(f):
461 if not self._eolmatch(f):
462 continue
462 continue
463 fctx = ctx[f]
463 fctx = ctx[f]
464 if fctx is None:
464 if fctx is None:
465 continue
465 continue
466 data = fctx.data()
466 data = fctx.data()
467 if stringutil.binary(data):
467 if stringutil.binary(data):
468 # We should not abort here, since the user should
468 # We should not abort here, since the user should
469 # be able to say "** = native" to automatically
469 # be able to say "** = native" to automatically
470 # have all non-binary files taken care of.
470 # have all non-binary files taken care of.
471 continue
471 continue
472 if inconsistenteol(data):
472 if inconsistenteol(data):
473 raise errormod.Abort(
473 raise errormod.Abort(
474 _(b"inconsistent newline style in %s\n") % f
474 _(b"inconsistent newline style in %s\n") % f
475 )
475 )
476 return super(eolrepo, self).commitctx(ctx, error, origctx)
476 return super(eolrepo, self).commitctx(ctx, error, origctx)
477
477
478 repo.__class__ = eolrepo
478 repo.__class__ = eolrepo
479 repo._hgcleardirstate()
479 repo._hgcleardirstate()
@@ -1,858 +1,858 @@
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # context: context needed to annotate a file
3 # context: context needed to annotate a file
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.pycompat import (
14 from mercurial.pycompat import (
15 getattr,
15 getattr,
16 open,
16 open,
17 setattr,
17 setattr,
18 )
18 )
19 from mercurial.node import (
19 from mercurial.node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 short,
22 short,
23 )
23 )
24 from mercurial import (
24 from mercurial import (
25 error,
25 error,
26 linelog as linelogmod,
26 linelog as linelogmod,
27 lock as lockmod,
27 lock as lockmod,
28 mdiff,
28 mdiff,
29 pycompat,
29 pycompat,
30 scmutil,
30 scmutil,
31 util,
31 util,
32 )
32 )
33 from mercurial.utils import (
33 from mercurial.utils import (
34 hashutil,
34 hashutil,
35 stringutil,
35 stringutil,
36 )
36 )
37
37
38 from . import (
38 from . import (
39 error as faerror,
39 error as faerror,
40 revmap as revmapmod,
40 revmap as revmapmod,
41 )
41 )
42
42
43 # given path, get filelog, cached
43 # given path, get filelog, cached
44 @util.lrucachefunc
44 @util.lrucachefunc
45 def _getflog(repo, path):
45 def _getflog(repo, path):
46 return repo.file(path)
46 return repo.file(path)
47
47
48
48
49 # extracted from mercurial.context.basefilectx.annotate
49 # extracted from mercurial.context.basefilectx.annotate
50 def _parents(f, follow=True):
50 def _parents(f, follow=True):
51 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
51 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
52 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
52 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
53 # from the topmost introrev (= srcrev) down to p.linkrev() if it
53 # from the topmost introrev (= srcrev) down to p.linkrev() if it
54 # isn't an ancestor of the srcrev.
54 # isn't an ancestor of the srcrev.
55 f._changeid
55 f._changeid
56 pl = f.parents()
56 pl = f.parents()
57
57
58 # Don't return renamed parents if we aren't following.
58 # Don't return renamed parents if we aren't following.
59 if not follow:
59 if not follow:
60 pl = [p for p in pl if p.path() == f.path()]
60 pl = [p for p in pl if p.path() == f.path()]
61
61
62 # renamed filectx won't have a filelog yet, so set it
62 # renamed filectx won't have a filelog yet, so set it
63 # from the cache to save time
63 # from the cache to save time
64 for p in pl:
64 for p in pl:
65 if not '_filelog' in p.__dict__:
65 if not '_filelog' in p.__dict__:
66 p._filelog = _getflog(f._repo, p.path())
66 p._filelog = _getflog(f._repo, p.path())
67
67
68 return pl
68 return pl
69
69
70
70
71 # extracted from mercurial.context.basefilectx.annotate. slightly modified
71 # extracted from mercurial.context.basefilectx.annotate. slightly modified
72 # so it takes a fctx instead of a pair of text and fctx.
72 # so it takes a fctx instead of a pair of text and fctx.
73 def _decorate(fctx):
73 def _decorate(fctx):
74 text = fctx.data()
74 text = fctx.data()
75 linecount = text.count(b'\n')
75 linecount = text.count(b'\n')
76 if text and not text.endswith(b'\n'):
76 if text and not text.endswith(b'\n'):
77 linecount += 1
77 linecount += 1
78 return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
78 return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
79
79
80
80
81 # extracted from mercurial.context.basefilectx.annotate. slightly modified
81 # extracted from mercurial.context.basefilectx.annotate. slightly modified
82 # so it takes an extra "blocks" parameter calculated elsewhere, instead of
82 # so it takes an extra "blocks" parameter calculated elsewhere, instead of
83 # calculating diff here.
83 # calculating diff here.
84 def _pair(parent, child, blocks):
84 def _pair(parent, child, blocks):
85 for (a1, a2, b1, b2), t in blocks:
85 for (a1, a2, b1, b2), t in blocks:
86 # Changed blocks ('!') or blocks made only of blank lines ('~')
86 # Changed blocks ('!') or blocks made only of blank lines ('~')
87 # belong to the child.
87 # belong to the child.
88 if t == b'=':
88 if t == b'=':
89 child[0][b1:b2] = parent[0][a1:a2]
89 child[0][b1:b2] = parent[0][a1:a2]
90 return child
90 return child
91
91
92
92
93 # like scmutil.revsingle, but with lru cache, so their states (like manifests)
93 # like scmutil.revsingle, but with lru cache, so their states (like manifests)
94 # could be reused
94 # could be reused
95 _revsingle = util.lrucachefunc(scmutil.revsingle)
95 _revsingle = util.lrucachefunc(scmutil.revsingle)
96
96
97
97
98 def resolvefctx(repo, rev, path, resolverev=False, adjustctx=None):
98 def resolvefctx(repo, rev, path, resolverev=False, adjustctx=None):
99 """(repo, str, str) -> fctx
99 """(repo, str, str) -> fctx
100
100
101 get the filectx object from repo, rev, path, in an efficient way.
101 get the filectx object from repo, rev, path, in an efficient way.
102
102
103 if resolverev is True, "rev" is a revision specified by the revset
103 if resolverev is True, "rev" is a revision specified by the revset
104 language, otherwise "rev" is a nodeid, or a revision number that can
104 language, otherwise "rev" is a nodeid, or a revision number that can
105 be consumed by repo.__getitem__.
105 be consumed by repo.__getitem__.
106
106
107 if adjustctx is not None, the returned fctx will point to a changeset
107 if adjustctx is not None, the returned fctx will point to a changeset
108 that introduces the change (last modified the file). if adjustctx
108 that introduces the change (last modified the file). if adjustctx
109 is 'linkrev', trust the linkrev and do not adjust it. this is noticeably
109 is 'linkrev', trust the linkrev and do not adjust it. this is noticeably
110 faster for big repos but is incorrect for some cases.
110 faster for big repos but is incorrect for some cases.
111 """
111 """
112 if resolverev and not isinstance(rev, int) and rev is not None:
112 if resolverev and not isinstance(rev, int) and rev is not None:
113 ctx = _revsingle(repo, rev)
113 ctx = _revsingle(repo, rev)
114 else:
114 else:
115 ctx = repo[rev]
115 ctx = repo[rev]
116
116
117 # If we don't need to adjust the linkrev, create the filectx using the
117 # If we don't need to adjust the linkrev, create the filectx using the
118 # changectx instead of using ctx[path]. This means it already has the
118 # changectx instead of using ctx[path]. This means it already has the
119 # changectx information, so blame -u will be able to look directly at the
119 # changectx information, so blame -u will be able to look directly at the
120 # commitctx object instead of having to resolve it by going through the
120 # commitctx object instead of having to resolve it by going through the
121 # manifest. In a lazy-manifest world this can prevent us from downloading a
121 # manifest. In a lazy-manifest world this can prevent us from downloading a
122 # lot of data.
122 # lot of data.
123 if adjustctx is None:
123 if adjustctx is None:
124 # ctx.rev() is None means it's the working copy, which is a special
124 # ctx.rev() is None means it's the working copy, which is a special
125 # case.
125 # case.
126 if ctx.rev() is None:
126 if ctx.rev() is None:
127 fctx = ctx[path]
127 fctx = ctx[path]
128 else:
128 else:
129 fctx = repo.filectx(path, changeid=ctx.rev())
129 fctx = repo.filectx(path, changeid=ctx.rev())
130 else:
130 else:
131 fctx = ctx[path]
131 fctx = ctx[path]
132 if adjustctx == b'linkrev':
132 if adjustctx == b'linkrev':
133 introrev = fctx.linkrev()
133 introrev = fctx.linkrev()
134 else:
134 else:
135 introrev = fctx.introrev()
135 introrev = fctx.introrev()
136 if introrev != ctx.rev():
136 if introrev != ctx.rev():
137 fctx._changeid = introrev
137 fctx._changeid = introrev
138 fctx._changectx = repo[introrev]
138 fctx._changectx = repo[introrev]
139 return fctx
139 return fctx
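# a minimal usage sketch (the repo object and the path are assumptions,
# not part of this change):
#
#   fctx = resolvefctx(repo, b'default', b'README', resolverev=True)
#
# passing adjustctx=b'linkrev' as well trusts the linkrev, which is
# noticeably faster on big repositories but can be wrong in rare cases.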
140
140
141
141
142 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
142 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
143 def encodedir(path):
143 def encodedir(path):
144 return (
144 return (
145 path.replace(b'.hg/', b'.hg.hg/')
145 path.replace(b'.hg/', b'.hg.hg/')
146 .replace(b'.l/', b'.l.hg/')
146 .replace(b'.l/', b'.l.hg/')
147 .replace(b'.m/', b'.m.hg/')
147 .replace(b'.m/', b'.m.hg/')
148 .replace(b'.lock/', b'.lock.hg/')
148 .replace(b'.lock/', b'.lock.hg/')
149 )
149 )
150
150
151
151
152 def hashdiffopts(diffopts):
152 def hashdiffopts(diffopts):
153 diffoptstr = stringutil.pprint(
153 diffoptstr = stringutil.pprint(
154 sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
154 sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
155 )
155 )
156 return hex(hashutil.sha1(diffoptstr).digest())[:6]
156 return hex(hashutil.sha1(diffoptstr).digest())[:6]
157
157
158
158
159 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
159 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
160
160
161
161
162 class annotateopts(object):
162 class annotateopts(object):
163 """like mercurial.mdiff.diffopts, but is for annotate
163 """like mercurial.mdiff.diffopts, but is for annotate
164
164
165 followrename: follow renames, like "hg annotate -f"
165 followrename: follow renames, like "hg annotate -f"
166 followmerge: follow p2 of a merge changeset, otherwise p2 is ignored
166 followmerge: follow p2 of a merge changeset, otherwise p2 is ignored
167 """
167 """
168
168
169 defaults = {
169 defaults = {
170 b'diffopts': None,
170 b'diffopts': None,
171 b'followrename': True,
171 b'followrename': True,
172 b'followmerge': True,
172 b'followmerge': True,
173 }
173 }
174
174
175 def __init__(self, **opts):
175 def __init__(self, **opts):
176 opts = pycompat.byteskwargs(opts)
176 opts = pycompat.byteskwargs(opts)
177 for k, v in pycompat.iteritems(self.defaults):
177 for k, v in self.defaults.items():
178 setattr(self, k, opts.get(k, v))
178 setattr(self, k, opts.get(k, v))
179
179
180 @util.propertycache
180 @util.propertycache
181 def shortstr(self):
181 def shortstr(self):
182 """represent opts in a short string, suitable for a directory name"""
182 """represent opts in a short string, suitable for a directory name"""
183 result = b''
183 result = b''
184 if not self.followrename:
184 if not self.followrename:
185 result += b'r0'
185 result += b'r0'
186 if not self.followmerge:
186 if not self.followmerge:
187 result += b'm0'
187 result += b'm0'
188 if self.diffopts is not None:
188 if self.diffopts is not None:
189 assert isinstance(self.diffopts, mdiff.diffopts)
189 assert isinstance(self.diffopts, mdiff.diffopts)
190 diffopthash = hashdiffopts(self.diffopts)
190 diffopthash = hashdiffopts(self.diffopts)
191 if diffopthash != _defaultdiffopthash:
191 if diffopthash != _defaultdiffopthash:
192 result += b'i' + diffopthash
192 result += b'i' + diffopthash
193 return result or b'default'
193 return result or b'default'
194
194
195
195
196 defaultopts = annotateopts()
196 defaultopts = annotateopts()
197
197
198
198
199 class _annotatecontext(object):
199 class _annotatecontext(object):
200 """do not use this class directly as it does not use lock to protect
200 """do not use this class directly as it does not use lock to protect
201 writes. use "with annotatecontext(...)" instead.
201 writes. use "with annotatecontext(...)" instead.
202 """
202 """
203
203
204 def __init__(self, repo, path, linelogpath, revmappath, opts):
204 def __init__(self, repo, path, linelogpath, revmappath, opts):
205 self.repo = repo
205 self.repo = repo
206 self.ui = repo.ui
206 self.ui = repo.ui
207 self.path = path
207 self.path = path
208 self.opts = opts
208 self.opts = opts
209 self.linelogpath = linelogpath
209 self.linelogpath = linelogpath
210 self.revmappath = revmappath
210 self.revmappath = revmappath
211 self._linelog = None
211 self._linelog = None
212 self._revmap = None
212 self._revmap = None
213 self._node2path = {} # {str: str}
213 self._node2path = {} # {str: str}
214
214
215 @property
215 @property
216 def linelog(self):
216 def linelog(self):
217 if self._linelog is None:
217 if self._linelog is None:
218 if os.path.exists(self.linelogpath):
218 if os.path.exists(self.linelogpath):
219 with open(self.linelogpath, b'rb') as f:
219 with open(self.linelogpath, b'rb') as f:
220 try:
220 try:
221 self._linelog = linelogmod.linelog.fromdata(f.read())
221 self._linelog = linelogmod.linelog.fromdata(f.read())
222 except linelogmod.LineLogError:
222 except linelogmod.LineLogError:
223 self._linelog = linelogmod.linelog()
223 self._linelog = linelogmod.linelog()
224 else:
224 else:
225 self._linelog = linelogmod.linelog()
225 self._linelog = linelogmod.linelog()
226 return self._linelog
226 return self._linelog
227
227
228 @property
228 @property
229 def revmap(self):
229 def revmap(self):
230 if self._revmap is None:
230 if self._revmap is None:
231 self._revmap = revmapmod.revmap(self.revmappath)
231 self._revmap = revmapmod.revmap(self.revmappath)
232 return self._revmap
232 return self._revmap
233
233
234 def close(self):
234 def close(self):
235 if self._revmap is not None:
235 if self._revmap is not None:
236 self._revmap.flush()
236 self._revmap.flush()
237 self._revmap = None
237 self._revmap = None
238 if self._linelog is not None:
238 if self._linelog is not None:
239 with open(self.linelogpath, b'wb') as f:
239 with open(self.linelogpath, b'wb') as f:
240 f.write(self._linelog.encode())
240 f.write(self._linelog.encode())
241 self._linelog = None
241 self._linelog = None
242
242
243 __del__ = close
243 __del__ = close
244
244
245 def rebuild(self):
245 def rebuild(self):
246 """delete linelog and revmap, useful for rebuilding"""
246 """delete linelog and revmap, useful for rebuilding"""
247 self.close()
247 self.close()
248 self._node2path.clear()
248 self._node2path.clear()
249 _unlinkpaths([self.revmappath, self.linelogpath])
249 _unlinkpaths([self.revmappath, self.linelogpath])
250
250
251 @property
251 @property
252 def lastnode(self):
252 def lastnode(self):
253 """return last node in revmap, or None if revmap is empty"""
253 """return last node in revmap, or None if revmap is empty"""
254 if self._revmap is None:
254 if self._revmap is None:
255 # fast path, read revmap without loading its full content
255 # fast path, read revmap without loading its full content
256 return revmapmod.getlastnode(self.revmappath)
256 return revmapmod.getlastnode(self.revmappath)
257 else:
257 else:
258 return self._revmap.rev2hsh(self._revmap.maxrev)
258 return self._revmap.rev2hsh(self._revmap.maxrev)
259
259
260 def isuptodate(self, master, strict=True):
260 def isuptodate(self, master, strict=True):
261 """return True if the revmap / linelog is up-to-date, or the file
261 """return True if the revmap / linelog is up-to-date, or the file
262 does not exist in the master revision. False otherwise.
262 does not exist in the master revision. False otherwise.
263
263
264 it tries to be fast and could return false negatives, because of the
264 it tries to be fast and could return false negatives, because of the
265 use of linkrev instead of introrev.
265 use of linkrev instead of introrev.
266
266
267 useful for both server and client to decide whether to update
267 useful for both server and client to decide whether to update
268 fastannotate cache or not.
268 fastannotate cache or not.
269
269
270 if strict is True, even if fctx exists in the revmap, but is not the
270 if strict is True, even if fctx exists in the revmap, but is not the
271 last node, isuptodate will return False. it's good for performance - no
271 last node, isuptodate will return False. it's good for performance - no
272 expensive check is done.
272 expensive check is done.
273
273
274 if strict is False, if fctx exists in the revmap, this function may
274 if strict is False, if fctx exists in the revmap, this function may
275 return True. this is useful for the client to skip downloading the
275 return True. this is useful for the client to skip downloading the
276 cache if the client's master is behind the server's.
276 cache if the client's master is behind the server's.
277 """
277 """
278 lastnode = self.lastnode
278 lastnode = self.lastnode
279 try:
279 try:
280 f = self._resolvefctx(master, resolverev=True)
280 f = self._resolvefctx(master, resolverev=True)
281 # choose linkrev instead of introrev as the check is meant to be
281 # choose linkrev instead of introrev as the check is meant to be
282 # *fast*.
282 # *fast*.
283 linknode = self.repo.changelog.node(f.linkrev())
283 linknode = self.repo.changelog.node(f.linkrev())
284 if not strict and lastnode and linknode != lastnode:
284 if not strict and lastnode and linknode != lastnode:
285 # check if f.node() is in the revmap. note: this loads the
285 # check if f.node() is in the revmap. note: this loads the
286 # revmap and can be slow.
286 # revmap and can be slow.
287 return self.revmap.hsh2rev(linknode) is not None
287 return self.revmap.hsh2rev(linknode) is not None
288 # avoid resolving old manifest, or slow adjustlinkrev to be fast,
288 # avoid resolving old manifest, or slow adjustlinkrev to be fast,
289 # false negatives are acceptable in this case.
289 # false negatives are acceptable in this case.
290 return linknode == lastnode
290 return linknode == lastnode
291 except LookupError:
291 except LookupError:
292 # master does not have the file, or the revmap is ahead
292 # master does not have the file, or the revmap is ahead
293 return True
293 return True
294
294
295 def annotate(self, rev, master=None, showpath=False, showlines=False):
295 def annotate(self, rev, master=None, showpath=False, showlines=False):
296 """incrementally update the cache so it includes revisions in the main
296 """incrementally update the cache so it includes revisions in the main
297 branch up to 'master', and run annotate on 'rev', which may or may not be
297 branch up to 'master', and run annotate on 'rev', which may or may not be
298 included in the main branch.
298 included in the main branch.
299
299
300 if master is None, do not update linelog.
300 if master is None, do not update linelog.
301
301
302 the first value returned is the annotate result, it is [(node, linenum)]
302 the first value returned is the annotate result, it is [(node, linenum)]
303 by default. [(node, linenum, path)] if showpath is True.
303 by default. [(node, linenum, path)] if showpath is True.
304
304
305 if showlines is True, a second value will be returned, it is a list of
305 if showlines is True, a second value will be returned, it is a list of
306 corresponding line contents.
306 corresponding line contents.
307 """
307 """
308
308
309 # the fast path test requires a commit hash; convert rev number to hash,
309 # the fast path test requires a commit hash; convert rev number to hash,
310 # so it may hit the fast path. note: in the "fctx" mode, the "annotate"
310 # so it may hit the fast path. note: in the "fctx" mode, the "annotate"
311 # command could give us a revision number even if the user passes a
311 # command could give us a revision number even if the user passes a
312 # commit hash.
312 # commit hash.
313 if isinstance(rev, int):
313 if isinstance(rev, int):
314 rev = hex(self.repo.changelog.node(rev))
314 rev = hex(self.repo.changelog.node(rev))
315
315
316 # fast path: if rev is in the main branch already
316 # fast path: if rev is in the main branch already
317 directly, revfctx = self.canannotatedirectly(rev)
317 directly, revfctx = self.canannotatedirectly(rev)
318 if directly:
318 if directly:
319 if self.ui.debugflag:
319 if self.ui.debugflag:
320 self.ui.debug(
320 self.ui.debug(
321 b'fastannotate: %s: using fast path '
321 b'fastannotate: %s: using fast path '
322 b'(resolved fctx: %s)\n'
322 b'(resolved fctx: %s)\n'
323 % (
323 % (
324 self.path,
324 self.path,
325 stringutil.pprint(util.safehasattr(revfctx, b'node')),
325 stringutil.pprint(util.safehasattr(revfctx, b'node')),
326 )
326 )
327 )
327 )
328 return self.annotatedirectly(revfctx, showpath, showlines)
328 return self.annotatedirectly(revfctx, showpath, showlines)
329
329
330 # resolve master
330 # resolve master
331 masterfctx = None
331 masterfctx = None
332 if master:
332 if master:
333 try:
333 try:
334 masterfctx = self._resolvefctx(
334 masterfctx = self._resolvefctx(
335 master, resolverev=True, adjustctx=True
335 master, resolverev=True, adjustctx=True
336 )
336 )
337 except LookupError: # master does not have the file
337 except LookupError: # master does not have the file
338 pass
338 pass
339 else:
339 else:
340 if masterfctx in self.revmap: # no need to update linelog
340 if masterfctx in self.revmap: # no need to update linelog
341 masterfctx = None
341 masterfctx = None
342
342
343 # ... - @ <- rev (can be an arbitrary changeset,
343 # ... - @ <- rev (can be an arbitrary changeset,
344 # / not necessarily a descendant
344 # / not necessarily a descendant
345 # master -> o of master)
345 # master -> o of master)
346 # |
346 # |
347 # a merge -> o 'o': new changesets in the main branch
347 # a merge -> o 'o': new changesets in the main branch
348 # |\ '#': revisions in the main branch that
348 # |\ '#': revisions in the main branch that
349 # o * exist in linelog / revmap
349 # o * exist in linelog / revmap
350 # | . '*': changesets in side branches, or
350 # | . '*': changesets in side branches, or
351 # last master -> # . descendants of master
351 # last master -> # . descendants of master
352 # | .
352 # | .
353 # # * joint: '#', and is a parent of a '*'
353 # # * joint: '#', and is a parent of a '*'
354 # |/
354 # |/
355 # a joint -> # ^^^^ --- side branches
355 # a joint -> # ^^^^ --- side branches
356 # |
356 # |
357 # ^ --- main branch (in linelog)
357 # ^ --- main branch (in linelog)
358
358
359 # these DFSes are similar to the traditional annotate algorithm.
359 # these DFSes are similar to the traditional annotate algorithm.
360 # we cannot really reuse the code for perf reasons.
360 # we cannot really reuse the code for perf reasons.
361
361
362 # 1st DFS calculates merges, joint points, and needed.
362 # 1st DFS calculates merges, joint points, and needed.
363 # "needed" is a simple reference counting dict to free items in
363 # "needed" is a simple reference counting dict to free items in
364 # "hist", reducing its memory usage otherwise could be huge.
364 # "hist", reducing its memory usage otherwise could be huge.
365 initvisit = [revfctx]
365 initvisit = [revfctx]
366 if masterfctx:
366 if masterfctx:
367 if masterfctx.rev() is None:
367 if masterfctx.rev() is None:
368 raise error.Abort(
368 raise error.Abort(
369 _(b'cannot update linelog to wdir()'),
369 _(b'cannot update linelog to wdir()'),
370 hint=_(b'set fastannotate.mainbranch'),
370 hint=_(b'set fastannotate.mainbranch'),
371 )
371 )
372 initvisit.append(masterfctx)
372 initvisit.append(masterfctx)
373 visit = initvisit[:]
373 visit = initvisit[:]
374 pcache = {}
374 pcache = {}
375 needed = {revfctx: 1}
375 needed = {revfctx: 1}
376 hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
376 hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
377 while visit:
377 while visit:
378 f = visit.pop()
378 f = visit.pop()
379 if f in pcache or f in hist:
379 if f in pcache or f in hist:
380 continue
380 continue
381 if f in self.revmap: # in the old main branch, it's a joint
381 if f in self.revmap: # in the old main branch, it's a joint
382 llrev = self.revmap.hsh2rev(f.node())
382 llrev = self.revmap.hsh2rev(f.node())
383 self.linelog.annotate(llrev)
383 self.linelog.annotate(llrev)
384 result = self.linelog.annotateresult
384 result = self.linelog.annotateresult
385 hist[f] = (result, f.data())
385 hist[f] = (result, f.data())
386 continue
386 continue
387 pl = self._parentfunc(f)
387 pl = self._parentfunc(f)
388 pcache[f] = pl
388 pcache[f] = pl
389 for p in pl:
389 for p in pl:
390 needed[p] = needed.get(p, 0) + 1
390 needed[p] = needed.get(p, 0) + 1
391 if p not in pcache:
391 if p not in pcache:
392 visit.append(p)
392 visit.append(p)
393
393
394 # 2nd (simple) DFS calculates new changesets in the main branch
394 # 2nd (simple) DFS calculates new changesets in the main branch
395 # ('o' nodes in the above graph), so we know when to update linelog.
395 # ('o' nodes in the above graph), so we know when to update linelog.
396 newmainbranch = set()
396 newmainbranch = set()
397 f = masterfctx
397 f = masterfctx
398 while f and f not in self.revmap:
398 while f and f not in self.revmap:
399 newmainbranch.add(f)
399 newmainbranch.add(f)
400 pl = pcache[f]
400 pl = pcache[f]
401 if pl:
401 if pl:
402 f = pl[0]
402 f = pl[0]
403 else:
403 else:
404 f = None
404 f = None
405 break
405 break
406
406
407 # f, if present, is the position where the last build stopped, and
407 # f, if present, is the position where the last build stopped, and
408 # should be the "master" last time. check to see if we can continue
408 # should be the "master" last time. check to see if we can continue
409 # building the linelog incrementally. (we cannot if diverged)
409 # building the linelog incrementally. (we cannot if diverged)
410 if masterfctx is not None:
410 if masterfctx is not None:
411 self._checklastmasterhead(f)
411 self._checklastmasterhead(f)
412
412
413 if self.ui.debugflag:
413 if self.ui.debugflag:
414 if newmainbranch:
414 if newmainbranch:
415 self.ui.debug(
415 self.ui.debug(
416 b'fastannotate: %s: %d new changesets in the main'
416 b'fastannotate: %s: %d new changesets in the main'
417 b' branch\n' % (self.path, len(newmainbranch))
417 b' branch\n' % (self.path, len(newmainbranch))
418 )
418 )
419 elif not hist: # no joints, no updates
419 elif not hist: # no joints, no updates
420 self.ui.debug(
420 self.ui.debug(
421 b'fastannotate: %s: linelog cannot help in '
421 b'fastannotate: %s: linelog cannot help in '
422 b'annotating this revision\n' % self.path
422 b'annotating this revision\n' % self.path
423 )
423 )
424
424
425 # prepare annotateresult so we can update linelog incrementally
425 # prepare annotateresult so we can update linelog incrementally
426 self.linelog.annotate(self.linelog.maxrev)
426 self.linelog.annotate(self.linelog.maxrev)
427
427
428 # 3rd DFS does the actual annotate
428 # 3rd DFS does the actual annotate
429 visit = initvisit[:]
429 visit = initvisit[:]
430 progress = self.ui.makeprogress(
430 progress = self.ui.makeprogress(
431 b'building cache', total=len(newmainbranch)
431 b'building cache', total=len(newmainbranch)
432 )
432 )
433 while visit:
433 while visit:
434 f = visit[-1]
434 f = visit[-1]
435 if f in hist:
435 if f in hist:
436 visit.pop()
436 visit.pop()
437 continue
437 continue
438
438
439 ready = True
439 ready = True
440 pl = pcache[f]
440 pl = pcache[f]
441 for p in pl:
441 for p in pl:
442 if p not in hist:
442 if p not in hist:
443 ready = False
443 ready = False
444 visit.append(p)
444 visit.append(p)
445 if not ready:
445 if not ready:
446 continue
446 continue
447
447
448 visit.pop()
448 visit.pop()
449 blocks = None # mdiff blocks, used for appending linelog
449 blocks = None # mdiff blocks, used for appending linelog
450 ismainbranch = f in newmainbranch
450 ismainbranch = f in newmainbranch
451 # curr is the same as in the traditional annotate algorithm;
451 # curr is the same as in the traditional annotate algorithm;
452 # if we only care about linear history (do not follow merge),
452 # if we only care about linear history (do not follow merge),
453 # then curr is not actually used.
453 # then curr is not actually used.
454 assert f not in hist
454 assert f not in hist
455 curr = _decorate(f)
455 curr = _decorate(f)
456 for i, p in enumerate(pl):
456 for i, p in enumerate(pl):
457 bs = list(self._diffblocks(hist[p][1], curr[1]))
457 bs = list(self._diffblocks(hist[p][1], curr[1]))
458 if i == 0 and ismainbranch:
458 if i == 0 and ismainbranch:
459 blocks = bs
459 blocks = bs
460 curr = _pair(hist[p], curr, bs)
460 curr = _pair(hist[p], curr, bs)
461 if needed[p] == 1:
461 if needed[p] == 1:
462 del hist[p]
462 del hist[p]
463 del needed[p]
463 del needed[p]
464 else:
464 else:
465 needed[p] -= 1
465 needed[p] -= 1
466
466
467 hist[f] = curr
467 hist[f] = curr
468 del pcache[f]
468 del pcache[f]
469
469
470 if ismainbranch: # need to write to linelog
470 if ismainbranch: # need to write to linelog
471 progress.increment()
471 progress.increment()
472 bannotated = None
472 bannotated = None
473 if len(pl) == 2 and self.opts.followmerge: # merge
473 if len(pl) == 2 and self.opts.followmerge: # merge
474 bannotated = curr[0]
474 bannotated = curr[0]
475 if blocks is None: # no parents, add an empty one
475 if blocks is None: # no parents, add an empty one
476 blocks = list(self._diffblocks(b'', curr[1]))
476 blocks = list(self._diffblocks(b'', curr[1]))
477 self._appendrev(f, blocks, bannotated)
477 self._appendrev(f, blocks, bannotated)
478 elif showpath: # not append linelog, but we need to record path
478 elif showpath: # not append linelog, but we need to record path
479 self._node2path[f.node()] = f.path()
479 self._node2path[f.node()] = f.path()
480
480
481 progress.complete()
481 progress.complete()
482
482
483 result = [
483 result = [
484 ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
484 ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
485 for fr, l in hist[revfctx][0]
485 for fr, l in hist[revfctx][0]
486 ] # [(node, linenumber)]
486 ] # [(node, linenumber)]
487 return self._refineannotateresult(result, revfctx, showpath, showlines)
487 return self._refineannotateresult(result, revfctx, showpath, showlines)
488
488
489 def canannotatedirectly(self, rev):
489 def canannotatedirectly(self, rev):
490 """(str) -> bool, fctx or node.
490 """(str) -> bool, fctx or node.
491 return (True, f) if we can annotate without updating the linelog; pass
491 return (True, f) if we can annotate without updating the linelog; pass
492 f to annotatedirectly.
492 f to annotatedirectly.
493 return (False, f) if we need extra calculation. f is the fctx resolved
493 return (False, f) if we need extra calculation. f is the fctx resolved
494 from rev.
494 from rev.
495 """
495 """
496 result = True
496 result = True
497 f = None
497 f = None
498 if not isinstance(rev, int) and rev is not None:
498 if not isinstance(rev, int) and rev is not None:
499 hsh = {20: bytes, 40: bin}.get(len(rev), lambda x: None)(rev)
499 hsh = {20: bytes, 40: bin}.get(len(rev), lambda x: None)(rev)
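# a 20-byte rev is already a binary node (bytes() is a no-op copy), a
# 40-byte rev is a hex node decoded via bin(), and any other length
# maps to None, falling through to the full resolution below.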
500 if hsh is not None and (hsh, self.path) in self.revmap:
500 if hsh is not None and (hsh, self.path) in self.revmap:
501 f = hsh
501 f = hsh
502 if f is None:
502 if f is None:
503 adjustctx = b'linkrev' if self._perfhack else True
503 adjustctx = b'linkrev' if self._perfhack else True
504 f = self._resolvefctx(rev, adjustctx=adjustctx, resolverev=True)
504 f = self._resolvefctx(rev, adjustctx=adjustctx, resolverev=True)
505 result = f in self.revmap
505 result = f in self.revmap
506 if not result and self._perfhack:
506 if not result and self._perfhack:
507 # redo the resolution without perfhack - as we are going to
507 # redo the resolution without perfhack - as we are going to
508 # do write operations, we need a correct fctx.
508 # do write operations, we need a correct fctx.
509 f = self._resolvefctx(rev, adjustctx=True, resolverev=True)
509 f = self._resolvefctx(rev, adjustctx=True, resolverev=True)
510 return result, f
510 return result, f
511
511
512 def annotatealllines(self, rev, showpath=False, showlines=False):
512 def annotatealllines(self, rev, showpath=False, showlines=False):
513 """(rev : str) -> [(node : str, linenum : int, path : str)]
513 """(rev : str) -> [(node : str, linenum : int, path : str)]
514
514
515 the result has the same format as annotate, but includes all (including
515 the result has the same format as annotate, but includes all (including
516 deleted) lines up to rev. call this after calling annotate(rev, ...) for
516 deleted) lines up to rev. call this after calling annotate(rev, ...) for
517 better performance and accuracy.
517 better performance and accuracy.
518 """
518 """
519 revfctx = self._resolvefctx(rev, resolverev=True, adjustctx=True)
519 revfctx = self._resolvefctx(rev, resolverev=True, adjustctx=True)
520
520
521 # find a chain from rev to anything in the mainbranch
521 # find a chain from rev to anything in the mainbranch
522 if revfctx not in self.revmap:
522 if revfctx not in self.revmap:
523 chain = [revfctx]
523 chain = [revfctx]
524 a = b''
524 a = b''
525 while True:
525 while True:
526 f = chain[-1]
526 f = chain[-1]
527 pl = self._parentfunc(f)
527 pl = self._parentfunc(f)
528 if not pl:
528 if not pl:
529 break
529 break
530 if pl[0] in self.revmap:
530 if pl[0] in self.revmap:
531 a = pl[0].data()
531 a = pl[0].data()
532 break
532 break
533 chain.append(pl[0])
533 chain.append(pl[0])
534
534
535 # both self.linelog and self.revmap are backed by the filesystem. now
535 # both self.linelog and self.revmap are backed by the filesystem. now
536 # we want to modify them but do not want to write changes back to
536 # we want to modify them but do not want to write changes back to
537 # files. so we create in-memory objects and copy them. it's like
537 # files. so we create in-memory objects and copy them. it's like
538 # a "fork".
538 # a "fork".
539 linelog = linelogmod.linelog()
539 linelog = linelogmod.linelog()
540 linelog.copyfrom(self.linelog)
540 linelog.copyfrom(self.linelog)
541 linelog.annotate(linelog.maxrev)
541 linelog.annotate(linelog.maxrev)
542 revmap = revmapmod.revmap()
542 revmap = revmapmod.revmap()
543 revmap.copyfrom(self.revmap)
543 revmap.copyfrom(self.revmap)
544
544
545 for f in reversed(chain):
545 for f in reversed(chain):
546 b = f.data()
546 b = f.data()
547 blocks = list(self._diffblocks(a, b))
547 blocks = list(self._diffblocks(a, b))
548 self._doappendrev(linelog, revmap, f, blocks)
548 self._doappendrev(linelog, revmap, f, blocks)
549 a = b
549 a = b
550 else:
550 else:
551 # fastpath: use existing linelog, revmap as we don't write to them
551 # fastpath: use existing linelog, revmap as we don't write to them
552 linelog = self.linelog
552 linelog = self.linelog
553 revmap = self.revmap
553 revmap = self.revmap
554
554
555 lines = linelog.getalllines()
555 lines = linelog.getalllines()
556 hsh = revfctx.node()
556 hsh = revfctx.node()
557 llrev = revmap.hsh2rev(hsh)
557 llrev = revmap.hsh2rev(hsh)
558 result = [(revmap.rev2hsh(r), l) for r, l in lines if r <= llrev]
558 result = [(revmap.rev2hsh(r), l) for r, l in lines if r <= llrev]
559 # cannot use _refineannotateresult since we need custom logic for
559 # cannot use _refineannotateresult since we need custom logic for
560 # resolving line contents
560 # resolving line contents
561 if showpath:
561 if showpath:
562 result = self._addpathtoresult(result, revmap)
562 result = self._addpathtoresult(result, revmap)
563 if showlines:
563 if showlines:
564 linecontents = self._resolvelines(result, revmap, linelog)
564 linecontents = self._resolvelines(result, revmap, linelog)
565 result = (result, linecontents)
565 result = (result, linecontents)
566 return result
566 return result
567
567
568 def _resolvelines(self, annotateresult, revmap, linelog):
568 def _resolvelines(self, annotateresult, revmap, linelog):
569 """(annotateresult) -> [line]. designed for annotatealllines.
569 """(annotateresult) -> [line]. designed for annotatealllines.
570 this is probably the most inefficient code in the whole fastannotate
570 this is probably the most inefficient code in the whole fastannotate
571 directory. but we have made a decision that the linelog does not
571 directory. but we have made a decision that the linelog does not
572 store line contents. so getting them requires random accesses to
572 store line contents. so getting them requires random accesses to
573 the revlog data; since there can be many, this can be very slow.
573 the revlog data; since there can be many, this can be very slow.
574 """
574 """
575 # [llrev]
575 # [llrev]
576 revs = [revmap.hsh2rev(l[0]) for l in annotateresult]
576 revs = [revmap.hsh2rev(l[0]) for l in annotateresult]
577 result = [None] * len(annotateresult)
577 result = [None] * len(annotateresult)
578 # {(rev, linenum): [lineindex]}
578 # {(rev, linenum): [lineindex]}
579 key2idxs = collections.defaultdict(list)
579 key2idxs = collections.defaultdict(list)
580 for i in pycompat.xrange(len(result)):
580 for i in pycompat.xrange(len(result)):
581 key2idxs[(revs[i], annotateresult[i][1])].append(i)
581 key2idxs[(revs[i], annotateresult[i][1])].append(i)
582 while key2idxs:
582 while key2idxs:
583 # find an unresolved line and its linelog rev to annotate
583 # find an unresolved line and its linelog rev to annotate
584 hsh = None
584 hsh = None
585 try:
585 try:
586 for (rev, _linenum), idxs in pycompat.iteritems(key2idxs):
586 for (rev, _linenum), idxs in key2idxs.items():
587 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
587 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
588 continue
588 continue
589 hsh = annotateresult[idxs[0]][0]
589 hsh = annotateresult[idxs[0]][0]
590 break
590 break
591 except StopIteration: # no more unresolved lines
591 except StopIteration: # no more unresolved lines
592 return result
592 return result
593 if hsh is None:
593 if hsh is None:
594 # the remaining key2idxs are not in the main branch; resolve them
594 # the remaining key2idxs are not in the main branch; resolve them
595 # the hard way...
595 # the hard way...
596 revlines = {}
596 revlines = {}
597 for (rev, linenum), idxs in pycompat.iteritems(key2idxs):
597 for (rev, linenum), idxs in key2idxs.items():
598 if rev not in revlines:
598 if rev not in revlines:
599 hsh = annotateresult[idxs[0]][0]
599 hsh = annotateresult[idxs[0]][0]
600 if self.ui.debugflag:
600 if self.ui.debugflag:
601 self.ui.debug(
601 self.ui.debug(
602 b'fastannotate: reading %s line #%d '
602 b'fastannotate: reading %s line #%d '
603 b'to resolve lines %r\n'
603 b'to resolve lines %r\n'
604 % (short(hsh), linenum, idxs)
604 % (short(hsh), linenum, idxs)
605 )
605 )
606 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
606 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
607 lines = mdiff.splitnewlines(fctx.data())
607 lines = mdiff.splitnewlines(fctx.data())
608 revlines[rev] = lines
608 revlines[rev] = lines
609 for idx in idxs:
609 for idx in idxs:
610 result[idx] = revlines[rev][linenum]
610 result[idx] = revlines[rev][linenum]
611 assert all(x is not None for x in result)
611 assert all(x is not None for x in result)
612 return result
612 return result
613
613
614 # run the annotate and the lines should match the file content
614 # run the annotate and the lines should match the file content
615 self.ui.debug(
615 self.ui.debug(
616 b'fastannotate: annotate %s to resolve lines\n' % short(hsh)
616 b'fastannotate: annotate %s to resolve lines\n' % short(hsh)
617 )
617 )
618 linelog.annotate(rev)
618 linelog.annotate(rev)
619 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
619 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
620 annotated = linelog.annotateresult
620 annotated = linelog.annotateresult
621 lines = mdiff.splitnewlines(fctx.data())
621 lines = mdiff.splitnewlines(fctx.data())
622 if len(lines) != len(annotated):
622 if len(lines) != len(annotated):
623 raise faerror.CorruptedFileError(b'unexpected annotated lines')
623 raise faerror.CorruptedFileError(b'unexpected annotated lines')
624 # resolve lines from the annotate result
624 # resolve lines from the annotate result
625 for i, line in enumerate(lines):
625 for i, line in enumerate(lines):
626 k = annotated[i]
626 k = annotated[i]
627 if k in key2idxs:
627 if k in key2idxs:
628 for idx in key2idxs[k]:
628 for idx in key2idxs[k]:
629 result[idx] = line
629 result[idx] = line
630 del key2idxs[k]
630 del key2idxs[k]
631 return result
631 return result
632
632
633 def annotatedirectly(self, f, showpath, showlines):
633 def annotatedirectly(self, f, showpath, showlines):
634 """like annotate, but when we know that f is in linelog.
634 """like annotate, but when we know that f is in linelog.
635 f can be either a 20-char str (node) or a fctx. this is for perf - in
635 f can be either a 20-char str (node) or a fctx. this is for perf - in
636 the best case, the user provides a node and we don't need to read the
636 the best case, the user provides a node and we don't need to read the
637 filelog or construct any filecontext.
637 filelog or construct any filecontext.
638 """
638 """
639 if isinstance(f, bytes):
639 if isinstance(f, bytes):
640 hsh = f
640 hsh = f
641 else:
641 else:
642 hsh = f.node()
642 hsh = f.node()
643 llrev = self.revmap.hsh2rev(hsh)
643 llrev = self.revmap.hsh2rev(hsh)
644 if not llrev:
644 if not llrev:
645 raise faerror.CorruptedFileError(b'%s is not in revmap' % hex(hsh))
645 raise faerror.CorruptedFileError(b'%s is not in revmap' % hex(hsh))
646 if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
646 if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
647 raise faerror.CorruptedFileError(
647 raise faerror.CorruptedFileError(
648 b'%s is not in revmap mainbranch' % hex(hsh)
648 b'%s is not in revmap mainbranch' % hex(hsh)
649 )
649 )
650 self.linelog.annotate(llrev)
650 self.linelog.annotate(llrev)
651 result = [
651 result = [
652 (self.revmap.rev2hsh(r), l) for r, l in self.linelog.annotateresult
652 (self.revmap.rev2hsh(r), l) for r, l in self.linelog.annotateresult
653 ]
653 ]
654 return self._refineannotateresult(result, f, showpath, showlines)
654 return self._refineannotateresult(result, f, showpath, showlines)
655
655
656 def _refineannotateresult(self, result, f, showpath, showlines):
656 def _refineannotateresult(self, result, f, showpath, showlines):
657 """add the missing path or line contents, they can be expensive.
657 """add the missing path or line contents, they can be expensive.
658 f could be either node or fctx.
658 f could be either node or fctx.
659 """
659 """
660 if showpath:
660 if showpath:
661 result = self._addpathtoresult(result)
661 result = self._addpathtoresult(result)
662 if showlines:
662 if showlines:
663 if isinstance(f, bytes): # f: node or fctx
663 if isinstance(f, bytes): # f: node or fctx
664 llrev = self.revmap.hsh2rev(f)
664 llrev = self.revmap.hsh2rev(f)
665 fctx = self._resolvefctx(f, self.revmap.rev2path(llrev))
665 fctx = self._resolvefctx(f, self.revmap.rev2path(llrev))
666 else:
666 else:
667 fctx = f
667 fctx = f
668 lines = mdiff.splitnewlines(fctx.data())
668 lines = mdiff.splitnewlines(fctx.data())
669 if len(lines) != len(result): # linelog is probably corrupted
669 if len(lines) != len(result): # linelog is probably corrupted
670 raise faerror.CorruptedFileError()
670 raise faerror.CorruptedFileError()
671 result = (result, lines)
671 result = (result, lines)
672 return result
672 return result
673
673
674 def _appendrev(self, fctx, blocks, bannotated=None):
674 def _appendrev(self, fctx, blocks, bannotated=None):
675 self._doappendrev(self.linelog, self.revmap, fctx, blocks, bannotated)
675 self._doappendrev(self.linelog, self.revmap, fctx, blocks, bannotated)
676
676
677 def _diffblocks(self, a, b):
677 def _diffblocks(self, a, b):
678 return mdiff.allblocks(a, b, self.opts.diffopts)
678 return mdiff.allblocks(a, b, self.opts.diffopts)
679
679
680 @staticmethod
680 @staticmethod
681 def _doappendrev(linelog, revmap, fctx, blocks, bannotated=None):
681 def _doappendrev(linelog, revmap, fctx, blocks, bannotated=None):
682 """append a revision to linelog and revmap"""
682 """append a revision to linelog and revmap"""
683
683
684 def getllrev(f):
684 def getllrev(f):
685 """(fctx) -> int"""
685 """(fctx) -> int"""
686 # f should not be a linelog revision
686 # f should not be a linelog revision
687 if isinstance(f, int):
687 if isinstance(f, int):
688 raise error.ProgrammingError(b'f should not be an int')
688 raise error.ProgrammingError(b'f should not be an int')
689 # f is a fctx, allocate linelog rev on demand
689 # f is a fctx, allocate linelog rev on demand
690 hsh = f.node()
690 hsh = f.node()
691 rev = revmap.hsh2rev(hsh)
691 rev = revmap.hsh2rev(hsh)
692 if rev is None:
692 if rev is None:
693 rev = revmap.append(hsh, sidebranch=True, path=f.path())
693 rev = revmap.append(hsh, sidebranch=True, path=f.path())
694 return rev
694 return rev
695
695
696 # append sidebranch revisions to revmap
696 # append sidebranch revisions to revmap
697 siderevs = []
697 siderevs = []
698 siderevmap = {} # node: int
698 siderevmap = {} # node: int
699 if bannotated is not None:
699 if bannotated is not None:
700 for (a1, a2, b1, b2), op in blocks:
700 for (a1, a2, b1, b2), op in blocks:
701 if op != b'=':
701 if op != b'=':
702 # f could be either a linelog rev or a fctx.
702 # f could be either a linelog rev or a fctx.
703 siderevs += [
703 siderevs += [
704 f
704 f
705 for f, l in bannotated[b1:b2]
705 for f, l in bannotated[b1:b2]
706 if not isinstance(f, int)
706 if not isinstance(f, int)
707 ]
707 ]
708 siderevs = set(siderevs)
708 siderevs = set(siderevs)
709 if fctx in siderevs: # mainnode must be appended separately
709 if fctx in siderevs: # mainnode must be appended separately
710 siderevs.remove(fctx)
710 siderevs.remove(fctx)
711 for f in siderevs:
711 for f in siderevs:
712 siderevmap[f] = getllrev(f)
712 siderevmap[f] = getllrev(f)
713
713
714 # the changeset in the main branch, could be a merge
714 # the changeset in the main branch, could be a merge
715 llrev = revmap.append(fctx.node(), path=fctx.path())
715 llrev = revmap.append(fctx.node(), path=fctx.path())
716 siderevmap[fctx] = llrev
716 siderevmap[fctx] = llrev
717
717
718 for (a1, a2, b1, b2), op in reversed(blocks):
718 for (a1, a2, b1, b2), op in reversed(blocks):
719 if op == b'=':
719 if op == b'=':
720 continue
720 continue
721 if bannotated is None:
721 if bannotated is None:
722 linelog.replacelines(llrev, a1, a2, b1, b2)
722 linelog.replacelines(llrev, a1, a2, b1, b2)
723 else:
723 else:
724 blines = [
724 blines = [
725 ((r if isinstance(r, int) else siderevmap[r]), l)
725 ((r if isinstance(r, int) else siderevmap[r]), l)
726 for r, l in bannotated[b1:b2]
726 for r, l in bannotated[b1:b2]
727 ]
727 ]
728 linelog.replacelines_vec(llrev, a1, a2, blines)
728 linelog.replacelines_vec(llrev, a1, a2, blines)
729
729
730 def _addpathtoresult(self, annotateresult, revmap=None):
730 def _addpathtoresult(self, annotateresult, revmap=None):
731 """(revmap, [(node, linenum)]) -> [(node, linenum, path)]"""
731 """(revmap, [(node, linenum)]) -> [(node, linenum, path)]"""
732 if revmap is None:
732 if revmap is None:
733 revmap = self.revmap
733 revmap = self.revmap
734
734
735 def _getpath(nodeid):
735 def _getpath(nodeid):
736 path = self._node2path.get(nodeid)
736 path = self._node2path.get(nodeid)
737 if path is None:
737 if path is None:
738 path = revmap.rev2path(revmap.hsh2rev(nodeid))
738 path = revmap.rev2path(revmap.hsh2rev(nodeid))
739 self._node2path[nodeid] = path
739 self._node2path[nodeid] = path
740 return path
740 return path
741
741
742 return [(n, l, _getpath(n)) for n, l in annotateresult]
742 return [(n, l, _getpath(n)) for n, l in annotateresult]
743
743
744 def _checklastmasterhead(self, fctx):
744 def _checklastmasterhead(self, fctx):
745 """check if fctx is the master's head last time, raise if not"""
745 """check if fctx is the master's head last time, raise if not"""
746 if fctx is None:
746 if fctx is None:
747 llrev = 0
747 llrev = 0
748 else:
748 else:
749 llrev = self.revmap.hsh2rev(fctx.node())
749 llrev = self.revmap.hsh2rev(fctx.node())
750 if not llrev:
750 if not llrev:
751 raise faerror.CannotReuseError()
751 raise faerror.CannotReuseError()
752 if self.linelog.maxrev != llrev:
752 if self.linelog.maxrev != llrev:
753 raise faerror.CannotReuseError()
753 raise faerror.CannotReuseError()
754
754
755 @util.propertycache
755 @util.propertycache
756 def _parentfunc(self):
756 def _parentfunc(self):
757 """-> (fctx) -> [fctx]"""
757 """-> (fctx) -> [fctx]"""
758 followrename = self.opts.followrename
758 followrename = self.opts.followrename
759 followmerge = self.opts.followmerge
759 followmerge = self.opts.followmerge
760
760
761 def parents(f):
761 def parents(f):
762 pl = _parents(f, follow=followrename)
762 pl = _parents(f, follow=followrename)
763 if not followmerge:
763 if not followmerge:
764 pl = pl[:1]
764 pl = pl[:1]
765 return pl
765 return pl
766
766
767 return parents
767 return parents
768
768
769 @util.propertycache
769 @util.propertycache
770 def _perfhack(self):
770 def _perfhack(self):
771 return self.ui.configbool(b'fastannotate', b'perfhack')
771 return self.ui.configbool(b'fastannotate', b'perfhack')
772
772
773 def _resolvefctx(self, rev, path=None, **kwds):
773 def _resolvefctx(self, rev, path=None, **kwds):
774 return resolvefctx(self.repo, rev, (path or self.path), **kwds)
774 return resolvefctx(self.repo, rev, (path or self.path), **kwds)
775
775
776
776
777 def _unlinkpaths(paths):
777 def _unlinkpaths(paths):
778 """silent, best-effort unlink"""
778 """silent, best-effort unlink"""
779 for path in paths:
779 for path in paths:
780 try:
780 try:
781 util.unlink(path)
781 util.unlink(path)
782 except OSError:
782 except OSError:
783 pass
783 pass
784
784
785
785
786 class pathhelper(object):
786 class pathhelper(object):
787 """helper for getting paths for lockfile, linelog and revmap"""
787 """helper for getting paths for lockfile, linelog and revmap"""
788
788
789 def __init__(self, repo, path, opts=defaultopts):
789 def __init__(self, repo, path, opts=defaultopts):
790 # different options use different directories
790 # different options use different directories
791 self._vfspath = os.path.join(
791 self._vfspath = os.path.join(
792 b'fastannotate', opts.shortstr, encodedir(path)
792 b'fastannotate', opts.shortstr, encodedir(path)
793 )
793 )
794 self._repo = repo
794 self._repo = repo
795
795
796 @property
796 @property
797 def dirname(self):
797 def dirname(self):
798 return os.path.dirname(self._repo.vfs.join(self._vfspath))
798 return os.path.dirname(self._repo.vfs.join(self._vfspath))
799
799
800 @property
800 @property
801 def linelogpath(self):
801 def linelogpath(self):
802 return self._repo.vfs.join(self._vfspath + b'.l')
802 return self._repo.vfs.join(self._vfspath + b'.l')
803
803
804 def lock(self):
804 def lock(self):
805 return lockmod.lock(self._repo.vfs, self._vfspath + b'.lock')
805 return lockmod.lock(self._repo.vfs, self._vfspath + b'.lock')
806
806
807 @property
807 @property
808 def revmappath(self):
808 def revmappath(self):
809 return self._repo.vfs.join(self._vfspath + b'.m')
809 return self._repo.vfs.join(self._vfspath + b'.m')
810
810
811
811
812 @contextlib.contextmanager
812 @contextlib.contextmanager
813 def annotatecontext(repo, path, opts=defaultopts, rebuild=False):
813 def annotatecontext(repo, path, opts=defaultopts, rebuild=False):
814 """context needed to perform (fast) annotate on a file
814 """context needed to perform (fast) annotate on a file
815
815
816 an annotatecontext of a single file consists of two structures: the
816 an annotatecontext of a single file consists of two structures: the
817 linelog and the revmap. this function takes care of locking. only 1
817 linelog and the revmap. this function takes care of locking. only 1
818 process is allowed to write that file's linelog and revmap at a time.
818 process is allowed to write that file's linelog and revmap at a time.
819
819
820 when something goes wrong, this function will assume the linelog and the
820 when something goes wrong, this function will assume the linelog and the
821 revmap are in a bad state, and remove them from disk.
821 revmap are in a bad state, and remove them from disk.
822
822
823 use this function in the following way:
823 use this function in the following way:
824
824
825 with annotatecontext(...) as actx:
825 with annotatecontext(...) as actx:
826 actx. ....
826 actx. ....
827 """
827 """
828 helper = pathhelper(repo, path, opts)
828 helper = pathhelper(repo, path, opts)
829 util.makedirs(helper.dirname)
829 util.makedirs(helper.dirname)
830 revmappath = helper.revmappath
830 revmappath = helper.revmappath
831 linelogpath = helper.linelogpath
831 linelogpath = helper.linelogpath
832 actx = None
832 actx = None
833 try:
833 try:
834 with helper.lock():
834 with helper.lock():
835 actx = _annotatecontext(repo, path, linelogpath, revmappath, opts)
835 actx = _annotatecontext(repo, path, linelogpath, revmappath, opts)
836 if rebuild:
836 if rebuild:
837 actx.rebuild()
837 actx.rebuild()
838 yield actx
838 yield actx
839 except Exception:
839 except Exception:
840 if actx is not None:
840 if actx is not None:
841 actx.rebuild()
841 actx.rebuild()
842 repo.ui.debug(b'fastannotate: %s: cache broken and deleted\n' % path)
842 repo.ui.debug(b'fastannotate: %s: cache broken and deleted\n' % path)
843 raise
843 raise
844 finally:
844 finally:
845 if actx is not None:
845 if actx is not None:
846 actx.close()
846 actx.close()
847
847
848
848
849 def fctxannotatecontext(fctx, follow=True, diffopts=None, rebuild=False):
849 def fctxannotatecontext(fctx, follow=True, diffopts=None, rebuild=False):
850 """like annotatecontext but get the context from a fctx. convenient when
850 """like annotatecontext but get the context from a fctx. convenient when
851 used in fctx.annotate
851 used in fctx.annotate
852 """
852 """
853 repo = fctx._repo
853 repo = fctx._repo
854 path = fctx._path
854 path = fctx._path
855 if repo.ui.configbool(b'fastannotate', b'forcefollow', True):
855 if repo.ui.configbool(b'fastannotate', b'forcefollow', True):
856 follow = True
856 follow = True
857 aopts = annotateopts(diffopts=diffopts, followrename=follow)
857 aopts = annotateopts(diffopts=diffopts, followrename=follow)
858 return annotatecontext(repo, path, aopts, rebuild)
858 return annotatecontext(repo, path, aopts, rebuild)
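# a hedged usage sketch (the fctx is assumed to come from the caller):
#
#   with fctxannotatecontext(fctx) as actx:
#       result = actx.annotate(hex(fctx.node()), master=None)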
@@ -1,260 +1,259 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # protocol: logic for a server providing fastannotate support
3 # protocol: logic for a server providing fastannotate support
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import contextlib
8 import contextlib
9 import os
9 import os
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial.pycompat import open
12 from mercurial.pycompat import open
13 from mercurial import (
13 from mercurial import (
14 error,
14 error,
15 extensions,
15 extensions,
16 hg,
16 hg,
17 pycompat,
18 util,
17 util,
19 wireprotov1peer,
18 wireprotov1peer,
20 wireprotov1server,
19 wireprotov1server,
21 )
20 )
22 from mercurial.utils import (
21 from mercurial.utils import (
23 urlutil,
22 urlutil,
24 )
23 )
25 from . import context
24 from . import context
26
25
27 # common
26 # common
28
27
29
28
30 def _getmaster(ui):
29 def _getmaster(ui):
31 """get the mainbranch, and enforce it is set"""
30 """get the mainbranch, and enforce it is set"""
32 master = ui.config(b'fastannotate', b'mainbranch')
31 master = ui.config(b'fastannotate', b'mainbranch')
33 if not master:
32 if not master:
34 raise error.Abort(
33 raise error.Abort(
35 _(
34 _(
36 b'fastannotate.mainbranch is required '
35 b'fastannotate.mainbranch is required '
37 b'for both the client and the server'
36 b'for both the client and the server'
38 )
37 )
39 )
38 )
40 return master
39 return master
41
40
42
41
43 # server-side
42 # server-side
44
43
45
44
46 def _capabilities(orig, repo, proto):
45 def _capabilities(orig, repo, proto):
47 result = orig(repo, proto)
46 result = orig(repo, proto)
48 result.append(b'getannotate')
47 result.append(b'getannotate')
49 return result
48 return result
50
49
51
50
52 def _getannotate(repo, proto, path, lastnode):
51 def _getannotate(repo, proto, path, lastnode):
53 # output:
52 # output:
54 # FILE := vfspath + '\0' + str(size) + '\0' + content
53 # FILE := vfspath + '\0' + str(size) + '\0' + content
55 # OUTPUT := '' | FILE + OUTPUT
54 # OUTPUT := '' | FILE + OUTPUT
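# for example, a response carrying one hypothetical cache file could be
# b'fastannotate/default/foo.l\x002\x00xy' (path, NUL, size, NUL,
# content), with further FILE records simply concatenated.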
56 result = b''
55 result = b''
57 buildondemand = repo.ui.configbool(
56 buildondemand = repo.ui.configbool(
58 b'fastannotate', b'serverbuildondemand', True
57 b'fastannotate', b'serverbuildondemand', True
59 )
58 )
60 with context.annotatecontext(repo, path) as actx:
59 with context.annotatecontext(repo, path) as actx:
61 if buildondemand:
60 if buildondemand:
62 # update before responding to the client
61 # update before responding to the client
63 master = _getmaster(repo.ui)
62 master = _getmaster(repo.ui)
64 try:
63 try:
65 if not actx.isuptodate(master):
64 if not actx.isuptodate(master):
66 actx.annotate(master, master)
65 actx.annotate(master, master)
67 except Exception:
66 except Exception:
68 # non-fast-forward move or corrupted. rebuild automatically.
67 # non-fast-forward move or corrupted. rebuild automatically.
69 actx.rebuild()
68 actx.rebuild()
70 try:
69 try:
71 actx.annotate(master, master)
70 actx.annotate(master, master)
72 except Exception:
71 except Exception:
73 actx.rebuild() # delete files
72 actx.rebuild() # delete files
74 finally:
73 finally:
75 # although the "with" context will also do a close/flush, we
74 # although the "with" context will also do a close/flush, we
76 # need to do it early so we can send the correct response to the
75 # need to do it early so we can send the correct response to the
77 # client.
76 # client.
78 actx.close()
77 actx.close()
79 # send back the full content of revmap and linelog, in the future we
78 # send back the full content of revmap and linelog, in the future we
80 # may want to do some rsync-like fancy updating.
79 # may want to do some rsync-like fancy updating.
81 # the lastnode check is not necessary if the client and the server
80 # the lastnode check is not necessary if the client and the server
82 # agree where the main branch is.
81 # agree where the main branch is.
83 if actx.lastnode != lastnode:
82 if actx.lastnode != lastnode:
84 for p in [actx.revmappath, actx.linelogpath]:
83 for p in [actx.revmappath, actx.linelogpath]:
85 if not os.path.exists(p):
84 if not os.path.exists(p):
86 continue
85 continue
87 with open(p, b'rb') as f:
86 with open(p, b'rb') as f:
88 content = f.read()
87 content = f.read()
89 vfsbaselen = len(repo.vfs.base + b'/')
88 vfsbaselen = len(repo.vfs.base + b'/')
90 relpath = p[vfsbaselen:]
89 relpath = p[vfsbaselen:]
91 result += b'%s\0%d\0%s' % (relpath, len(content), content)
90 result += b'%s\0%d\0%s' % (relpath, len(content), content)
92 return result
91 return result
93
92
94
93
95 def _registerwireprotocommand():
94 def _registerwireprotocommand():
96 if b'getannotate' in wireprotov1server.commands:
95 if b'getannotate' in wireprotov1server.commands:
97 return
96 return
98 wireprotov1server.wireprotocommand(b'getannotate', b'path lastnode')(
97 wireprotov1server.wireprotocommand(b'getannotate', b'path lastnode')(
99 _getannotate
98 _getannotate
100 )
99 )
101
100
102
101
103 def serveruisetup(ui):
102 def serveruisetup(ui):
104 _registerwireprotocommand()
103 _registerwireprotocommand()
105 extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
104 extensions.wrapfunction(wireprotov1server, b'_capabilities', _capabilities)
106
105
107
106
108 # client-side
107 # client-side
109
108
110
109
111 def _parseresponse(payload):
110 def _parseresponse(payload):
112 result = {}
111 result = {}
113 i = 0
112 i = 0
114 l = len(payload) - 1
113 l = len(payload) - 1
115 state = 0 # 0: vfspath, 1: size
114 state = 0 # 0: vfspath, 1: size
116 vfspath = size = b''
115 vfspath = size = b''
117 while i < l:
116 while i < l:
118 ch = payload[i : i + 1]
117 ch = payload[i : i + 1]
119 if ch == b'\0':
118 if ch == b'\0':
120 if state == 1:
119 if state == 1:
121 result[vfspath] = payload[i + 1 : i + 1 + int(size)]
120 result[vfspath] = payload[i + 1 : i + 1 + int(size)]
122 i += int(size)
121 i += int(size)
123 state = 0
122 state = 0
124 vfspath = size = b''
123 vfspath = size = b''
125 elif state == 0:
124 elif state == 0:
126 state = 1
125 state = 1
127 else:
126 else:
128 if state == 1:
127 if state == 1:
129 size += ch
128 size += ch
130 elif state == 0:
129 elif state == 0:
131 vfspath += ch
130 vfspath += ch
132 i += 1
131 i += 1
133 return result
132 return result
134
133
135
134
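As a sanity check of the wire format described in _getannotate (FILE := vfspath + '\0' + str(size) + '\0' + content), here is a minimal round-trip sketch; the path and content are made-up values::

    # serialize one FILE record the way _getannotate does
    vfspath, content = b'fastannotate/default/a.py.l', b'LINELOG'
    payload = b'%s\0%d\0%s' % (vfspath, len(content), content)
    # _parseresponse recovers the path -> content mapping on the client
    assert _parseresponse(payload) == {vfspath: content}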
136 def peersetup(ui, peer):
135 def peersetup(ui, peer):
137 class fastannotatepeer(peer.__class__):
136 class fastannotatepeer(peer.__class__):
138 @wireprotov1peer.batchable
137 @wireprotov1peer.batchable
139 def getannotate(self, path, lastnode=None):
138 def getannotate(self, path, lastnode=None):
140 if not self.capable(b'getannotate'):
139 if not self.capable(b'getannotate'):
141 ui.warn(_(b'remote peer cannot provide annotate cache\n'))
140 ui.warn(_(b'remote peer cannot provide annotate cache\n'))
142 return None, None
141 return None, None
143 else:
142 else:
144 args = {b'path': path, b'lastnode': lastnode or b''}
143 args = {b'path': path, b'lastnode': lastnode or b''}
145 return args, _parseresponse
144 return args, _parseresponse
146
145
147 peer.__class__ = fastannotatepeer
146 peer.__class__ = fastannotatepeer
148
147
149
148
150 @contextlib.contextmanager
149 @contextlib.contextmanager
151 def annotatepeer(repo):
150 def annotatepeer(repo):
152 ui = repo.ui
151 ui = repo.ui
153
152
154 remotedest = ui.config(b'fastannotate', b'remotepath', b'default')
153 remotedest = ui.config(b'fastannotate', b'remotepath', b'default')
155 r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest)
154 r = urlutil.get_unique_pull_path(b'fastannotate', repo, ui, remotedest)
156 remotepath = r[0]
155 remotepath = r[0]
157 peer = hg.peer(ui, {}, remotepath)
156 peer = hg.peer(ui, {}, remotepath)
158
157
159 try:
158 try:
160 yield peer
159 yield peer
161 finally:
160 finally:
162 peer.close()
161 peer.close()
163
162
164
163
165 def clientfetch(repo, paths, lastnodemap=None, peer=None):
164 def clientfetch(repo, paths, lastnodemap=None, peer=None):
166 """download annotate cache from the server for paths"""
165 """download annotate cache from the server for paths"""
167 if not paths:
166 if not paths:
168 return
167 return
169
168
170 if peer is None:
169 if peer is None:
171 with annotatepeer(repo) as peer:
170 with annotatepeer(repo) as peer:
172 return clientfetch(repo, paths, lastnodemap, peer)
171 return clientfetch(repo, paths, lastnodemap, peer)
173
172
174 if lastnodemap is None:
173 if lastnodemap is None:
175 lastnodemap = {}
174 lastnodemap = {}
176
175
177 ui = repo.ui
176 ui = repo.ui
178 results = []
177 results = []
179 with peer.commandexecutor() as batcher:
178 with peer.commandexecutor() as batcher:
180 ui.debug(b'fastannotate: requesting %d files\n' % len(paths))
179 ui.debug(b'fastannotate: requesting %d files\n' % len(paths))
181 for p in paths:
180 for p in paths:
182 results.append(
181 results.append(
183 batcher.callcommand(
182 batcher.callcommand(
184 b'getannotate',
183 b'getannotate',
185 {b'path': p, b'lastnode': lastnodemap.get(p)},
184 {b'path': p, b'lastnode': lastnodemap.get(p)},
186 )
185 )
187 )
186 )
188
187
189 for result in results:
188 for result in results:
190 r = result.result()
189 r = result.result()
191 # TODO: pconvert these paths on the server?
190 # TODO: pconvert these paths on the server?
192 r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)}
191 r = {util.pconvert(p): v for p, v in r.items()}
193 for path in sorted(r):
192 for path in sorted(r):
194 # ignore malicious paths
193 # ignore malicious paths
195 if not path.startswith(b'fastannotate/') or b'/../' in (
194 if not path.startswith(b'fastannotate/') or b'/../' in (
196 path + b'/'
195 path + b'/'
197 ):
196 ):
198 ui.debug(
197 ui.debug(
199 b'fastannotate: ignored malicious path %s\n' % path
198 b'fastannotate: ignored malicious path %s\n' % path
200 )
199 )
201 continue
200 continue
202 content = r[path]
201 content = r[path]
203 if ui.debugflag:
202 if ui.debugflag:
204 ui.debug(
203 ui.debug(
205 b'fastannotate: writing %d bytes to %s\n'
204 b'fastannotate: writing %d bytes to %s\n'
206 % (len(content), path)
205 % (len(content), path)
207 )
206 )
208 repo.vfs.makedirs(os.path.dirname(path))
207 repo.vfs.makedirs(os.path.dirname(path))
209 with repo.vfs(path, b'wb') as f:
208 with repo.vfs(path, b'wb') as f:
210 f.write(content)
209 f.write(content)
211
210
212
211
213 def _filterfetchpaths(repo, paths):
212 def _filterfetchpaths(repo, paths):
214 """return a subset of paths whose history is long and need to fetch linelog
213 """return a subset of paths whose history is long and need to fetch linelog
215 from the server. works with remotefilelog and non-remotefilelog repos.
214 from the server. works with remotefilelog and non-remotefilelog repos.
216 """
215 """
217 threshold = repo.ui.configint(b'fastannotate', b'clientfetchthreshold', 10)
216 threshold = repo.ui.configint(b'fastannotate', b'clientfetchthreshold', 10)
218 if threshold <= 0:
217 if threshold <= 0:
219 return paths
218 return paths
220
219
221 result = []
220 result = []
222 for path in paths:
221 for path in paths:
223 try:
222 try:
224 if len(repo.file(path)) >= threshold:
223 if len(repo.file(path)) >= threshold:
225 result.append(path)
224 result.append(path)
226 except Exception: # file not found etc.
225 except Exception: # file not found etc.
227 result.append(path)
226 result.append(path)
228
227
229 return result
228 return result
230
229
231
230
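The cut-off used above is configurable; for example, to only fetch caches for files with at least 100 filelog revisions (value chosen only for illustration)::

    [fastannotate]
    clientfetchthreshold = 100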
232 def localreposetup(ui, repo):
231 def localreposetup(ui, repo):
233 class fastannotaterepo(repo.__class__):
232 class fastannotaterepo(repo.__class__):
234 def prefetchfastannotate(self, paths, peer=None):
233 def prefetchfastannotate(self, paths, peer=None):
235 master = _getmaster(self.ui)
234 master = _getmaster(self.ui)
236 needupdatepaths = []
235 needupdatepaths = []
237 lastnodemap = {}
236 lastnodemap = {}
238 try:
237 try:
239 for path in _filterfetchpaths(self, paths):
238 for path in _filterfetchpaths(self, paths):
240 with context.annotatecontext(self, path) as actx:
239 with context.annotatecontext(self, path) as actx:
241 if not actx.isuptodate(master, strict=False):
240 if not actx.isuptodate(master, strict=False):
242 needupdatepaths.append(path)
241 needupdatepaths.append(path)
243 lastnodemap[path] = actx.lastnode
242 lastnodemap[path] = actx.lastnode
244 if needupdatepaths:
243 if needupdatepaths:
245 clientfetch(self, needupdatepaths, lastnodemap, peer)
244 clientfetch(self, needupdatepaths, lastnodemap, peer)
246 except Exception as ex:
245 except Exception as ex:
247 # could be e.g. a directory that is not writable; not fatal
246 # could be e.g. a directory that is not writable; not fatal
248 self.ui.debug(b'fastannotate: prefetch failed: %r\n' % ex)
247 self.ui.debug(b'fastannotate: prefetch failed: %r\n' % ex)
249
248
250 repo.__class__ = fastannotaterepo
249 repo.__class__ = fastannotaterepo
251
250
252
251
253 def clientreposetup(ui, repo):
252 def clientreposetup(ui, repo):
254 _registerwireprotocommand()
253 _registerwireprotocommand()
255 if repo.local():
254 if repo.local():
256 localreposetup(ui, repo)
255 localreposetup(ui, repo)
257 # TODO: this mutates global state, but only if at least one repo
256 # TODO: this mutates global state, but only if at least one repo
258 # has the extension enabled. This is probably bad for hgweb.
257 # has the extension enabled. This is probably bad for hgweb.
259 if peersetup not in hg.wirepeersetupfuncs:
258 if peersetup not in hg.wirepeersetupfuncs:
260 hg.wirepeersetupfuncs.append(peersetup)
259 hg.wirepeersetupfuncs.append(peersetup)
@@ -1,957 +1,955 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:pattern=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. Any output on standard error
22 fixed file content is expected on standard output. Any output on standard error
23 will be displayed as a warning. If the exit status is not zero, the file will
23 will be displayed as a warning. If the exit status is not zero, the file will
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
25 status but no standard error output. Some values may be substituted into the
26 command::
26 command::
27
27
28 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
29 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
30
30
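For instance, a hypothetical tool using both placeholders could be configured as::

    [fix]
    mytool:command = mytool --name={basename} --repo-path={rootpath}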
31 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
32 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
33 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
34 substituted into the command::
34 substituted into the command::
35
35
36 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
37 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
38
38
39 Deleted sections of a file will be ignored by :linerange, because there is no
39 Deleted sections of a file will be ignored by :linerange, because there is no
40 corresponding line range in the version being fixed.
40 corresponding line range in the version being fixed.
41
41
42 By default, tools that set :linerange will only be executed if there is at least
42 By default, tools that set :linerange will only be executed if there is at least
43 one changed line range. This is meant to prevent accidents like running a code
43 one changed line range. This is meant to prevent accidents like running a code
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
44 formatter in such a way that it unexpectedly reformats the whole file. If such a
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
45 tool needs to operate on unchanged files, it should set the :skipclean suboption
46 to false.
46 to false.
47
47
48 The :pattern suboption determines which files will be passed through each
48 The :pattern suboption determines which files will be passed through each
49 configured tool. See :hg:`help patterns` for possible values. However, all
49 configured tool. See :hg:`help patterns` for possible values. However, all
50 patterns are relative to the repo root, even if that text says they are relative
50 patterns are relative to the repo root, even if that text says they are relative
51 to the current working directory. If there are file arguments to :hg:`fix`, the
51 to the current working directory. If there are file arguments to :hg:`fix`, the
52 intersection of these patterns is used.
52 intersection of these patterns is used.
53
53
54 There is also a configurable limit for the maximum size of file that will be
54 There is also a configurable limit for the maximum size of file that will be
55 processed by :hg:`fix`::
55 processed by :hg:`fix`::
56
56
57 [fix]
57 [fix]
58 maxfilesize = 2MB
58 maxfilesize = 2MB
59
59
60 Normally, execution of configured tools will continue after a failure (indicated
60 Normally, execution of configured tools will continue after a failure (indicated
61 by a non-zero exit status). It can also be configured to abort after the first
61 by a non-zero exit status). It can also be configured to abort after the first
62 such failure, so that no files will be affected if any tool fails. This abort
62 such failure, so that no files will be affected if any tool fails. This abort
63 will also cause :hg:`fix` to exit with a non-zero status::
63 will also cause :hg:`fix` to exit with a non-zero status::
64
64
65 [fix]
65 [fix]
66 failure = abort
66 failure = abort
67
67
68 When multiple tools are configured to affect a file, they execute in an order
68 When multiple tools are configured to affect a file, they execute in an order
69 defined by the :priority suboption. The priority suboption has a default value
69 defined by the :priority suboption. The priority suboption has a default value
70 of zero for each tool. Tools are executed in order of descending priority. The
70 of zero for each tool. Tools are executed in order of descending priority. The
71 execution order of tools with equal priority is unspecified. For example, you
71 execution order of tools with equal priority is unspecified. For example, you
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
72 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
73 in a text file by ensuring that 'sort' runs before 'head'::
73 in a text file by ensuring that 'sort' runs before 'head'::
74
74
75 [fix]
75 [fix]
76 sort:command = sort -n
76 sort:command = sort -n
77 head:command = head -n 10
77 head:command = head -n 10
78 sort:pattern = numbers.txt
78 sort:pattern = numbers.txt
79 head:pattern = numbers.txt
79 head:pattern = numbers.txt
80 sort:priority = 2
80 sort:priority = 2
81 head:priority = 1
81 head:priority = 1
82
82
83 To account for changes made by each tool, the line numbers used for incremental
83 To account for changes made by each tool, the line numbers used for incremental
84 formatting are recomputed before executing the next tool. So, each tool may see
84 formatting are recomputed before executing the next tool. So, each tool may see
85 different values for the arguments added by the :linerange suboption.
85 different values for the arguments added by the :linerange suboption.
86
86
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
87 Each fixer tool is allowed to return some metadata in addition to the fixed file
88 content. The metadata must be placed before the file content on stdout,
88 content. The metadata must be placed before the file content on stdout,
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
89 separated from the file content by a zero byte. The metadata is parsed as a JSON
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
90 value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer tool
91 is expected to produce this metadata encoding if and only if the :metadata
91 is expected to produce this metadata encoding if and only if the :metadata
92 suboption is true::
92 suboption is true::
93
93
94 [fix]
94 [fix]
95 tool:command = tool --prepend-json-metadata
95 tool:command = tool --prepend-json-metadata
96 tool:metadata = true
96 tool:metadata = true
97
97
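With such a tool, its stdout for a single file would take the form (illustrative values; \0 denotes the zero byte)::

    {"fixed_line_count": 3}\0<fixed file content>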
98 The metadata values are passed to hooks, which can be used to print summaries or
98 The metadata values are passed to hooks, which can be used to print summaries or
99 perform other post-fixing work. The supported hooks are::
99 perform other post-fixing work. The supported hooks are::
100
100
101 "postfixfile"
101 "postfixfile"
102 Run once for each file in each revision where any fixer tools made changes
102 Run once for each file in each revision where any fixer tools made changes
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
103 to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
104 and "$HG_METADATA" with a map of fixer names to metadata values from fixer
105 tools that affected the file. Fixer tools that didn't affect the file have a
105 tools that affected the file. Fixer tools that didn't affect the file have a
106 value of None. Only fixer tools that executed are present in the metadata.
106 value of None. Only fixer tools that executed are present in the metadata.
107
107
108 "postfix"
108 "postfix"
109 Run once after all files and revisions have been handled. Provides
109 Run once after all files and revisions have been handled. Provides
110 "$HG_REPLACEMENTS" with information about what revisions were created and
110 "$HG_REPLACEMENTS" with information about what revisions were created and
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
111 made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
112 files in the working copy were updated. Provides a list "$HG_METADATA"
112 files in the working copy were updated. Provides a list "$HG_METADATA"
113 mapping fixer tool names to lists of metadata values returned from
113 mapping fixer tool names to lists of metadata values returned from
114 executions that modified a file. This aggregates the same metadata
114 executions that modified a file. This aggregates the same metadata
115 previously passed to the "postfixfile" hook.
115 previously passed to the "postfixfile" hook.
116
116
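For example, a simple, hypothetical shell hook that logs every fixed file could be configured as::

    [hooks]
    postfixfile = echo "fixed $HG_PATH in revision $HG_REV"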
117 Fixer tools are run in the repository's root directory. This allows them to read
117 Fixer tools are run in the repository's root directory. This allows them to read
118 configuration files from the working copy, or even write to the working copy.
118 configuration files from the working copy, or even write to the working copy.
119 The working copy is not updated to match the revision being fixed. In fact,
119 The working copy is not updated to match the revision being fixed. In fact,
120 several revisions may be fixed in parallel. Writes to the working copy are not
120 several revisions may be fixed in parallel. Writes to the working copy are not
121 amended into the revision being fixed; fixer tools should always write fixed
121 amended into the revision being fixed; fixer tools should always write fixed
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125
125
126 import collections
126 import collections
127 import itertools
127 import itertools
128 import os
128 import os
129 import re
129 import re
130 import subprocess
130 import subprocess
131
131
132 from mercurial.i18n import _
132 from mercurial.i18n import _
133 from mercurial.node import (
133 from mercurial.node import (
134 nullid,
134 nullid,
135 nullrev,
135 nullrev,
136 wdirrev,
136 wdirrev,
137 )
137 )
138
138
139 from mercurial.utils import procutil
139 from mercurial.utils import procutil
140
140
141 from mercurial import (
141 from mercurial import (
142 cmdutil,
142 cmdutil,
143 context,
143 context,
144 copies,
144 copies,
145 error,
145 error,
146 logcmdutil,
146 logcmdutil,
147 match as matchmod,
147 match as matchmod,
148 mdiff,
148 mdiff,
149 merge,
149 merge,
150 mergestate as mergestatemod,
150 mergestate as mergestatemod,
151 pycompat,
151 pycompat,
152 registrar,
152 registrar,
153 rewriteutil,
153 rewriteutil,
154 scmutil,
154 scmutil,
155 util,
155 util,
156 worker,
156 worker,
157 )
157 )
158
158
159 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
159 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
160 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
160 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
161 # be specifying the version(s) of Mercurial they are tested with, or
161 # be specifying the version(s) of Mercurial they are tested with, or
162 # leave the attribute unspecified.
162 # leave the attribute unspecified.
163 testedwith = b'ships-with-hg-core'
163 testedwith = b'ships-with-hg-core'
164
164
165 cmdtable = {}
165 cmdtable = {}
166 command = registrar.command(cmdtable)
166 command = registrar.command(cmdtable)
167
167
168 configtable = {}
168 configtable = {}
169 configitem = registrar.configitem(configtable)
169 configitem = registrar.configitem(configtable)
170
170
171 # Register the suboptions allowed for each configured fixer, and default values.
171 # Register the suboptions allowed for each configured fixer, and default values.
172 FIXER_ATTRS = {
172 FIXER_ATTRS = {
173 b'command': None,
173 b'command': None,
174 b'linerange': None,
174 b'linerange': None,
175 b'pattern': None,
175 b'pattern': None,
176 b'priority': 0,
176 b'priority': 0,
177 b'metadata': False,
177 b'metadata': False,
178 b'skipclean': True,
178 b'skipclean': True,
179 b'enabled': True,
179 b'enabled': True,
180 }
180 }
181
181
182 for key, default in FIXER_ATTRS.items():
182 for key, default in FIXER_ATTRS.items():
183 configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)
183 configitem(b'fix', b'.*:%s$' % key, default=default, generic=True)
184
184
185 # A good default size allows most source code files to be fixed, but avoids
185 # A good default size allows most source code files to be fixed, but avoids
186 # letting fixer tools choke on huge inputs, which could be surprising to the
186 # letting fixer tools choke on huge inputs, which could be surprising to the
187 # user.
187 # user.
188 configitem(b'fix', b'maxfilesize', default=b'2MB')
188 configitem(b'fix', b'maxfilesize', default=b'2MB')
189
189
190 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
190 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
191 # This helps users write shell scripts that stop when a fixer tool signals a
191 # This helps users write shell scripts that stop when a fixer tool signals a
192 # problem.
192 # problem.
193 configitem(b'fix', b'failure', default=b'continue')
193 configitem(b'fix', b'failure', default=b'continue')
194
194
195
195
196 def checktoolfailureaction(ui, message, hint=None):
196 def checktoolfailureaction(ui, message, hint=None):
197 """Abort with 'message' if fix.failure=abort"""
197 """Abort with 'message' if fix.failure=abort"""
198 action = ui.config(b'fix', b'failure')
198 action = ui.config(b'fix', b'failure')
199 if action not in (b'continue', b'abort'):
199 if action not in (b'continue', b'abort'):
200 raise error.Abort(
200 raise error.Abort(
201 _(b'unknown fix.failure action: %s') % (action,),
201 _(b'unknown fix.failure action: %s') % (action,),
202 hint=_(b'use "continue" or "abort"'),
202 hint=_(b'use "continue" or "abort"'),
203 )
203 )
204 if action == b'abort':
204 if action == b'abort':
205 raise error.Abort(message, hint=hint)
205 raise error.Abort(message, hint=hint)
206
206
207
207
208 allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
208 allopt = (b'', b'all', False, _(b'fix all non-public non-obsolete revisions'))
209 baseopt = (
209 baseopt = (
210 b'',
210 b'',
211 b'base',
211 b'base',
212 [],
212 [],
213 _(
213 _(
214 b'revisions to diff against (overrides automatic '
214 b'revisions to diff against (overrides automatic '
215 b'selection, and applies to every revision being '
215 b'selection, and applies to every revision being '
216 b'fixed)'
216 b'fixed)'
217 ),
217 ),
218 _(b'REV'),
218 _(b'REV'),
219 )
219 )
220 revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
220 revopt = (b'r', b'rev', [], _(b'revisions to fix (ADVANCED)'), _(b'REV'))
221 sourceopt = (
221 sourceopt = (
222 b's',
222 b's',
223 b'source',
223 b'source',
224 [],
224 [],
225 _(b'fix the specified revisions and their descendants'),
225 _(b'fix the specified revisions and their descendants'),
226 _(b'REV'),
226 _(b'REV'),
227 )
227 )
228 wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
228 wdiropt = (b'w', b'working-dir', False, _(b'fix the working directory'))
229 wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
229 wholeopt = (b'', b'whole', False, _(b'always fix every line of a file'))
230 usage = _(b'[OPTION]... [FILE]...')
230 usage = _(b'[OPTION]... [FILE]...')
231
231
232
232
233 @command(
233 @command(
234 b'fix',
234 b'fix',
235 [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
235 [allopt, baseopt, revopt, sourceopt, wdiropt, wholeopt],
236 usage,
236 usage,
237 helpcategory=command.CATEGORY_FILE_CONTENTS,
237 helpcategory=command.CATEGORY_FILE_CONTENTS,
238 )
238 )
239 def fix(ui, repo, *pats, **opts):
239 def fix(ui, repo, *pats, **opts):
240 """rewrite file content in changesets or working directory
240 """rewrite file content in changesets or working directory
241
241
242 Runs any configured tools to fix the content of files. Only affects files
242 Runs any configured tools to fix the content of files. Only affects files
243 with changes, unless file arguments are provided. Only affects changed lines
243 with changes, unless file arguments are provided. Only affects changed lines
244 of files, unless the --whole flag is used. Some tools may always affect the
244 of files, unless the --whole flag is used. Some tools may always affect the
245 whole file regardless of --whole.
245 whole file regardless of --whole.
246
246
247 If --working-dir is used, files with uncommitted changes in the working copy
247 If --working-dir is used, files with uncommitted changes in the working copy
248 will be fixed. Note that no backups are made.
248 will be fixed. Note that no backups are made.
249
249
250 If revisions are specified with --source, those revisions and their
250 If revisions are specified with --source, those revisions and their
251 descendants will be checked, and they may be replaced with new revisions
251 descendants will be checked, and they may be replaced with new revisions
252 that have fixed file content. By automatically including the descendants,
252 that have fixed file content. By automatically including the descendants,
253 no merging, rebasing, or evolution will be required. If an ancestor of the
253 no merging, rebasing, or evolution will be required. If an ancestor of the
254 working copy is included, then the working copy itself will also be fixed,
254 working copy is included, then the working copy itself will also be fixed,
255 and the working copy will be updated to the fixed parent.
255 and the working copy will be updated to the fixed parent.
256
256
257 When determining what lines of each file to fix at each revision, the whole
257 When determining what lines of each file to fix at each revision, the whole
258 set of revisions being fixed is considered, so that fixes to earlier
258 set of revisions being fixed is considered, so that fixes to earlier
259 revisions are not forgotten in later ones. The --base flag can be used to
259 revisions are not forgotten in later ones. The --base flag can be used to
260 override this default behavior, though it is not usually desirable to do so.
260 override this default behavior, though it is not usually desirable to do so.
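For example, assuming at least one fixer tool is configured, :hg:`fix --working-dir` fixes uncommitted changes in the working copy, while :hg:`fix -s .` fixes the checked-out changeset, its descendants, and the working copy itself.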
261 """
261 """
262 opts = pycompat.byteskwargs(opts)
262 opts = pycompat.byteskwargs(opts)
263 cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
263 cmdutil.check_at_most_one_arg(opts, b'all', b'source', b'rev')
264 cmdutil.check_incompatible_arguments(
264 cmdutil.check_incompatible_arguments(
265 opts, b'working_dir', [b'all', b'source']
265 opts, b'working_dir', [b'all', b'source']
266 )
266 )
267
267
268 with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
268 with repo.wlock(), repo.lock(), repo.transaction(b'fix'):
269 revstofix = getrevstofix(ui, repo, opts)
269 revstofix = getrevstofix(ui, repo, opts)
270 basectxs = getbasectxs(repo, opts, revstofix)
270 basectxs = getbasectxs(repo, opts, revstofix)
271 workqueue, numitems = getworkqueue(
271 workqueue, numitems = getworkqueue(
272 ui, repo, pats, opts, revstofix, basectxs
272 ui, repo, pats, opts, revstofix, basectxs
273 )
273 )
274 basepaths = getbasepaths(repo, opts, workqueue, basectxs)
274 basepaths = getbasepaths(repo, opts, workqueue, basectxs)
275 fixers = getfixers(ui)
275 fixers = getfixers(ui)
276
276
277 # Rather than letting each worker independently fetch the files
277 # Rather than letting each worker independently fetch the files
278 # (which also would add complications for shared/keepalive
278 # (which also would add complications for shared/keepalive
279 # connections), prefetch them all first.
279 # connections), prefetch them all first.
280 _prefetchfiles(repo, workqueue, basepaths)
280 _prefetchfiles(repo, workqueue, basepaths)
281
281
282 # There are no data dependencies between the workers fixing each file
282 # There are no data dependencies between the workers fixing each file
283 # revision, so we can use all available parallelism.
283 # revision, so we can use all available parallelism.
284 def getfixes(items):
284 def getfixes(items):
285 for srcrev, path, dstrevs in items:
285 for srcrev, path, dstrevs in items:
286 ctx = repo[srcrev]
286 ctx = repo[srcrev]
287 olddata = ctx[path].data()
287 olddata = ctx[path].data()
288 metadata, newdata = fixfile(
288 metadata, newdata = fixfile(
289 ui,
289 ui,
290 repo,
290 repo,
291 opts,
291 opts,
292 fixers,
292 fixers,
293 ctx,
293 ctx,
294 path,
294 path,
295 basepaths,
295 basepaths,
296 basectxs[srcrev],
296 basectxs[srcrev],
297 )
297 )
298 # We ungroup the work items now, because the code that consumes
298 # We ungroup the work items now, because the code that consumes
299 # these results has to handle each dstrev separately, and in
299 # these results has to handle each dstrev separately, and in
300 # topological order. Because these are handled in topological
300 # topological order. Because these are handled in topological
301 # order, it's important that we pass around references to
301 # order, it's important that we pass around references to
302 # "newdata" instead of copying it. Otherwise, we would be
302 # "newdata" instead of copying it. Otherwise, we would be
303 # keeping more copies of file content in memory at a time than
303 # keeping more copies of file content in memory at a time than
304 # if we hadn't bothered to group/deduplicate the work items.
304 # if we hadn't bothered to group/deduplicate the work items.
305 data = newdata if newdata != olddata else None
305 data = newdata if newdata != olddata else None
306 for dstrev in dstrevs:
306 for dstrev in dstrevs:
307 yield (dstrev, path, metadata, data)
307 yield (dstrev, path, metadata, data)
308
308
309 results = worker.worker(
309 results = worker.worker(
310 ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
310 ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
311 )
311 )
312
312
313 # We have to hold on to the data for each successor revision in memory
313 # We have to hold on to the data for each successor revision in memory
314 # until all its parents are committed. We ensure this by committing and
314 # until all its parents are committed. We ensure this by committing and
315 # freeing memory for the revisions in some topological order. This
315 # freeing memory for the revisions in some topological order. This
316 # leaves a little bit of memory efficiency on the table, but also makes
316 # leaves a little bit of memory efficiency on the table, but also makes
317 # the tests deterministic. It might also be considered a feature since
317 # the tests deterministic. It might also be considered a feature since
318 # it makes the results more easily reproducible.
318 # it makes the results more easily reproducible.
319 filedata = collections.defaultdict(dict)
319 filedata = collections.defaultdict(dict)
320 aggregatemetadata = collections.defaultdict(list)
320 aggregatemetadata = collections.defaultdict(list)
321 replacements = {}
321 replacements = {}
322 wdirwritten = False
322 wdirwritten = False
323 commitorder = sorted(revstofix, reverse=True)
323 commitorder = sorted(revstofix, reverse=True)
324 with ui.makeprogress(
324 with ui.makeprogress(
325 topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
325 topic=_(b'fixing'), unit=_(b'files'), total=sum(numitems.values())
326 ) as progress:
326 ) as progress:
327 for rev, path, filerevmetadata, newdata in results:
327 for rev, path, filerevmetadata, newdata in results:
328 progress.increment(item=path)
328 progress.increment(item=path)
329 for fixername, fixermetadata in filerevmetadata.items():
329 for fixername, fixermetadata in filerevmetadata.items():
330 aggregatemetadata[fixername].append(fixermetadata)
330 aggregatemetadata[fixername].append(fixermetadata)
331 if newdata is not None:
331 if newdata is not None:
332 filedata[rev][path] = newdata
332 filedata[rev][path] = newdata
333 hookargs = {
333 hookargs = {
334 b'rev': rev,
334 b'rev': rev,
335 b'path': path,
335 b'path': path,
336 b'metadata': filerevmetadata,
336 b'metadata': filerevmetadata,
337 }
337 }
338 repo.hook(
338 repo.hook(
339 b'postfixfile',
339 b'postfixfile',
340 throw=False,
340 throw=False,
341 **pycompat.strkwargs(hookargs)
341 **pycompat.strkwargs(hookargs)
342 )
342 )
343 numitems[rev] -= 1
343 numitems[rev] -= 1
344 # Apply the fixes for this and any other revisions that are
344 # Apply the fixes for this and any other revisions that are
345 # ready and sitting at the front of the queue. Using a loop here
345 # ready and sitting at the front of the queue. Using a loop here
346 # prevents the queue from being blocked by the first revision to
346 # prevents the queue from being blocked by the first revision to
347 # be ready out of order.
347 # be ready out of order.
348 while commitorder and not numitems[commitorder[-1]]:
348 while commitorder and not numitems[commitorder[-1]]:
349 rev = commitorder.pop()
349 rev = commitorder.pop()
350 ctx = repo[rev]
350 ctx = repo[rev]
351 if rev == wdirrev:
351 if rev == wdirrev:
352 writeworkingdir(repo, ctx, filedata[rev], replacements)
352 writeworkingdir(repo, ctx, filedata[rev], replacements)
353 wdirwritten = bool(filedata[rev])
353 wdirwritten = bool(filedata[rev])
354 else:
354 else:
355 replacerev(ui, repo, ctx, filedata[rev], replacements)
355 replacerev(ui, repo, ctx, filedata[rev], replacements)
356 del filedata[rev]
356 del filedata[rev]
357
357
358 cleanup(repo, replacements, wdirwritten)
358 cleanup(repo, replacements, wdirwritten)
359 hookargs = {
359 hookargs = {
360 b'replacements': replacements,
360 b'replacements': replacements,
361 b'wdirwritten': wdirwritten,
361 b'wdirwritten': wdirwritten,
362 b'metadata': aggregatemetadata,
362 b'metadata': aggregatemetadata,
363 }
363 }
364 repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
364 repo.hook(b'postfix', throw=True, **pycompat.strkwargs(hookargs))
365
365
366
366
367 def cleanup(repo, replacements, wdirwritten):
367 def cleanup(repo, replacements, wdirwritten):
368 """Calls scmutil.cleanupnodes() with the given replacements.
368 """Calls scmutil.cleanupnodes() with the given replacements.
369
369
370 "replacements" is a dict from nodeid to nodeid, with one key and one value
370 "replacements" is a dict from nodeid to nodeid, with one key and one value
371 for every revision that was affected by fixing. This is slightly different
371 for every revision that was affected by fixing. This is slightly different
372 from cleanupnodes().
372 from cleanupnodes().
373
373
374 "wdirwritten" is a bool which tells whether the working copy was affected by
374 "wdirwritten" is a bool which tells whether the working copy was affected by
375 fixing, since it has no entry in "replacements".
375 fixing, since it has no entry in "replacements".
376
376
377 Useful as a hook point for extending "hg fix" with output summarizing the
377 Useful as a hook point for extending "hg fix" with output summarizing the
378 effects of the command, though we choose not to output anything here.
378 effects of the command, though we choose not to output anything here.
379 """
379 """
380 replacements = {
380 replacements = {prec: [succ] for prec, succ in replacements.items()}
381 prec: [succ] for prec, succ in pycompat.iteritems(replacements)
382 }
383 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
381 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
384
382
385
383
386 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
384 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
387 """Constructs a list of files to fix and which revisions each fix applies to
385 """Constructs a list of files to fix and which revisions each fix applies to
388
386
389 To avoid duplicating work, there is usually only one work item for each file
387 To avoid duplicating work, there is usually only one work item for each file
390 revision that might need to be fixed. There can be multiple work items per
388 revision that might need to be fixed. There can be multiple work items per
391 file revision if the same file needs to be fixed in multiple changesets with
389 file revision if the same file needs to be fixed in multiple changesets with
392 different baserevs. Each work item also contains a list of changesets where
390 different baserevs. Each work item also contains a list of changesets where
393 the file's data should be replaced with the fixed data. The work items for
391 the file's data should be replaced with the fixed data. The work items for
394 earlier changesets come earlier in the work queue, to improve pipelining by
392 earlier changesets come earlier in the work queue, to improve pipelining by
395 allowing the first changeset to be replaced while fixes are still being
393 allowing the first changeset to be replaced while fixes are still being
396 computed for later changesets.
394 computed for later changesets.
397
395
398 Also returned is a map from changesets to the count of work items that might
396 Also returned is a map from changesets to the count of work items that might
399 affect each changeset. This is used later to count when all of a changeset's
397 affect each changeset. This is used later to count when all of a changeset's
400 work items have been finished, without having to inspect the remaining work
398 work items have been finished, without having to inspect the remaining work
401 queue in each worker subprocess.
399 queue in each worker subprocess.
402
400
403 The example work item (1, "foo/bar.txt", (1, 2, 3)) means that the data of
401 The example work item (1, "foo/bar.txt", (1, 2, 3)) means that the data of
404 bar.txt should be read from revision 1, then fixed, and written back to
402 bar.txt should be read from revision 1, then fixed, and written back to
405 revisions 1, 2 and 3. Revision 1 is called the "srcrev" and the list of
403 revisions 1, 2 and 3. Revision 1 is called the "srcrev" and the list of
406 revisions is called the "dstrevs". In practice the srcrev is always one of
404 revisions is called the "dstrevs". In practice the srcrev is always one of
407 the dstrevs, and we make that choice when constructing the work item so that
405 the dstrevs, and we make that choice when constructing the work item so that
408 the choice can't be made inconsistently later on. The dstrevs should all
406 the choice can't be made inconsistently later on. The dstrevs should all
409 have the same file revision for the given path, so the choice of srcrev is
407 have the same file revision for the given path, so the choice of srcrev is
410 arbitrary. The wdirrev can be a dstrev and a srcrev.
408 arbitrary. The wdirrev can be a dstrev and a srcrev.
411 """
409 """
412 dstrevmap = collections.defaultdict(list)
410 dstrevmap = collections.defaultdict(list)
413 numitems = collections.defaultdict(int)
411 numitems = collections.defaultdict(int)
414 maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
412 maxfilesize = ui.configbytes(b'fix', b'maxfilesize')
415 for rev in sorted(revstofix):
413 for rev in sorted(revstofix):
416 fixctx = repo[rev]
414 fixctx = repo[rev]
417 match = scmutil.match(fixctx, pats, opts)
415 match = scmutil.match(fixctx, pats, opts)
418 for path in sorted(
416 for path in sorted(
419 pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
417 pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
420 ):
418 ):
421 fctx = fixctx[path]
419 fctx = fixctx[path]
422 if fctx.islink():
420 if fctx.islink():
423 continue
421 continue
424 if fctx.size() > maxfilesize:
422 if fctx.size() > maxfilesize:
425 ui.warn(
423 ui.warn(
426 _(b'ignoring file larger than %s: %s\n')
424 _(b'ignoring file larger than %s: %s\n')
427 % (util.bytecount(maxfilesize), path)
425 % (util.bytecount(maxfilesize), path)
428 )
426 )
429 continue
427 continue
430 baserevs = tuple(ctx.rev() for ctx in basectxs[rev])
428 baserevs = tuple(ctx.rev() for ctx in basectxs[rev])
431 dstrevmap[(fctx.filerev(), baserevs, path)].append(rev)
429 dstrevmap[(fctx.filerev(), baserevs, path)].append(rev)
432 numitems[rev] += 1
430 numitems[rev] += 1
433 workqueue = [
431 workqueue = [
434 (min(dstrevs), path, dstrevs)
432 (min(dstrevs), path, dstrevs)
435 for (_filerev, _baserevs, path), dstrevs in dstrevmap.items()
433 for (_filerev, _baserevs, path), dstrevs in dstrevmap.items()
436 ]
434 ]
437 # Move work items for earlier changesets to the front of the queue, so we
435 # Move work items for earlier changesets to the front of the queue, so we
438 # might be able to replace those changesets (in topological order) while
436 # might be able to replace those changesets (in topological order) while
439 # we're still processing later work items. Note the min() in the previous
437 # we're still processing later work items. Note the min() in the previous
440 # expression, which means we don't need a custom comparator here. The path
438 # expression, which means we don't need a custom comparator here. The path
441 # is also important in the sort order to make the output order stable. There
439 # is also important in the sort order to make the output order stable. There
442 # are some situations where this doesn't help much, but some situations
440 # are some situations where this doesn't help much, but some situations
443 # where it lets us buffer O(1) files instead of O(n) files.
441 # where it lets us buffer O(1) files instead of O(n) files.
444 workqueue.sort()
442 workqueue.sort()
445 return workqueue, numitems
443 return workqueue, numitems
446
444
447
445
448 def getrevstofix(ui, repo, opts):
446 def getrevstofix(ui, repo, opts):
449 """Returns the set of revision numbers that should be fixed"""
447 """Returns the set of revision numbers that should be fixed"""
450 if opts[b'all']:
448 if opts[b'all']:
451 revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
449 revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
452 elif opts[b'source']:
450 elif opts[b'source']:
453 source_revs = logcmdutil.revrange(repo, opts[b'source'])
451 source_revs = logcmdutil.revrange(repo, opts[b'source'])
454 revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
452 revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
455 if wdirrev in source_revs:
453 if wdirrev in source_revs:
456 # `wdir()::` is currently empty, so manually add wdir
454 # `wdir()::` is currently empty, so manually add wdir
457 revs.add(wdirrev)
455 revs.add(wdirrev)
458 if repo[b'.'].rev() in revs:
456 if repo[b'.'].rev() in revs:
459 revs.add(wdirrev)
457 revs.add(wdirrev)
460 else:
458 else:
461 revs = set(logcmdutil.revrange(repo, opts[b'rev']))
459 revs = set(logcmdutil.revrange(repo, opts[b'rev']))
462 if opts.get(b'working_dir'):
460 if opts.get(b'working_dir'):
463 revs.add(wdirrev)
461 revs.add(wdirrev)
464 # Allow fixing only wdir() even if there's an unfinished operation
462 # Allow fixing only wdir() even if there's an unfinished operation
465 if not (len(revs) == 1 and wdirrev in revs):
463 if not (len(revs) == 1 and wdirrev in revs):
466 cmdutil.checkunfinished(repo)
464 cmdutil.checkunfinished(repo)
467 rewriteutil.precheck(repo, revs, b'fix')
465 rewriteutil.precheck(repo, revs, b'fix')
468 if (
466 if (
469 wdirrev in revs
467 wdirrev in revs
470 and mergestatemod.mergestate.read(repo).unresolvedcount()
468 and mergestatemod.mergestate.read(repo).unresolvedcount()
471 ):
469 ):
472 raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
470 raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
473 if not revs:
471 if not revs:
474 raise error.Abort(
472 raise error.Abort(
475 b'no changesets specified', hint=b'use --source or --working-dir'
473 b'no changesets specified', hint=b'use --source or --working-dir'
476 )
474 )
477 return revs
475 return revs
478
476
479
477
480 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
478 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
481 """Returns the set of files that should be fixed in a context
479 """Returns the set of files that should be fixed in a context
482
480
483 The result depends on the base contexts; we include any file that has
481 The result depends on the base contexts; we include any file that has
484 changed relative to any of the base contexts. Base contexts should be
482 changed relative to any of the base contexts. Base contexts should be
485 ancestors of the context being fixed.
483 ancestors of the context being fixed.
486 """
484 """
487 files = set()
485 files = set()
488 for basectx in basectxs:
486 for basectx in basectxs:
489 stat = basectx.status(
487 stat = basectx.status(
490 fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
488 fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
491 )
489 )
492 files.update(
490 files.update(
493 set(
491 set(
494 itertools.chain(
492 itertools.chain(
495 stat.added, stat.modified, stat.clean, stat.unknown
493 stat.added, stat.modified, stat.clean, stat.unknown
496 )
494 )
497 )
495 )
498 )
496 )
499 return files
497 return files
500
498
501
499
502 def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
500 def lineranges(opts, path, basepaths, basectxs, fixctx, content2):
503 """Returns the set of line ranges that should be fixed in a file
501 """Returns the set of line ranges that should be fixed in a file
504
502
505 Of the form [(10, 20), (30, 40)].
503 Of the form [(10, 20), (30, 40)].
506
504
507 This depends on the given base contexts; we must consider lines that have
505 This depends on the given base contexts; we must consider lines that have
508 changed versus any of the base contexts, and whether the file has been
506 changed versus any of the base contexts, and whether the file has been
509 renamed versus any of them.
507 renamed versus any of them.
510
508
511 Another way to understand this is that we exclude line ranges that are
509 Another way to understand this is that we exclude line ranges that are
512 common to the file in all base contexts.
510 common to the file in all base contexts.
513 """
511 """
514 if opts.get(b'whole'):
512 if opts.get(b'whole'):
515 # Return a range containing all lines. Rely on the diff implementation's
513 # Return a range containing all lines. Rely on the diff implementation's
516 # idea of how many lines are in the file, instead of reimplementing it.
514 # idea of how many lines are in the file, instead of reimplementing it.
517 return difflineranges(b'', content2)
515 return difflineranges(b'', content2)
518
516
519 rangeslist = []
517 rangeslist = []
520 for basectx in basectxs:
518 for basectx in basectxs:
521 basepath = basepaths.get((basectx.rev(), fixctx.rev(), path), path)
519 basepath = basepaths.get((basectx.rev(), fixctx.rev(), path), path)
522
520
523 if basepath in basectx:
521 if basepath in basectx:
524 content1 = basectx[basepath].data()
522 content1 = basectx[basepath].data()
525 else:
523 else:
526 content1 = b''
524 content1 = b''
527 rangeslist.extend(difflineranges(content1, content2))
525 rangeslist.extend(difflineranges(content1, content2))
528 return unionranges(rangeslist)
526 return unionranges(rangeslist)
529
527
530
528
531 def getbasepaths(repo, opts, workqueue, basectxs):
529 def getbasepaths(repo, opts, workqueue, basectxs):
532 if opts.get(b'whole'):
530 if opts.get(b'whole'):
533 # Base paths will never be fetched for line range determination.
531 # Base paths will never be fetched for line range determination.
534 return {}
532 return {}
535
533
536 basepaths = {}
534 basepaths = {}
537 for srcrev, path, _dstrevs in workqueue:
535 for srcrev, path, _dstrevs in workqueue:
538 fixctx = repo[srcrev]
536 fixctx = repo[srcrev]
539 for basectx in basectxs[srcrev]:
537 for basectx in basectxs[srcrev]:
540 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
538 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
541 if basepath in basectx:
539 if basepath in basectx:
542 basepaths[(basectx.rev(), fixctx.rev(), path)] = basepath
540 basepaths[(basectx.rev(), fixctx.rev(), path)] = basepath
543 return basepaths
541 return basepaths
544
542
545
543
546 def unionranges(rangeslist):
544 def unionranges(rangeslist):
547 """Return the union of some closed intervals
545 """Return the union of some closed intervals
548
546
549 >>> unionranges([])
547 >>> unionranges([])
550 []
548 []
551 >>> unionranges([(1, 100)])
549 >>> unionranges([(1, 100)])
552 [(1, 100)]
550 [(1, 100)]
553 >>> unionranges([(1, 100), (1, 100)])
551 >>> unionranges([(1, 100), (1, 100)])
554 [(1, 100)]
552 [(1, 100)]
555 >>> unionranges([(1, 100), (2, 100)])
553 >>> unionranges([(1, 100), (2, 100)])
556 [(1, 100)]
554 [(1, 100)]
557 >>> unionranges([(1, 99), (1, 100)])
555 >>> unionranges([(1, 99), (1, 100)])
558 [(1, 100)]
556 [(1, 100)]
559 >>> unionranges([(1, 100), (40, 60)])
557 >>> unionranges([(1, 100), (40, 60)])
560 [(1, 100)]
558 [(1, 100)]
561 >>> unionranges([(1, 49), (50, 100)])
559 >>> unionranges([(1, 49), (50, 100)])
562 [(1, 100)]
560 [(1, 100)]
563 >>> unionranges([(1, 48), (50, 100)])
561 >>> unionranges([(1, 48), (50, 100)])
564 [(1, 48), (50, 100)]
562 [(1, 48), (50, 100)]
565 >>> unionranges([(1, 2), (3, 4), (5, 6)])
563 >>> unionranges([(1, 2), (3, 4), (5, 6)])
566 [(1, 6)]
564 [(1, 6)]
567 """
565 """
568 rangeslist = sorted(set(rangeslist))
566 rangeslist = sorted(set(rangeslist))
569 unioned = []
567 unioned = []
570 if rangeslist:
568 if rangeslist:
571 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
569 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
572 for a, b in rangeslist:
570 for a, b in rangeslist:
573 c, d = unioned[-1]
571 c, d = unioned[-1]
574 if a > d + 1:
572 if a > d + 1:
575 unioned.append((a, b))
573 unioned.append((a, b))
576 else:
574 else:
577 unioned[-1] = (c, max(b, d))
575 unioned[-1] = (c, max(b, d))
578 return unioned
576 return unioned
579
577
580
578
def difflineranges(content1, content2):
    """Return list of line number ranges in content2 that differ from content1.

    Line numbers are 1-based. The numbers are the first and last line contained
    in the range. Single-line ranges have the same line number for the first and
    last line. Excludes any empty ranges that result from lines that are only
    present in content1. Relies on mdiff's idea of where the line endings are in
    the string.

    >>> from mercurial import pycompat
    >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
    >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
    >>> difflineranges2(b'', b'')
    []
    >>> difflineranges2(b'a', b'')
    []
    >>> difflineranges2(b'', b'A')
    [(1, 1)]
    >>> difflineranges2(b'a', b'a')
    []
    >>> difflineranges2(b'a', b'A')
    [(1, 1)]
    >>> difflineranges2(b'ab', b'')
    []
    >>> difflineranges2(b'', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abc', b'ac')
    []
    >>> difflineranges2(b'ab', b'aCb')
    [(2, 2)]
    >>> difflineranges2(b'abc', b'aBc')
    [(2, 2)]
    >>> difflineranges2(b'ab', b'AB')
    [(1, 2)]
    >>> difflineranges2(b'abcde', b'aBcDe')
    [(2, 2), (4, 4)]
    >>> difflineranges2(b'abcde', b'aBCDe')
    [(2, 4)]
    """
    ranges = []
    for lines, kind in mdiff.allblocks(content1, content2):
        firstline, lastline = lines[2:4]
        if kind == b'!' and firstline != lastline:
            ranges.append((firstline + 1, lastline))
    return ranges


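As a rough sketch of how the two helpers above compose (contents and variable
names below are invented for illustration): a line counts as modified if it
differs from *any* base, so the per-base results of difflineranges() are
merged with unionranges()::

    # hypothetical inputs; each base differs from `new` on line 2 only
    base1 = b'one\ntwo\nthree\n'
    base2 = b'one\n2\nthree\n'
    new = b'one\nTWO\nthree\n'

    rangeslist = []
    rangeslist.extend(difflineranges(base1, new))  # [(2, 2)]
    rangeslist.extend(difflineranges(base2, new))  # [(2, 2)]
    print(unionranges(rangeslist))                 # [(2, 2)]
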
def getbasectxs(repo, opts, revstofix):
    """Returns a map of the base contexts for each revision

    The base contexts determine which lines are considered modified when we
    attempt to fix just the modified lines in a file. It also determines which
    files we attempt to fix, so it is important to compute this even when
    --whole is used.
    """
    # The --base flag overrides the usual logic, and we give every revision
    # exactly the set of baserevs that the user specified.
    if opts.get(b'base'):
        baserevs = set(logcmdutil.revrange(repo, opts.get(b'base')))
        if not baserevs:
            baserevs = {nullrev}
        basectxs = {repo[rev] for rev in baserevs}
        return {rev: basectxs for rev in revstofix}

    # Proceed in topological order so that we can easily determine each
    # revision's baserevs by looking at its parents and their baserevs.
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        ctx = repo[rev]
        for pctx in ctx.parents():
            if pctx.rev() in basectxs:
                basectxs[rev].update(basectxs[pctx.rev()])
            else:
                basectxs[rev].add(pctx)
    return basectxs


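For intuition, the same accumulation can be run over a hypothetical
three-revision stack with plain integers standing in for contexts (everything
below is invented for the sketch)::

    import collections

    parents = {2: [1], 3: [2]}  # linear stack: 1 <- 2 <- 3; fix 2 and 3
    revstofix = [2, 3]
    basectxs = collections.defaultdict(set)
    for rev in sorted(revstofix):
        for prev in parents[rev]:
            if prev in basectxs:
                # parent is being fixed too: inherit its bases
                basectxs[rev].update(basectxs[prev])
            else:
                # parent is outside the stack: it is itself a base
                basectxs[rev].add(prev)
    print(dict(basectxs))  # {2: {1}, 3: {1}}
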
def _prefetchfiles(repo, workqueue, basepaths):
    toprefetch = set()

    # Prefetch the files that will be fixed.
    for srcrev, path, _dstrevs in workqueue:
        if srcrev == wdirrev:
            continue
        toprefetch.add((srcrev, path))

    # Prefetch the base contents for lineranges().
    for (baserev, fixrev, path), basepath in basepaths.items():
        toprefetch.add((baserev, basepath))

    if toprefetch:
        scmutil.prefetchfiles(
            repo,
            [
                (rev, scmutil.matchfiles(repo, [path]))
                for rev, path in toprefetch
            ],
        )


def fixfile(ui, repo, opts, fixers, fixctx, path, basepaths, basectxs):
    """Run any configured fixers that should affect the file in this context

    Returns the file content that results from applying the fixers in some order
    starting with the file's content in the fixctx. Fixers that support line
    ranges will affect lines that have changed relative to any of the basectxs
    (i.e. they will only avoid lines that are common to all basectxs).

    A fixer tool's stdout will become the file's new content if and only if it
    exits with code zero. The fixer tool's working directory is the repository's
    root.
    """
    metadata = {}
    newdata = fixctx[path].data()
-    for fixername, fixer in pycompat.iteritems(fixers):
+    for fixername, fixer in fixers.items():
        if fixer.affects(opts, fixctx, path):
            ranges = lineranges(
                opts, path, basepaths, basectxs, fixctx, newdata
            )
            command = fixer.command(ui, path, ranges)
            if command is None:
                continue
            ui.debug(b'subprocess: %s\n' % (command,))
            proc = subprocess.Popen(
                procutil.tonativestr(command),
                shell=True,
                cwd=procutil.tonativestr(repo.root),
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            stdout, stderr = proc.communicate(newdata)
            if stderr:
                showstderr(ui, fixctx.rev(), fixername, stderr)
            newerdata = stdout
            if fixer.shouldoutputmetadata():
                try:
                    metadatajson, newerdata = stdout.split(b'\0', 1)
                    metadata[fixername] = pycompat.json_loads(metadatajson)
                except ValueError:
                    ui.warn(
                        _(b'ignored invalid output from fixer tool: %s\n')
                        % (fixername,)
                    )
                    continue
            else:
                metadata[fixername] = None
            if proc.returncode == 0:
                newdata = newerdata
            else:
                if not stderr:
                    message = _(b'exited with status %d\n') % (proc.returncode,)
                    showstderr(ui, fixctx.rev(), fixername, message)
                checktoolfailureaction(
                    ui,
                    _(b'no fixes will be applied'),
                    hint=_(
                        b'use --config fix.failure=continue to apply any '
                        b'successful fixes anyway'
                    ),
                )
    return metadata, newdata


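To make the subprocess contract concrete, a hypothetical stand-alone fixer
tool (not part of Mercurial) could look like this: it reads the file content
from stdin, writes the fixed content to stdout, exits with status zero, and,
when configured with the ``metadata`` suboption, prefixes its output with a
JSON object and a NUL byte as shouldoutputmetadata() expects::

    #!/usr/bin/env python3
    import json
    import sys

    data = sys.stdin.buffer.read()
    fixed = data.replace(b'\t', b'    ')  # toy "fix": expand tabs

    # JSON metadata, NUL separator, then the fixed content
    meta = json.dumps({'tabs_replaced': data.count(b'\t')}).encode('ascii')
    sys.stdout.buffer.write(meta + b'\0' + fixed)
    sys.exit(0)

A non-zero exit status would instead make fixfile() keep the previous content
and consult fix.failure, as the error path above shows.
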
def showstderr(ui, rev, fixername, stderr):
    """Writes the lines of the stderr string as warnings on the ui

    Uses the revision number and fixername to give more context to each line of
    the error message. Doesn't include file names, since those take up a lot of
    space and would tend to be included in the error message if they were
    relevant.
    """
    for line in re.split(b'[\r\n]+', stderr):
        if line:
            ui.warn(b'[')
            if rev is None:
                ui.warn(_(b'wdir'), label=b'evolve.rev')
            else:
                ui.warn(b'%d' % rev, label=b'evolve.rev')
            ui.warn(b'] %s: %s\n' % (fixername, line))


def writeworkingdir(repo, ctx, filedata, replacements):
    """Write new content to the working copy and check out the new p1 if any

    We check out a new revision if and only if we fixed something in both the
    working directory and its parent revision. This avoids the need for a full
    update/merge, and means that the working directory simply isn't affected
    unless the --working-dir flag is given.

    Directly updates the dirstate for the affected files.
    """
-    for path, data in pycompat.iteritems(filedata):
+    for path, data in filedata.items():
        fctx = ctx[path]
        fctx.write(data, fctx.flags())

    oldp1 = repo.dirstate.p1()
    newp1 = replacements.get(oldp1, oldp1)
    if newp1 != oldp1:
        assert repo.dirstate.p2() == nullid
        with repo.dirstate.parentchange():
            scmutil.movedirstate(repo, repo[newp1])


def replacerev(ui, repo, ctx, filedata, replacements):
    """Commit a new revision like the given one, but with file content changes

    "ctx" is the original revision to be replaced by a modified one.

    "filedata" is a dict that maps paths to their new file content. All other
    paths will be recreated from the original revision without changes.
    "filedata" may contain paths that didn't exist in the original revision;
    they will be added.

    "replacements" is a dict that maps a single node to a single node, and it is
    updated to indicate the original revision is replaced by the newly created
    one. No entry is added if the replacement's node already exists.

    The new revision has the same parents as the old one, unless those parents
    have already been replaced, in which case those replacements are the parents
    of this new revision. Thus, if revisions are replaced in topological order,
    there is no need to rebase them into the original topology later.
    """

    p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
    p1ctx, p2ctx = repo[p1rev], repo[p2rev]
    newp1node = replacements.get(p1ctx.node(), p1ctx.node())
    newp2node = replacements.get(p2ctx.node(), p2ctx.node())

    # We don't want to create a revision that has no changes from the original,
    # but we should if the original revision's parent has been replaced.
    # Otherwise, we would produce an orphan that needs no actual human
    # intervention to evolve. We can't rely on commit() to avoid creating the
    # un-needed revision because the extra field added below produces a new hash
    # regardless of file content changes.
    if (
        not filedata
        and p1ctx.node() not in replacements
        and p2ctx.node() not in replacements
    ):
        return

    extra = ctx.extra().copy()
    extra[b'fix_source'] = ctx.hex()

    wctx = context.overlayworkingctx(repo)
    wctx.setbase(repo[newp1node])
    merge.revert_to(ctx, wc=wctx)
    copies.graftcopies(wctx, ctx, ctx.p1())

    for path in filedata.keys():
        fctx = ctx[path]
        copysource = fctx.copysource()
        wctx.write(path, filedata[path], flags=fctx.flags())
        if copysource:
            wctx.markcopied(path, copysource)

    desc = rewriteutil.update_hash_refs(
        repo,
        ctx.description(),
        {oldnode: [newnode] for oldnode, newnode in replacements.items()},
    )

    memctx = wctx.tomemctx(
        text=desc,
        branch=ctx.branch(),
        extra=extra,
        date=ctx.date(),
        parents=(newp1node, newp2node),
        user=ctx.user(),
    )

    sucnode = memctx.commit()
    prenode = ctx.node()
    if prenode == sucnode:
        ui.debug(b'node %s already existed\n' % (ctx.hex()))
    else:
        replacements[ctx.node()] = sucnode


def getfixers(ui):
    """Returns a map of configured fixer tools indexed by their names

    Each value is a Fixer object with methods that implement the behavior of the
    fixer's config suboptions. Does not validate the config values.
    """
    fixers = {}
    for name in fixernames(ui):
        enabled = ui.configbool(b'fix', name + b':enabled')
        command = ui.config(b'fix', name + b':command')
        pattern = ui.config(b'fix', name + b':pattern')
        linerange = ui.config(b'fix', name + b':linerange')
        priority = ui.configint(b'fix', name + b':priority')
        metadata = ui.configbool(b'fix', name + b':metadata')
        skipclean = ui.configbool(b'fix', name + b':skipclean')
        # Don't use a fixer if it has no pattern configured. It would be
        # dangerous to let it affect all files. It would be pointless to let it
        # affect no files. There is no reasonable subset of files to use as the
        # default.
        if command is None:
            ui.warn(
                _(b'fixer tool has no command configuration: %s\n') % (name,)
            )
        elif pattern is None:
            ui.warn(
                _(b'fixer tool has no pattern configuration: %s\n') % (name,)
            )
        elif not enabled:
            ui.debug(b'ignoring disabled fixer tool: %s\n' % (name,))
        else:
            fixers[name] = Fixer(
                command, pattern, linerange, priority, metadata, skipclean
            )
    return collections.OrderedDict(
        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
    )


def fixernames(ui):
    """Returns the names of [fix] config options that have suboptions"""
    names = set()
    for k, v in ui.configitems(b'fix'):
        if b':' in k:
            names.add(k.split(b':', 1)[0])
    return names


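As a small illustration of the naming convention fixernames() relies on
(config keys invented for the sketch), every ``name:suboption`` key
contributes its prefix once, while plain keys such as fix.failure are
ignored::

    items = [
        (b'myfixer:command', b'mytool -'),
        (b'myfixer:pattern', b'set:**.py'),
        (b'failure', b'abort'),  # no colon: not a fixer suboption
    ]
    names = {k.split(b':', 1)[0] for k, v in items if b':' in k}
    print(names)  # {b'myfixer'}
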
class Fixer(object):
    """Wraps the raw config values for a fixer with methods"""

    def __init__(
        self, command, pattern, linerange, priority, metadata, skipclean
    ):
        self._command = command
        self._pattern = pattern
        self._linerange = linerange
        self._priority = priority
        self._metadata = metadata
        self._skipclean = skipclean

    def affects(self, opts, fixctx, path):
        """Should this fixer run on the file at the given path and context?"""
        repo = fixctx.repo()
        matcher = matchmod.match(
            repo.root, repo.root, [self._pattern], ctx=fixctx
        )
        return matcher(path)

    def shouldoutputmetadata(self):
        """Should the stdout of this fixer start with JSON and a null byte?"""
        return self._metadata

    def command(self, ui, path, ranges):
        """A shell command to use to invoke this fixer on the given file/lines

        May return None if there is no appropriate command to run for the given
        parameters.
        """
        expand = cmdutil.rendercommandtemplate
        parts = [
            expand(
                ui,
                self._command,
                {b'rootpath': path, b'basename': os.path.basename(path)},
            )
        ]
        if self._linerange:
            if self._skipclean and not ranges:
                # No line ranges to fix, so don't run the fixer.
                return None
            for first, last in ranges:
                parts.append(
                    expand(
                        ui, self._linerange, {b'first': first, b'last': last}
                    )
                )
        return b' '.join(parts)
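
Tying the suboptions together, a configuration along the following lines
(fixer name and tool invented for the example) exercises the template keys
Fixer.command() expands: {rootpath} and {basename} for the command itself,
and {first}/{last} once per line range::

    [fix]
    myformatter:command = clang-format --assume-filename={rootpath}
    myformatter:linerange = --lines={first}:{last}
    myformatter:pattern = set:**.cpp or **.hpp
    myformatter:priority = 10
    myformatter:skipclean = true

With skipclean set, command() returns None when no ranges changed, so the
tool is not spawned at all.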
@@ -1,1004 +1,1000 b''
# __init__.py - fsmonitor initialization and overrides
#
# Copyright 2013-2016 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''Faster status operations with the Watchman file monitor (EXPERIMENTAL)

Integrates the file-watching program Watchman with Mercurial to produce faster
status results.

On a particular Linux system, for a real-world repository with over 400,000
files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same
system, with fsmonitor it takes about 0.3 seconds.

fsmonitor requires no configuration -- it will tell Watchman about your
repository as necessary. You'll need to install Watchman from
https://facebook.github.io/watchman/ and make sure it is in your PATH.

fsmonitor is incompatible with the largefiles and eol extensions, and
will disable itself if any of those are active.

The following configuration options exist:

::

    [fsmonitor]
    mode = {off, on, paranoid}

When `mode = off`, fsmonitor will disable itself (similar to not loading the
extension at all). When `mode = on`, fsmonitor will be enabled (the default).
When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem,
and ensure that the results are consistent.

::

    [fsmonitor]
    timeout = (float)

A value, in seconds, that determines how long fsmonitor will wait for Watchman
to return results. Defaults to `2.0`.

::

    [fsmonitor]
    blacklistusers = (list of userids)

A list of usernames for which fsmonitor will disable itself altogether.

::

    [fsmonitor]
    walk_on_invalidate = (boolean)

Whether or not to walk the whole repo ourselves when our cached state has been
invalidated, for example when Watchman has been restarted or .hgignore rules
have been changed. Walking the repo in that case can result in competing for
I/O with Watchman. For large repos it is recommended to set this value to
false. You may wish to set this to true if you have a very fast filesystem
that can outpace the IPC overhead of getting the result data for the full repo
from Watchman. Defaults to false.

::

    [fsmonitor]
    warn_when_unused = (boolean)

Whether to print a warning during certain operations when fsmonitor would be
beneficial to performance but isn't enabled.

::

    [fsmonitor]
    warn_update_file_count = (integer)
    # or when mercurial is built with rust support
    warn_update_file_count_rust = (integer)

If ``warn_when_unused`` is set and fsmonitor isn't enabled, a warning will
be printed during working directory updates if this many files will be
created.
'''

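Taken together, the options documented above amount to an hgrc along these
lines (the values are only illustrations of the documented types and
defaults)::

    [fsmonitor]
    mode = on
    timeout = 2.0
    walk_on_invalidate = false
    blacklistusers = user1, user2
    warn_when_unused = true
    warn_update_file_count = 50000
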
# Platforms Supported
# ===================
#
# **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably,
#   even under severe loads.
#
# **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor
#   turned on, on case-insensitive HFS+. There has been a reasonable amount of
#   user testing under normal loads.
#
# **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but
#   very little testing has been done.
#
# **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet.
#
# Known Issues
# ============
#
# * fsmonitor will disable itself if any of the following extensions are
#   enabled: largefiles, inotify, eol; or if the repository has subrepos.
# * fsmonitor will produce incorrect results if nested repos that are not
#   subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`.
#
# The issues related to nested repos and subrepos are probably not fundamental
# ones. Patches to fix them are welcome.


import codecs
import os
import stat
import sys
import tempfile
import weakref

from mercurial.i18n import _
from mercurial.node import hex
from mercurial.pycompat import open
from mercurial import (
    context,
    encoding,
    error,
    extensions,
    localrepo,
    merge,
    pathutil,
    pycompat,
    registrar,
    scmutil,
    util,
)
from mercurial import match as matchmod
from mercurial.utils import (
    hashutil,
    stringutil,
)

from . import (
    pywatchman,
    state,
    watchmanclient,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

configtable = {}
configitem = registrar.configitem(configtable)

configitem(
    b'fsmonitor',
    b'mode',
    default=b'on',
)
configitem(
    b'fsmonitor',
    b'walk_on_invalidate',
    default=False,
)
configitem(
    b'fsmonitor',
    b'timeout',
    default=b'2',
)
configitem(
    b'fsmonitor',
    b'blacklistusers',
    default=list,
)
configitem(
    b'fsmonitor',
    b'watchman_exe',
    default=b'watchman',
)
configitem(
    b'fsmonitor',
    b'verbose',
    default=True,
    experimental=True,
)
configitem(
    b'experimental',
    b'fsmonitor.transaction_notify',
    default=False,
)

# This extension is incompatible with the following blacklisted extensions
# and will disable itself when encountering one of these:
_blacklist = [b'largefiles', b'eol']


def debuginstall(ui, fm):
    fm.write(
        b"fsmonitor-watchman",
        _(b"fsmonitor checking for watchman binary... (%s)\n"),
        ui.configpath(b"fsmonitor", b"watchman_exe"),
    )
    root = tempfile.mkdtemp()
    c = watchmanclient.client(ui, root)
    err = None
    try:
        v = c.command(b"version")
        fm.write(
            b"fsmonitor-watchman-version",
            _(b" watchman binary version %s\n"),
            pycompat.bytestr(v["version"]),
        )
    except watchmanclient.Unavailable as e:
        err = stringutil.forcebytestr(e)
    fm.condwrite(
        err,
        b"fsmonitor-watchman-error",
        _(b" watchman binary missing or broken: %s\n"),
        err,
    )
    return 1 if err else 0


def _handleunavailable(ui, state, ex):
    """Exception handler for Watchman interaction exceptions"""
    if isinstance(ex, watchmanclient.Unavailable):
        # experimental config: fsmonitor.verbose
        if ex.warn and ui.configbool(b'fsmonitor', b'verbose'):
            if b'illegal_fstypes' not in stringutil.forcebytestr(ex):
                ui.warn(stringutil.forcebytestr(ex) + b'\n')
        if ex.invalidate:
            state.invalidate()
        # experimental config: fsmonitor.verbose
        if ui.configbool(b'fsmonitor', b'verbose'):
            ui.log(
                b'fsmonitor',
                b'Watchman unavailable: %s\n',
                stringutil.forcebytestr(ex.msg),
            )
    else:
        ui.log(
            b'fsmonitor',
            b'Watchman exception: %s\n',
            stringutil.forcebytestr(ex),
        )


def _hashignore(ignore):
    """Calculate hash for ignore patterns and filenames

    If this information changes between Mercurial invocations, we can't
    rely on Watchman information anymore and have to re-scan the working
    copy.

    """
    sha1 = hashutil.sha1()
    sha1.update(pycompat.byterepr(ignore))
    return pycompat.sysbytes(sha1.hexdigest())


_watchmanencoding = pywatchman.encoding.get_local_encoding()
_fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
_fixencoding = codecs.lookup(_watchmanencoding) != codecs.lookup(_fsencoding)


def _watchmantofsencoding(path):
    """Fix path to match watchman and local filesystem encoding

    watchman's paths encoding can differ from filesystem encoding. For example,
    on Windows, it's always utf-8.
    """
    try:
        decoded = path.decode(_watchmanencoding)
    except UnicodeDecodeError as e:
        raise error.Abort(
            stringutil.forcebytestr(e), hint=b'watchman encoding error'
        )

    try:
        encoded = decoded.encode(_fsencoding, 'strict')
    except UnicodeEncodeError as e:
        raise error.Abort(stringutil.forcebytestr(e))

    return encoded


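The module-level _fixencoding flag computed above relies on codecs.lookup()
collapsing aliases of the same codec; a quick sketch of the comparison it
performs::

    import codecs

    # aliases resolve to one CodecInfo, so no per-path re-encoding is needed
    print(codecs.lookup('utf-8') == codecs.lookup('UTF8'))     # True
    # genuinely different codecs compare unequal and trigger the fixup
    print(codecs.lookup('utf-8') == codecs.lookup('latin-1'))  # False
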
def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
    """Replacement for dirstate.walk, hooking into Watchman.

    Whenever full is False, ignored is False, and the Watchman client is
    available, use Watchman combined with saved state to possibly return only a
    subset of files."""

    def bail(reason):
        self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
        return orig(match, subrepos, unknown, ignored, full=True)

    if full:
        return bail(b'full rewalk requested')
    if ignored:
        return bail(b'listing ignored files')
    if not self._watchmanclient.available():
        return bail(b'client unavailable')
    state = self._fsmonitorstate
    clock, ignorehash, notefiles = state.get()
    if not clock:
        if state.walk_on_invalidate:
            return bail(b'no clock')
        # Initial NULL clock value, see
        # https://facebook.github.io/watchman/docs/clockspec.html
        clock = b'c:0:0'
        notefiles = []

    ignore = self._ignore
    dirignore = self._dirignore
    if unknown:
        if _hashignore(ignore) != ignorehash and clock != b'c:0:0':
            # ignore list changed -- can't rely on Watchman state any more
            if state.walk_on_invalidate:
                return bail(b'ignore rules changed')
            notefiles = []
            clock = b'c:0:0'
    else:
        # always ignore
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    dmap = self._map
    if util.safehasattr(dmap, b'_map'):
        # for better performance, directly access the inner dirstate map if the
        # standard dirstate implementation is in use.
        dmap = dmap._map
    nonnormalset = {
        f
        for f, e in self._map.items()
        if e.v1_state() != b"n" or e.v1_mtime() == -1
    }

    copymap = self._map.copymap
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    normcase = util.normcase
    fresh_instance = False

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        # note that even though we could receive directory entries, we're only
        # interested in checking if a file with the same name exists. So only
        # normalize files if possible.
        normalize = self._normalizefile
        skipstep3 = False
    else:
        normalize = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    if not work and (exact or skipstep3):
        for s in subrepos:
            del results[s]
        del results[b'.hg']
        return results

    # step 2: query Watchman
    try:
        # Use the user-configured timeout for the query.
        # Add a little slack over the top of the user query to allow for
        # overheads while transferring the data
        self._watchmanclient.settimeout(state.timeout + 0.1)
        result = self._watchmanclient.command(
            b'query',
            {
                b'fields': [b'mode', b'mtime', b'size', b'exists', b'name'],
                b'since': clock,
                b'expression': [
                    b'not',
                    [
                        b'anyof',
                        [b'dirname', b'.hg'],
                        [b'name', b'.hg', b'wholename'],
                    ],
                ],
                b'sync_timeout': int(state.timeout * 1000),
                b'empty_on_fresh_instance': state.walk_on_invalidate,
            },
        )
    except Exception as ex:
        _handleunavailable(self._ui, state, ex)
        self._watchmanclient.clearconnection()
        return bail(b'exception during run')
    else:
        # We need to propagate the last observed clock up so that we
        # can use it for our next query
        state.setlastclock(pycompat.sysbytes(result[b'clock']))
        if result[b'is_fresh_instance']:
            if state.walk_on_invalidate:
                state.invalidate()
                return bail(b'fresh instance')
            fresh_instance = True
            # Ignore any prior noteable files from the state info
            notefiles = []

    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
        foldmap = {normcase(k): k for k in results}

    switch_slashes = pycompat.ossep == b'\\'
    # The order of the results is, strictly speaking, undefined.
    # For case changes on a case insensitive filesystem we may receive
    # two entries, one with exists=True and another with exists=False.
    # The exists=True entries in the same response should be interpreted
    # as being happens-after the exists=False entries due to the way that
    # Watchman tracks files. We use this property to reconcile deletes
    # for name case changes.
    for entry in result[b'files']:
        fname = entry[b'name']

        # Watchman always give us a str. Normalize to bytes on Python 3
        # using Watchman's encoding, if needed.
        if not isinstance(fname, bytes):
            fname = fname.encode(_watchmanencoding)

        if _fixencoding:
            fname = _watchmantofsencoding(fname)

        if switch_slashes:
            fname = fname.replace(b'\\', b'/')
        if normalize:
            normed = normcase(fname)
            fname = normalize(fname, True, True)
            foldmap[normed] = fname
        fmode = entry[b'mode']
        fexists = entry[b'exists']
        kind = getkind(fmode)

        if b'/.hg/' in fname or fname.endswith(b'/.hg'):
            return bail(b'nested-repo-detected')

        if not fexists:
            # if marked as deleted and we don't already have a change
            # record, mark it as deleted. If we already have an entry
            # for fname then it was either part of walkexplicit or was
            # an earlier result that was a case change
            if (
                fname not in results
                and fname in dmap
                and (matchalways or matchfn(fname))
            ):
                results[fname] = None
        elif kind == dirkind:
            if fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == regkind or kind == lnkkind:
            if fname in dmap:
                if matchalways or matchfn(fname):
                    results[fname] = entry
            elif (matchalways or matchfn(fname)) and not ignore(fname):
                results[fname] = entry
        elif fname in dmap and (matchalways or matchfn(fname)):
            results[fname] = None

    # step 3: query notable files we don't already know about
    # XXX try not to iterate over the entire dmap
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
        notefiles = {
            normalize(f, True, True)
            for f in notefiles
            if normcase(f) not in foldmap
        }
    visit = {
        f
        for f in notefiles
        if (f not in results and matchfn(f) and (f in dmap or not ignore(f)))
    }

    if not fresh_instance:
        if matchalways:
            visit.update(f for f in nonnormalset if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(
                f for f in nonnormalset if f not in results and matchfn(f)
            )
            visit.update(f for f in copymap if f not in results and matchfn(f))
    else:
        if matchalways:
-            visit.update(
-                f for f, st in pycompat.iteritems(dmap) if f not in results
-            )
+            visit.update(f for f, st in dmap.items() if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(
-                f
-                for f, st in pycompat.iteritems(dmap)
-                if f not in results and matchfn(f)
+                f for f, st in dmap.items() if f not in results and matchfn(f)
            )
            visit.update(f for f in copymap if f not in results and matchfn(f))

    audit = pathutil.pathauditor(self._root, cached=True).check
    auditpass = [f for f in visit if audit(f)]
    auditpass.sort()
    auditfail = visit.difference(auditpass)
    for f in auditfail:
        results[f] = None

    nf = iter(auditpass)
    for st in util.statfiles([join(f) for f in auditpass]):
        f = next(nf)
        if st or f in dmap:
            results[f] = st

    for s in subrepos:
        del results[s]
    del results[b'.hg']
    return results


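The step-2 query that overridewalk() builds can be reproduced by hand with
the bundled pywatchman client; a rough equivalent (repository path, timeout,
and clock value invented for the sketch, API as shipped with this extension)
looks like::

    from hgext.fsmonitor import pywatchman

    c = pywatchman.client(timeout=2.0)
    watch = c.query('watch-project', '/path/to/repo')['watch']
    result = c.query('query', watch, {
        'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
        'since': 'c:0:0',  # NULL clock: report everything
        'expression': ['not', ['anyof',
                               ['dirname', '.hg'],
                               ['name', '.hg', 'wholename']]],
    })
    print(result['clock'], len(result['files']))

The returned clock is what state.setlastclock() persists, so the next query
only reports changes observed since then.
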
def overridestatus(
    orig,
    self,
    node1=b'.',
    node2=None,
    match=None,
    ignored=False,
    clean=False,
    unknown=False,
    listsubrepos=False,
):
    listignored = ignored
    listclean = clean
    listunknown = unknown

    def _cmpsets(l1, l2):
        try:
            if b'FSMONITOR_LOG_FILE' in encoding.environ:
                fn = encoding.environ[b'FSMONITOR_LOG_FILE']
                f = open(fn, b'wb')
            else:
                fn = b'fsmonitorfail.log'
                f = self.vfs.open(fn, b'wb')
        except (IOError, OSError):
            self.ui.warn(_(b'warning: unable to write to %s\n') % fn)
            return

        try:
            for i, (s1, s2) in enumerate(zip(l1, l2)):
                if set(s1) != set(s2):
                    f.write(b'sets at position %d are unequal\n' % i)
                    f.write(b'watchman returned: %r\n' % s1)
                    f.write(b'stat returned: %r\n' % s2)
        finally:
            f.close()

    if isinstance(node1, context.changectx):
        ctx1 = node1
    else:
        ctx1 = self[node1]
    if isinstance(node2, context.changectx):
        ctx2 = node2
    else:
        ctx2 = self[node2]

    working = ctx2.rev() is None
    parentworking = working and ctx1 == self[b'.']
    match = match or matchmod.always()

    # Maybe we can use this opportunity to update Watchman's state.
    # Mercurial uses workingcommitctx and/or memctx to represent the part of
    # the workingctx that is to be committed. So don't update the state in
    # that case.
    # HG_PENDING is set in the environment when the dirstate is being updated
    # in the middle of a transaction; we must not update our state in that
    # case, or we risk forgetting about changes in the working copy.
    updatestate = (
        parentworking
        and match.always()
        and not isinstance(ctx2, (context.workingcommitctx, context.memctx))
        and b'HG_PENDING' not in encoding.environ
    )

    try:
        if self._fsmonitorstate.walk_on_invalidate:
            # Use a short timeout to query the current clock. If that
            # takes too long then we assume that the service will be slow
            # to answer our query.
            # walk_on_invalidate indicates that we prefer to walk the
            # tree ourselves because we can ignore portions that Watchman
            # cannot and we tend to be faster in the warmer buffer cache
            # cases.
            self._watchmanclient.settimeout(0.1)
        else:
            # Give Watchman more time to potentially complete its walk
610 # and return the initial clock. In this mode we assume that
606 # and return the initial clock. In this mode we assume that
611 # the filesystem will be slower than parsing a potentially
607 # the filesystem will be slower than parsing a potentially
612 # very large Watchman result set.
608 # very large Watchman result set.
613 self._watchmanclient.settimeout(self._fsmonitorstate.timeout + 0.1)
609 self._watchmanclient.settimeout(self._fsmonitorstate.timeout + 0.1)
614 startclock = self._watchmanclient.getcurrentclock()
610 startclock = self._watchmanclient.getcurrentclock()
615 except Exception as ex:
611 except Exception as ex:
616 self._watchmanclient.clearconnection()
612 self._watchmanclient.clearconnection()
617 _handleunavailable(self.ui, self._fsmonitorstate, ex)
613 _handleunavailable(self.ui, self._fsmonitorstate, ex)
618 # boo, Watchman failed. bail
614 # boo, Watchman failed. bail
619 return orig(
615 return orig(
620 node1,
616 node1,
621 node2,
617 node2,
622 match,
618 match,
623 listignored,
619 listignored,
624 listclean,
620 listclean,
625 listunknown,
621 listunknown,
626 listsubrepos,
622 listsubrepos,
627 )
623 )
628
624
629 if updatestate:
625 if updatestate:
630 # We need info about unknown files. This may make things slower the
626 # We need info about unknown files. This may make things slower the
631 # first time, but whatever.
627 # first time, but whatever.
632 stateunknown = True
628 stateunknown = True
633 else:
629 else:
634 stateunknown = listunknown
630 stateunknown = listunknown
635
631
636 if updatestate:
632 if updatestate:
637 ps = poststatus(startclock)
633 ps = poststatus(startclock)
638 self.addpostdsstatus(ps)
634 self.addpostdsstatus(ps)
639
635
640 r = orig(
636 r = orig(
641 node1, node2, match, listignored, listclean, stateunknown, listsubrepos
637 node1, node2, match, listignored, listclean, stateunknown, listsubrepos
642 )
638 )
643 modified, added, removed, deleted, unknown, ignored, clean = r
639 modified, added, removed, deleted, unknown, ignored, clean = r
644
640
645 if not listunknown:
641 if not listunknown:
646 unknown = []
642 unknown = []
647
643
648 # don't do paranoid checks if we're not going to query Watchman anyway
644 # don't do paranoid checks if we're not going to query Watchman anyway
649 full = listclean or match.traversedir is not None
645 full = listclean or match.traversedir is not None
650 if self._fsmonitorstate.mode == b'paranoid' and not full:
646 if self._fsmonitorstate.mode == b'paranoid' and not full:
651 # run status again and fall back to the old walk this time
647 # run status again and fall back to the old walk this time
652 self.dirstate._fsmonitordisable = True
648 self.dirstate._fsmonitordisable = True
653
649
654 # shut the UI up
650 # shut the UI up
655 quiet = self.ui.quiet
651 quiet = self.ui.quiet
656 self.ui.quiet = True
652 self.ui.quiet = True
657 fout, ferr = self.ui.fout, self.ui.ferr
653 fout, ferr = self.ui.fout, self.ui.ferr
658 self.ui.fout = self.ui.ferr = open(os.devnull, b'wb')
654 self.ui.fout = self.ui.ferr = open(os.devnull, b'wb')
659
655
660 try:
656 try:
661 rv2 = orig(
657 rv2 = orig(
662 node1,
658 node1,
663 node2,
659 node2,
664 match,
660 match,
665 listignored,
661 listignored,
666 listclean,
662 listclean,
667 listunknown,
663 listunknown,
668 listsubrepos,
664 listsubrepos,
669 )
665 )
670 finally:
666 finally:
671 self.dirstate._fsmonitordisable = False
667 self.dirstate._fsmonitordisable = False
672 self.ui.quiet = quiet
668 self.ui.quiet = quiet
673 self.ui.fout, self.ui.ferr = fout, ferr
669 self.ui.fout, self.ui.ferr = fout, ferr
674
670
675 # clean isn't tested since it's set to True above
671 # clean isn't tested since it's set to True above
676 with self.wlock():
672 with self.wlock():
677 _cmpsets(
673 _cmpsets(
678 [modified, added, removed, deleted, unknown, ignored, clean],
674 [modified, added, removed, deleted, unknown, ignored, clean],
679 rv2,
675 rv2,
680 )
676 )
681 modified, added, removed, deleted, unknown, ignored, clean = rv2
677 modified, added, removed, deleted, unknown, ignored, clean = rv2
682
678
683 return scmutil.status(
679 return scmutil.status(
684 modified, added, removed, deleted, unknown, ignored, clean
680 modified, added, removed, deleted, unknown, ignored, clean
685 )
681 )
686
682
687
683
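The status override above follows a wrap-and-fall-back pattern: try the watchman-backed fast path, and on any failure hand off to the original implementation unchanged. A minimal generic sketch of that idiom (names hypothetical, not part of this diff):

def wrapfast(orig, fastimpl):
    # Prefer the fast implementation; fall back to the wrapped
    # original on any failure, mirroring overridestatus() above.
    def wrapped(*args, **kwargs):
        try:
            return fastimpl(*args, **kwargs)
        except Exception:
            return orig(*args, **kwargs)
    return wrapped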
688 class poststatus(object):
684 class poststatus(object):
689 def __init__(self, startclock):
685 def __init__(self, startclock):
690 self._startclock = pycompat.sysbytes(startclock)
686 self._startclock = pycompat.sysbytes(startclock)
691
687
692 def __call__(self, wctx, status):
688 def __call__(self, wctx, status):
693 clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
689 clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
694 hashignore = _hashignore(wctx.repo().dirstate._ignore)
690 hashignore = _hashignore(wctx.repo().dirstate._ignore)
695 notefiles = (
691 notefiles = (
696 status.modified
692 status.modified
697 + status.added
693 + status.added
698 + status.removed
694 + status.removed
699 + status.deleted
695 + status.deleted
700 + status.unknown
696 + status.unknown
701 )
697 )
702 wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
698 wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
703
699
704
700
705 def makedirstate(repo, dirstate):
701 def makedirstate(repo, dirstate):
706 class fsmonitordirstate(dirstate.__class__):
702 class fsmonitordirstate(dirstate.__class__):
707 def _fsmonitorinit(self, repo):
703 def _fsmonitorinit(self, repo):
708 # _fsmonitordisable is used in paranoid mode
704 # _fsmonitordisable is used in paranoid mode
709 self._fsmonitordisable = False
705 self._fsmonitordisable = False
710 self._fsmonitorstate = repo._fsmonitorstate
706 self._fsmonitorstate = repo._fsmonitorstate
711 self._watchmanclient = repo._watchmanclient
707 self._watchmanclient = repo._watchmanclient
712 self._repo = weakref.proxy(repo)
708 self._repo = weakref.proxy(repo)
713
709
714 def walk(self, *args, **kwargs):
710 def walk(self, *args, **kwargs):
715 orig = super(fsmonitordirstate, self).walk
711 orig = super(fsmonitordirstate, self).walk
716 if self._fsmonitordisable:
712 if self._fsmonitordisable:
717 return orig(*args, **kwargs)
713 return orig(*args, **kwargs)
718 return overridewalk(orig, self, *args, **kwargs)
714 return overridewalk(orig, self, *args, **kwargs)
719
715
720 def rebuild(self, *args, **kwargs):
716 def rebuild(self, *args, **kwargs):
721 self._fsmonitorstate.invalidate()
717 self._fsmonitorstate.invalidate()
722 return super(fsmonitordirstate, self).rebuild(*args, **kwargs)
718 return super(fsmonitordirstate, self).rebuild(*args, **kwargs)
723
719
724 def invalidate(self, *args, **kwargs):
720 def invalidate(self, *args, **kwargs):
725 self._fsmonitorstate.invalidate()
721 self._fsmonitorstate.invalidate()
726 return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
722 return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
727
723
728 dirstate.__class__ = fsmonitordirstate
724 dirstate.__class__ = fsmonitordirstate
729 dirstate._fsmonitorinit(repo)
725 dirstate._fsmonitorinit(repo)
730
726
731
727
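makedirstate() above upgrades an existing dirstate instance by assigning to its __class__ rather than constructing a new object, so cached references to the instance stay valid. A self-contained sketch of the idiom (hypothetical names):

class Base(object):
    def walk(self):
        return 'base'

obj = Base()

class Wrapper(obj.__class__):
    def walk(self):
        # intercept, then delegate to the original method
        return 'wrapped:' + super(Wrapper, self).walk()

obj.__class__ = Wrapper   # the existing instance now routes through Wrapper
assert obj.walk() == 'wrapped:base'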
732 def wrapdirstate(orig, self):
728 def wrapdirstate(orig, self):
733 ds = orig(self)
729 ds = orig(self)
734 # only override the dirstate when Watchman is available for the repo
730 # only override the dirstate when Watchman is available for the repo
735 if util.safehasattr(self, b'_fsmonitorstate'):
731 if util.safehasattr(self, b'_fsmonitorstate'):
736 makedirstate(self, ds)
732 makedirstate(self, ds)
737 return ds
733 return ds
738
734
739
735
740 def extsetup(ui):
736 def extsetup(ui):
741 extensions.wrapfilecache(
737 extensions.wrapfilecache(
742 localrepo.localrepository, b'dirstate', wrapdirstate
738 localrepo.localrepository, b'dirstate', wrapdirstate
743 )
739 )
744 if pycompat.isdarwin:
740 if pycompat.isdarwin:
745 # An assist for avoiding the dangling-symlink fsevents bug
741 # An assist for avoiding the dangling-symlink fsevents bug
746 extensions.wrapfunction(os, b'symlink', wrapsymlink)
742 extensions.wrapfunction(os, b'symlink', wrapsymlink)
747
743
748 extensions.wrapfunction(merge, b'_update', wrapupdate)
744 extensions.wrapfunction(merge, b'_update', wrapupdate)
749
745
750
746
751 def wrapsymlink(orig, source, link_name):
747 def wrapsymlink(orig, source, link_name):
752 """if we create a dangling symlink, also touch the parent dir
748 """if we create a dangling symlink, also touch the parent dir
753 to encourage fsevents notifications to work more correctly"""
749 to encourage fsevents notifications to work more correctly"""
754 try:
750 try:
755 return orig(source, link_name)
751 return orig(source, link_name)
756 finally:
752 finally:
757 try:
753 try:
758 os.utime(os.path.dirname(link_name), None)
754 os.utime(os.path.dirname(link_name), None)
759 except OSError:
755 except OSError:
760 pass
756 pass
761
757
762
758
763 class state_update(object):
759 class state_update(object):
764 """This context manager is responsible for dispatching the state-enter
760 """This context manager is responsible for dispatching the state-enter
765 and state-leave signals to the watchman service. The enter and leave
761 and state-leave signals to the watchman service. The enter and leave
766 methods can be invoked manually (for scenarios where context manager
762 methods can be invoked manually (for scenarios where context manager
767 semantics are not possible). If parameters oldnode and newnode are None,
763 semantics are not possible). If parameters oldnode and newnode are None,
768 they will be populated based on the current working copy in enter and
764 they will be populated based on the current working copy in enter and
769 leave, respectively. Similarly, if the distance is None, it will be
765 leave, respectively. Similarly, if the distance is None, it will be
770 calculated based on the oldnode and newnode in the leave method."""
766 calculated based on the oldnode and newnode in the leave method."""
771
767
772 def __init__(
768 def __init__(
773 self,
769 self,
774 repo,
770 repo,
775 name,
771 name,
776 oldnode=None,
772 oldnode=None,
777 newnode=None,
773 newnode=None,
778 distance=None,
774 distance=None,
779 partial=False,
775 partial=False,
780 ):
776 ):
781 self.repo = repo.unfiltered()
777 self.repo = repo.unfiltered()
782 self.name = name
778 self.name = name
783 self.oldnode = oldnode
779 self.oldnode = oldnode
784 self.newnode = newnode
780 self.newnode = newnode
785 self.distance = distance
781 self.distance = distance
786 self.partial = partial
782 self.partial = partial
787 self._lock = None
783 self._lock = None
788 self.need_leave = False
784 self.need_leave = False
789
785
790 def __enter__(self):
786 def __enter__(self):
791 self.enter()
787 self.enter()
792
788
793 def enter(self):
789 def enter(self):
794 # Make sure we have a wlock prior to sending notifications to watchman.
790 # Make sure we have a wlock prior to sending notifications to watchman.
795 # We don't want to race with other actors. In the update case,
791 # We don't want to race with other actors. In the update case,
796 # merge.update is going to take the wlock almost immediately. We are
792 # merge.update is going to take the wlock almost immediately. We are
797 # effectively extending the lock around several short sanity checks.
793 # effectively extending the lock around several short sanity checks.
798 if self.oldnode is None:
794 if self.oldnode is None:
799 self.oldnode = self.repo[b'.'].node()
795 self.oldnode = self.repo[b'.'].node()
800
796
801 if self.repo.currentwlock() is None:
797 if self.repo.currentwlock() is None:
802 if util.safehasattr(self.repo, b'wlocknostateupdate'):
798 if util.safehasattr(self.repo, b'wlocknostateupdate'):
803 self._lock = self.repo.wlocknostateupdate()
799 self._lock = self.repo.wlocknostateupdate()
804 else:
800 else:
805 self._lock = self.repo.wlock()
801 self._lock = self.repo.wlock()
806 self.need_leave = self._state(b'state-enter', hex(self.oldnode))
802 self.need_leave = self._state(b'state-enter', hex(self.oldnode))
807 return self
803 return self
808
804
809 def __exit__(self, type_, value, tb):
805 def __exit__(self, type_, value, tb):
810 abort = True if type_ else False
806 abort = True if type_ else False
811 self.exit(abort=abort)
807 self.exit(abort=abort)
812
808
813 def exit(self, abort=False):
809 def exit(self, abort=False):
814 try:
810 try:
815 if self.need_leave:
811 if self.need_leave:
816 status = b'failed' if abort else b'ok'
812 status = b'failed' if abort else b'ok'
817 if self.newnode is None:
813 if self.newnode is None:
818 self.newnode = self.repo[b'.'].node()
814 self.newnode = self.repo[b'.'].node()
819 if self.distance is None:
815 if self.distance is None:
820 self.distance = calcdistance(
816 self.distance = calcdistance(
821 self.repo, self.oldnode, self.newnode
817 self.repo, self.oldnode, self.newnode
822 )
818 )
823 self._state(b'state-leave', hex(self.newnode), status=status)
819 self._state(b'state-leave', hex(self.newnode), status=status)
824 finally:
820 finally:
825 self.need_leave = False
821 self.need_leave = False
826 if self._lock:
822 if self._lock:
827 self._lock.release()
823 self._lock.release()
828
824
829 def _state(self, cmd, commithash, status=b'ok'):
825 def _state(self, cmd, commithash, status=b'ok'):
830 if not util.safehasattr(self.repo, b'_watchmanclient'):
826 if not util.safehasattr(self.repo, b'_watchmanclient'):
831 return False
827 return False
832 try:
828 try:
833 self.repo._watchmanclient.command(
829 self.repo._watchmanclient.command(
834 cmd,
830 cmd,
835 {
831 {
836 b'name': self.name,
832 b'name': self.name,
837 b'metadata': {
833 b'metadata': {
838 # the target revision
834 # the target revision
839 b'rev': commithash,
835 b'rev': commithash,
840 # approximate number of commits between current and target
836 # approximate number of commits between current and target
841 b'distance': self.distance if self.distance else 0,
837 b'distance': self.distance if self.distance else 0,
842 # success/failure (only really meaningful for state-leave)
838 # success/failure (only really meaningful for state-leave)
843 b'status': status,
839 b'status': status,
844 # whether the working copy parent is changing
840 # whether the working copy parent is changing
845 b'partial': self.partial,
841 b'partial': self.partial,
846 },
842 },
847 },
843 },
848 )
844 )
849 return True
845 return True
850 except Exception as e:
846 except Exception as e:
851 # Swallow any errors; fire and forget
847 # Swallow any errors; fire and forget
852 self.repo.ui.log(
848 self.repo.ui.log(
853 b'watchman', b'Exception %s while running %s\n', e, cmd
849 b'watchman', b'Exception %s while running %s\n', e, cmd
854 )
850 )
855 return False
851 return False
856
852
857
853
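A usage sketch for the state_update context manager defined above; the state name and helper are hypothetical:

# Bracket a bulk working-copy operation with watchman state signals.
# A state-leave with status 'ok' is sent on normal exit, or with
# status 'failed' if the block raises.
with state_update(repo, name=b'hg.myoperation', partial=False):
    apply_working_copy_changes()  # hypothetical helper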
858 # Estimate the distance between two nodes
854 # Estimate the distance between two nodes
859 def calcdistance(repo, oldnode, newnode):
855 def calcdistance(repo, oldnode, newnode):
860 anc = repo.changelog.ancestor(oldnode, newnode)
856 anc = repo.changelog.ancestor(oldnode, newnode)
861 ancrev = repo[anc].rev()
857 ancrev = repo[anc].rev()
862 distance = abs(repo[oldnode].rev() - ancrev) + abs(
858 distance = abs(repo[oldnode].rev() - ancrev) + abs(
863 repo[newnode].rev() - ancrev
859 repo[newnode].rev() - ancrev
864 )
860 )
865 return distance
861 return distance
866
862
867
863
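A worked example of the estimate: with oldnode at rev 12, newnode at rev 20, and their common ancestor at rev 10, calcdistance() returns |12 - 10| + |20 - 10| = 12.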
868 # Bracket working copy updates with calls to the watchman state-enter
864 # Bracket working copy updates with calls to the watchman state-enter
869 # and state-leave commands. This allows clients to perform more intelligent
865 # and state-leave commands. This allows clients to perform more intelligent
870 # settling during bulk file change scenarios
866 # settling during bulk file change scenarios
871 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
867 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
872 def wrapupdate(
868 def wrapupdate(
873 orig,
869 orig,
874 repo,
870 repo,
875 node,
871 node,
876 branchmerge,
872 branchmerge,
877 force,
873 force,
878 ancestor=None,
874 ancestor=None,
879 mergeancestor=False,
875 mergeancestor=False,
880 labels=None,
876 labels=None,
881 matcher=None,
877 matcher=None,
882 **kwargs
878 **kwargs
883 ):
879 ):
884
880
885 distance = 0
881 distance = 0
886 partial = True
882 partial = True
887 oldnode = repo[b'.'].node()
883 oldnode = repo[b'.'].node()
888 newnode = repo[node].node()
884 newnode = repo[node].node()
889 if matcher is None or matcher.always():
885 if matcher is None or matcher.always():
890 partial = False
886 partial = False
891 distance = calcdistance(repo.unfiltered(), oldnode, newnode)
887 distance = calcdistance(repo.unfiltered(), oldnode, newnode)
892
888
893 with state_update(
889 with state_update(
894 repo,
890 repo,
895 name=b"hg.update",
891 name=b"hg.update",
896 oldnode=oldnode,
892 oldnode=oldnode,
897 newnode=newnode,
893 newnode=newnode,
898 distance=distance,
894 distance=distance,
899 partial=partial,
895 partial=partial,
900 ):
896 ):
901 return orig(
897 return orig(
902 repo,
898 repo,
903 node,
899 node,
904 branchmerge,
900 branchmerge,
905 force,
901 force,
906 ancestor,
902 ancestor,
907 mergeancestor,
903 mergeancestor,
908 labels,
904 labels,
909 matcher,
905 matcher,
910 **kwargs
906 **kwargs
911 )
907 )
912
908
913
909
914 def repo_has_depth_one_nested_repo(repo):
910 def repo_has_depth_one_nested_repo(repo):
915 for f in repo.wvfs.listdir():
911 for f in repo.wvfs.listdir():
916 if os.path.isdir(os.path.join(repo.root, f, b'.hg')):
912 if os.path.isdir(os.path.join(repo.root, f, b'.hg')):
917 msg = b'fsmonitor: sub-repository %r detected, fsmonitor disabled\n'
913 msg = b'fsmonitor: sub-repository %r detected, fsmonitor disabled\n'
918 repo.ui.debug(msg % f)
914 repo.ui.debug(msg % f)
919 return True
915 return True
920 return False
916 return False
921
917
922
918
923 def reposetup(ui, repo):
919 def reposetup(ui, repo):
924 # We don't work with largefiles or inotify
920 # We don't work with largefiles or inotify
925 exts = extensions.enabled()
921 exts = extensions.enabled()
926 for ext in _blacklist:
922 for ext in _blacklist:
927 if ext in exts:
923 if ext in exts:
928 ui.warn(
924 ui.warn(
929 _(
925 _(
930 b'The fsmonitor extension is incompatible with the %s '
926 b'The fsmonitor extension is incompatible with the %s '
931 b'extension and has been disabled.\n'
927 b'extension and has been disabled.\n'
932 )
928 )
933 % ext
929 % ext
934 )
930 )
935 return
931 return
936
932
937 if repo.local():
933 if repo.local():
938 # We don't work with subrepos either.
934 # We don't work with subrepos either.
939 #
935 #
940 # checking repo[None].substate can cause a dirstate parse, which is too
936 # checking repo[None].substate can cause a dirstate parse, which is too
941 # slow. Instead, look for a file called .hgsubstate,
937 # slow. Instead, look for a file called .hgsubstate,
942 if repo.wvfs.exists(b'.hgsubstate') or repo.wvfs.exists(b'.hgsub'):
938 if repo.wvfs.exists(b'.hgsubstate') or repo.wvfs.exists(b'.hgsub'):
943 return
939 return
944
940
945 if repo_has_depth_one_nested_repo(repo):
941 if repo_has_depth_one_nested_repo(repo):
946 return
942 return
947
943
948 fsmonitorstate = state.state(repo)
944 fsmonitorstate = state.state(repo)
949 if fsmonitorstate.mode == b'off':
945 if fsmonitorstate.mode == b'off':
950 return
946 return
951
947
952 try:
948 try:
953 client = watchmanclient.client(repo.ui, repo.root)
949 client = watchmanclient.client(repo.ui, repo.root)
954 except Exception as ex:
950 except Exception as ex:
955 _handleunavailable(ui, fsmonitorstate, ex)
951 _handleunavailable(ui, fsmonitorstate, ex)
956 return
952 return
957
953
958 repo._fsmonitorstate = fsmonitorstate
954 repo._fsmonitorstate = fsmonitorstate
959 repo._watchmanclient = client
955 repo._watchmanclient = client
960
956
961 dirstate, cached = localrepo.isfilecached(repo, b'dirstate')
957 dirstate, cached = localrepo.isfilecached(repo, b'dirstate')
962 if cached:
958 if cached:
963 # at this point since fsmonitorstate wasn't present,
959 # at this point since fsmonitorstate wasn't present,
964 # repo.dirstate is not a fsmonitordirstate
960 # repo.dirstate is not a fsmonitordirstate
965 makedirstate(repo, dirstate)
961 makedirstate(repo, dirstate)
966
962
967 class fsmonitorrepo(repo.__class__):
963 class fsmonitorrepo(repo.__class__):
968 def status(self, *args, **kwargs):
964 def status(self, *args, **kwargs):
969 orig = super(fsmonitorrepo, self).status
965 orig = super(fsmonitorrepo, self).status
970 return overridestatus(orig, self, *args, **kwargs)
966 return overridestatus(orig, self, *args, **kwargs)
971
967
972 def wlocknostateupdate(self, *args, **kwargs):
968 def wlocknostateupdate(self, *args, **kwargs):
973 return super(fsmonitorrepo, self).wlock(*args, **kwargs)
969 return super(fsmonitorrepo, self).wlock(*args, **kwargs)
974
970
975 def wlock(self, *args, **kwargs):
971 def wlock(self, *args, **kwargs):
976 l = super(fsmonitorrepo, self).wlock(*args, **kwargs)
972 l = super(fsmonitorrepo, self).wlock(*args, **kwargs)
977 if not ui.configbool(
973 if not ui.configbool(
978 b"experimental", b"fsmonitor.transaction_notify"
974 b"experimental", b"fsmonitor.transaction_notify"
979 ):
975 ):
980 return l
976 return l
981 if l.held != 1:
977 if l.held != 1:
982 return l
978 return l
983 origrelease = l.releasefn
979 origrelease = l.releasefn
984
980
985 def staterelease():
981 def staterelease():
986 if origrelease:
982 if origrelease:
987 origrelease()
983 origrelease()
988 if l.stateupdate:
984 if l.stateupdate:
989 l.stateupdate.exit()
985 l.stateupdate.exit()
990 l.stateupdate = None
986 l.stateupdate = None
991
987
992 try:
988 try:
993 l.stateupdate = None
989 l.stateupdate = None
994 l.stateupdate = state_update(self, name=b"hg.transaction")
990 l.stateupdate = state_update(self, name=b"hg.transaction")
995 l.stateupdate.enter()
991 l.stateupdate.enter()
996 l.releasefn = staterelease
992 l.releasefn = staterelease
997 except Exception as e:
993 except Exception as e:
998 # Swallow any errors; fire and forget
994 # Swallow any errors; fire and forget
999 self.ui.log(
995 self.ui.log(
1000 b'watchman', b'Exception in state update %s\n', e
996 b'watchman', b'Exception in state update %s\n', e
1001 )
997 )
1002 return l
998 return l
1003
999
1004 repo.__class__ = fsmonitorrepo
1000 repo.__class__ = fsmonitorrepo
@@ -1,1269 +1,1269 b''
1 # githelp.py - Try to map Git commands to Mercurial equivalents.
1 # githelp.py - Try to map Git commands to Mercurial equivalents.
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """try mapping git commands to Mercurial commands
7 """try mapping git commands to Mercurial commands
8
8
9 Tries to map a given git command to a Mercurial command:
9 Tries to map a given git command to a Mercurial command:
10
10
11 $ hg githelp -- git checkout master
11 $ hg githelp -- git checkout master
12 hg update master
12 hg update master
13
13
14 If an unknown command or parameter combination is detected, an error is
14 If an unknown command or parameter combination is detected, an error is
15 produced.
15 produced.
16 """
16 """
17
17
18
18
19 import getopt
19 import getopt
20 import re
20 import re
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial import (
23 from mercurial import (
24 encoding,
24 encoding,
25 error,
25 error,
26 fancyopts,
26 fancyopts,
27 pycompat,
27 pycompat,
28 registrar,
28 registrar,
29 scmutil,
29 scmutil,
30 )
30 )
31 from mercurial.utils import procutil
31 from mercurial.utils import procutil
32
32
33 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
33 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
34 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
34 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
35 # be specifying the version(s) of Mercurial they are tested with, or
35 # be specifying the version(s) of Mercurial they are tested with, or
36 # leave the attribute unspecified.
36 # leave the attribute unspecified.
37 testedwith = b'ships-with-hg-core'
37 testedwith = b'ships-with-hg-core'
38
38
39 cmdtable = {}
39 cmdtable = {}
40 command = registrar.command(cmdtable)
40 command = registrar.command(cmdtable)
41
41
42
42
43 def convert(s):
43 def convert(s):
44 if s.startswith(b"origin/"):
44 if s.startswith(b"origin/"):
45 return s[7:]
45 return s[7:]
46 if b'HEAD' in s:
46 if b'HEAD' in s:
47 s = s.replace(b'HEAD', b'.')
47 s = s.replace(b'HEAD', b'.')
48 # HEAD~ in git is .~1 in mercurial
48 # HEAD~ in git is .~1 in mercurial
49 s = re.sub(b'~$', b'~1', s)
49 s = re.sub(b'~$', b'~1', s)
50 return s
50 return s
51
51
52
52
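Worked examples of convert(), following the code above:

convert(b'origin/master')  # -> b'master'  (origin/ prefix stripped)
convert(b'HEAD')           # -> b'.'
convert(b'HEAD~')          # -> b'.~1'    (trailing ~ becomes ~1)
convert(b'HEAD~3')         # -> b'.~3'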
53 @command(
53 @command(
54 b'githelp|git',
54 b'githelp|git',
55 [],
55 [],
56 _(b'hg githelp'),
56 _(b'hg githelp'),
57 helpcategory=command.CATEGORY_HELP,
57 helpcategory=command.CATEGORY_HELP,
58 helpbasic=True,
58 helpbasic=True,
59 )
59 )
60 def githelp(ui, repo, *args, **kwargs):
60 def githelp(ui, repo, *args, **kwargs):
61 """suggests the Mercurial equivalent of the given git command
61 """suggests the Mercurial equivalent of the given git command
62
62
63 Usage: hg githelp -- <git command>
63 Usage: hg githelp -- <git command>
64 """
64 """
65
65
66 if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
66 if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
67 raise error.Abort(
67 raise error.Abort(
68 _(b'missing git command - usage: hg githelp -- <git command>')
68 _(b'missing git command - usage: hg githelp -- <git command>')
69 )
69 )
70
70
71 if args[0] == b'git':
71 if args[0] == b'git':
72 args = args[1:]
72 args = args[1:]
73
73
74 cmd = args[0]
74 cmd = args[0]
75 if not cmd in gitcommands:
75 if not cmd in gitcommands:
76 raise error.Abort(_(b"error: unknown git command %s") % cmd)
76 raise error.Abort(_(b"error: unknown git command %s") % cmd)
77
77
78 ui.pager(b'githelp')
78 ui.pager(b'githelp')
79 args = args[1:]
79 args = args[1:]
80 return gitcommands[cmd](ui, repo, *args, **kwargs)
80 return gitcommands[cmd](ui, repo, *args, **kwargs)
81
81
82
82
83 def parseoptions(ui, cmdoptions, args):
83 def parseoptions(ui, cmdoptions, args):
84 cmdoptions = list(cmdoptions)
84 cmdoptions = list(cmdoptions)
85 opts = {}
85 opts = {}
86 args = list(args)
86 args = list(args)
87 while True:
87 while True:
88 try:
88 try:
89 args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
89 args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
90 break
90 break
91 except getopt.GetoptError as ex:
91 except getopt.GetoptError as ex:
92 if "requires argument" in ex.msg:
92 if "requires argument" in ex.msg:
93 raise
93 raise
94 if ('--' + ex.opt) in ex.msg:
94 if ('--' + ex.opt) in ex.msg:
95 flag = b'--' + pycompat.bytestr(ex.opt)
95 flag = b'--' + pycompat.bytestr(ex.opt)
96 elif ('-' + ex.opt) in ex.msg:
96 elif ('-' + ex.opt) in ex.msg:
97 flag = b'-' + pycompat.bytestr(ex.opt)
97 flag = b'-' + pycompat.bytestr(ex.opt)
98 else:
98 else:
99 raise error.Abort(
99 raise error.Abort(
100 _(b"unknown option %s") % pycompat.bytestr(ex.opt)
100 _(b"unknown option %s") % pycompat.bytestr(ex.opt)
101 )
101 )
102 try:
102 try:
103 args.remove(flag)
103 args.remove(flag)
104 except Exception:
104 except Exception:
105 msg = _(b"unknown option '%s' packed with other options")
105 msg = _(b"unknown option '%s' packed with other options")
106 hint = _(b"please try passing the option as its own flag: -%s")
106 hint = _(b"please try passing the option as its own flag: -%s")
107 raise error.Abort(
107 raise error.Abort(
108 msg % pycompat.bytestr(ex.opt),
108 msg % pycompat.bytestr(ex.opt),
109 hint=hint % pycompat.bytestr(ex.opt),
109 hint=hint % pycompat.bytestr(ex.opt),
110 )
110 )
111
111
112 ui.warn(_(b"ignoring unknown option %s\n") % flag)
112 ui.warn(_(b"ignoring unknown option %s\n") % flag)
113
113
114 args = list([convert(x) for x in args])
114 args = list([convert(x) for x in args])
115 opts = dict(
115 opts = dict(
116 [
116 [
117 (k, convert(v)) if isinstance(v, bytes) else (k, v)
117 (k, convert(v)) if isinstance(v, bytes) else (k, v)
118 for k, v in pycompat.iteritems(opts)
118 for k, v in opts.items()
119 ]
119 ]
120 )
120 )
121
121
122 return args, opts
122 return args, opts
123
123
124
124
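A usage sketch for parseoptions(), assuming a ui object is in scope:

cmdoptions = [(b'f', b'force', None, b'')]
args, opts = parseoptions(ui, cmdoptions, [b'--force', b'HEAD'])
# args == [b'.']        (convert() rewrites HEAD to .)
# opts.get(b'force')    is truthy; unknown flags are warned about and dropped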
125 class Command(object):
125 class Command(object):
126 def __init__(self, name):
126 def __init__(self, name):
127 self.name = name
127 self.name = name
128 self.args = []
128 self.args = []
129 self.opts = {}
129 self.opts = {}
130
130
131 def __bytes__(self):
131 def __bytes__(self):
132 cmd = b"hg " + self.name
132 cmd = b"hg " + self.name
133 if self.opts:
133 if self.opts:
134 for k, values in sorted(pycompat.iteritems(self.opts)):
134 for k, values in sorted(self.opts.items()):
135 for v in values:
135 for v in values:
136 if v:
136 if v:
137 if isinstance(v, int):
137 if isinstance(v, int):
138 fmt = b' %s %d'
138 fmt = b' %s %d'
139 else:
139 else:
140 fmt = b' %s %s'
140 fmt = b' %s %s'
141
141
142 cmd += fmt % (k, v)
142 cmd += fmt % (k, v)
143 else:
143 else:
144 cmd += b" %s" % (k,)
144 cmd += b" %s" % (k,)
145 if self.args:
145 if self.args:
146 cmd += b" "
146 cmd += b" "
147 cmd += b" ".join(self.args)
147 cmd += b" ".join(self.args)
148 return cmd
148 return cmd
149
149
150 __str__ = encoding.strmethod(__bytes__)
150 __str__ = encoding.strmethod(__bytes__)
151
151
152 def append(self, value):
152 def append(self, value):
153 self.args.append(value)
153 self.args.append(value)
154
154
155 def extend(self, values):
155 def extend(self, values):
156 self.args.extend(values)
156 self.args.extend(values)
157
157
158 def __setitem__(self, key, value):
158 def __setitem__(self, key, value):
159 values = self.opts.setdefault(key, [])
159 values = self.opts.setdefault(key, [])
160 values.append(value)
160 values.append(value)
161
161
162 def __and__(self, other):
162 def __and__(self, other):
163 return AndCommand(self, other)
163 return AndCommand(self, other)
164
164
165
165
166 class AndCommand(object):
166 class AndCommand(object):
167 def __init__(self, left, right):
167 def __init__(self, left, right):
168 self.left = left
168 self.left = left
169 self.right = right
169 self.right = right
170
170
171 def __str__(self):
171 def __str__(self):
172 return b"%s && %s" % (self.left, self.right)
172 return b"%s && %s" % (self.left, self.right)
173
173
174 def __and__(self, other):
174 def __and__(self, other):
175 return AndCommand(self, other)
175 return AndCommand(self, other)
176
176
177
177
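The Command builder above renders flags and arguments into a shell-style string; a small sketch:

cmd = Command(b'update')
cmd[b'-C'] = None          # an option with value None renders as a bare flag
cmd.append(b'tip')
assert bytes(cmd) == b'hg update -C tip'

Commands can also be chained with &, which AndCommand renders as 'left && right'.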
178 def add(ui, repo, *args, **kwargs):
178 def add(ui, repo, *args, **kwargs):
179 cmdoptions = [
179 cmdoptions = [
180 (b'A', b'all', None, b''),
180 (b'A', b'all', None, b''),
181 (b'p', b'patch', None, b''),
181 (b'p', b'patch', None, b''),
182 ]
182 ]
183 args, opts = parseoptions(ui, cmdoptions, args)
183 args, opts = parseoptions(ui, cmdoptions, args)
184
184
185 if opts.get(b'patch'):
185 if opts.get(b'patch'):
186 ui.status(
186 ui.status(
187 _(
187 _(
188 b"note: Mercurial will commit when complete, "
188 b"note: Mercurial will commit when complete, "
189 b"as there is no staging area in Mercurial\n\n"
189 b"as there is no staging area in Mercurial\n\n"
190 )
190 )
191 )
191 )
192 cmd = Command(b'commit --interactive')
192 cmd = Command(b'commit --interactive')
193 else:
193 else:
194 cmd = Command(b"add")
194 cmd = Command(b"add")
195
195
196 if not opts.get(b'all'):
196 if not opts.get(b'all'):
197 cmd.extend(args)
197 cmd.extend(args)
198 else:
198 else:
199 ui.status(
199 ui.status(
200 _(
200 _(
201 b"note: use hg addremove to remove files that have "
201 b"note: use hg addremove to remove files that have "
202 b"been deleted\n\n"
202 b"been deleted\n\n"
203 )
203 )
204 )
204 )
205
205
206 ui.status((bytes(cmd)), b"\n")
206 ui.status((bytes(cmd)), b"\n")
207
207
208
208
209 def am(ui, repo, *args, **kwargs):
209 def am(ui, repo, *args, **kwargs):
210 cmdoptions = []
210 cmdoptions = []
211 parseoptions(ui, cmdoptions, args)
211 parseoptions(ui, cmdoptions, args)
212 cmd = Command(b'import')
212 cmd = Command(b'import')
213 ui.status(bytes(cmd), b"\n")
213 ui.status(bytes(cmd), b"\n")
214
214
215
215
216 def apply(ui, repo, *args, **kwargs):
216 def apply(ui, repo, *args, **kwargs):
217 cmdoptions = [
217 cmdoptions = [
218 (b'p', b'p', int, b''),
218 (b'p', b'p', int, b''),
219 (b'', b'directory', b'', b''),
219 (b'', b'directory', b'', b''),
220 ]
220 ]
221 args, opts = parseoptions(ui, cmdoptions, args)
221 args, opts = parseoptions(ui, cmdoptions, args)
222
222
223 cmd = Command(b'import --no-commit')
223 cmd = Command(b'import --no-commit')
224 if opts.get(b'p'):
224 if opts.get(b'p'):
225 cmd[b'-p'] = opts.get(b'p')
225 cmd[b'-p'] = opts.get(b'p')
226 if opts.get(b'directory'):
226 if opts.get(b'directory'):
227 cmd[b'--prefix'] = opts.get(b'directory')
227 cmd[b'--prefix'] = opts.get(b'directory')
228 cmd.extend(args)
228 cmd.extend(args)
229
229
230 ui.status((bytes(cmd)), b"\n")
230 ui.status((bytes(cmd)), b"\n")
231
231
232
232
233 def bisect(ui, repo, *args, **kwargs):
233 def bisect(ui, repo, *args, **kwargs):
234 ui.status(_(b"see 'hg help bisect' for how to use bisect\n\n"))
234 ui.status(_(b"see 'hg help bisect' for how to use bisect\n\n"))
235
235
236
236
237 def blame(ui, repo, *args, **kwargs):
237 def blame(ui, repo, *args, **kwargs):
238 cmdoptions = []
238 cmdoptions = []
239 args, opts = parseoptions(ui, cmdoptions, args)
239 args, opts = parseoptions(ui, cmdoptions, args)
240 cmd = Command(b'annotate -udl')
240 cmd = Command(b'annotate -udl')
241 cmd.extend([convert(v) for v in args])
241 cmd.extend([convert(v) for v in args])
242 ui.status((bytes(cmd)), b"\n")
242 ui.status((bytes(cmd)), b"\n")
243
243
244
244
245 def branch(ui, repo, *args, **kwargs):
245 def branch(ui, repo, *args, **kwargs):
246 cmdoptions = [
246 cmdoptions = [
247 (b'', b'set-upstream', None, b''),
247 (b'', b'set-upstream', None, b''),
248 (b'', b'set-upstream-to', b'', b''),
248 (b'', b'set-upstream-to', b'', b''),
249 (b'd', b'delete', None, b''),
249 (b'd', b'delete', None, b''),
250 (b'D', b'delete', None, b''),
250 (b'D', b'delete', None, b''),
251 (b'm', b'move', None, b''),
251 (b'm', b'move', None, b''),
252 (b'M', b'move', None, b''),
252 (b'M', b'move', None, b''),
253 ]
253 ]
254 args, opts = parseoptions(ui, cmdoptions, args)
254 args, opts = parseoptions(ui, cmdoptions, args)
255
255
256 cmd = Command(b"bookmark")
256 cmd = Command(b"bookmark")
257
257
258 if opts.get(b'set_upstream') or opts.get(b'set_upstream_to'):
258 if opts.get(b'set_upstream') or opts.get(b'set_upstream_to'):
259 ui.status(_(b"Mercurial has no concept of upstream branches\n"))
259 ui.status(_(b"Mercurial has no concept of upstream branches\n"))
260 return
260 return
261 elif opts.get(b'delete'):
261 elif opts.get(b'delete'):
262 cmd = Command(b"strip")
262 cmd = Command(b"strip")
263 for branch in args:
263 for branch in args:
264 cmd[b'-B'] = branch
264 cmd[b'-B'] = branch
265 else:
265 else:
266 cmd[b'-B'] = None
266 cmd[b'-B'] = None
267 elif opts.get(b'move'):
267 elif opts.get(b'move'):
268 if len(args) > 0:
268 if len(args) > 0:
269 if len(args) > 1:
269 if len(args) > 1:
270 old = args.pop(0)
270 old = args.pop(0)
271 else:
271 else:
272 # shell command to output the active bookmark for the active
272 # shell command to output the active bookmark for the active
273 # revision
273 # revision
274 old = b'`hg log -T"{activebookmark}" -r .`'
274 old = b'`hg log -T"{activebookmark}" -r .`'
275 else:
275 else:
276 raise error.Abort(_(b'missing newbranch argument'))
276 raise error.Abort(_(b'missing newbranch argument'))
277 new = args[0]
277 new = args[0]
278 cmd[b'-m'] = old
278 cmd[b'-m'] = old
279 cmd.append(new)
279 cmd.append(new)
280 else:
280 else:
281 if len(args) > 1:
281 if len(args) > 1:
282 cmd[b'-r'] = args[1]
282 cmd[b'-r'] = args[1]
283 cmd.append(args[0])
283 cmd.append(args[0])
284 elif len(args) == 1:
284 elif len(args) == 1:
285 cmd.append(args[0])
285 cmd.append(args[0])
286 ui.status((bytes(cmd)), b"\n")
286 ui.status((bytes(cmd)), b"\n")
287
287
288
288
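For instance, the delete and move branches above produce mappings such as:

git branch -m oldname newname   =>  hg bookmark -m oldname newname
git branch -d oldname           =>  hg strip -B oldname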
289 def ispath(repo, string):
289 def ispath(repo, string):
290 """
290 """
291 The first argument to git checkout can either be a revision or a path. Let's
291 The first argument to git checkout can either be a revision or a path. Let's
292 generally assume it's a revision, unless it's obviously a path. There are
292 generally assume it's a revision, unless it's obviously a path. There are
293 too many ways to spell revisions in git for us to reasonably catch all of
293 too many ways to spell revisions in git for us to reasonably catch all of
294 them, so let's be conservative.
294 them, so let's be conservative.
295 """
295 """
296 if scmutil.isrevsymbol(repo, string):
296 if scmutil.isrevsymbol(repo, string):
297 # if it's definitely a revision let's not even check if a file of the
297 # if it's definitely a revision let's not even check if a file of the
298 # same name exists.
298 # same name exists.
299 return False
299 return False
300
300
301 cwd = repo.getcwd()
301 cwd = repo.getcwd()
302 if cwd == b'':
302 if cwd == b'':
303 repopath = string
303 repopath = string
304 else:
304 else:
305 repopath = cwd + b'/' + string
305 repopath = cwd + b'/' + string
306
306
307 exists = repo.wvfs.exists(repopath)
307 exists = repo.wvfs.exists(repopath)
308 if exists:
308 if exists:
309 return True
309 return True
310
310
311 manifest = repo[b'.'].manifest()
311 manifest = repo[b'.'].manifest()
312
312
313 didexist = (repopath in manifest) or manifest.hasdir(repopath)
313 didexist = (repopath in manifest) or manifest.hasdir(repopath)
314
314
315 return didexist
315 return didexist
316
316
317
317
318 def checkout(ui, repo, *args, **kwargs):
318 def checkout(ui, repo, *args, **kwargs):
319 cmdoptions = [
319 cmdoptions = [
320 (b'b', b'branch', b'', b''),
320 (b'b', b'branch', b'', b''),
321 (b'B', b'branch', b'', b''),
321 (b'B', b'branch', b'', b''),
322 (b'f', b'force', None, b''),
322 (b'f', b'force', None, b''),
323 (b'p', b'patch', None, b''),
323 (b'p', b'patch', None, b''),
324 ]
324 ]
325 paths = []
325 paths = []
326 if b'--' in args:
326 if b'--' in args:
327 sepindex = args.index(b'--')
327 sepindex = args.index(b'--')
328 paths.extend(args[sepindex + 1 :])
328 paths.extend(args[sepindex + 1 :])
329 args = args[:sepindex]
329 args = args[:sepindex]
330
330
331 args, opts = parseoptions(ui, cmdoptions, args)
331 args, opts = parseoptions(ui, cmdoptions, args)
332
332
333 rev = None
333 rev = None
334 if args and ispath(repo, args[0]):
334 if args and ispath(repo, args[0]):
335 paths = args + paths
335 paths = args + paths
336 elif args:
336 elif args:
337 rev = args[0]
337 rev = args[0]
338 paths = args[1:] + paths
338 paths = args[1:] + paths
339
339
340 cmd = Command(b'update')
340 cmd = Command(b'update')
341
341
342 if opts.get(b'force'):
342 if opts.get(b'force'):
343 if paths or rev:
343 if paths or rev:
344 cmd[b'-C'] = None
344 cmd[b'-C'] = None
345
345
346 if opts.get(b'patch'):
346 if opts.get(b'patch'):
347 cmd = Command(b'revert')
347 cmd = Command(b'revert')
348 cmd[b'-i'] = None
348 cmd[b'-i'] = None
349
349
350 if opts.get(b'branch'):
350 if opts.get(b'branch'):
351 if len(args) == 0:
351 if len(args) == 0:
352 cmd = Command(b'bookmark')
352 cmd = Command(b'bookmark')
353 cmd.append(opts.get(b'branch'))
353 cmd.append(opts.get(b'branch'))
354 else:
354 else:
355 cmd.append(args[0])
355 cmd.append(args[0])
356 bookcmd = Command(b'bookmark')
356 bookcmd = Command(b'bookmark')
357 bookcmd.append(opts.get(b'branch'))
357 bookcmd.append(opts.get(b'branch'))
358 cmd = cmd & bookcmd
358 cmd = cmd & bookcmd
359 # if there is any path argument supplied, use revert instead of update
359 # if there is any path argument supplied, use revert instead of update
360 elif len(paths) > 0:
360 elif len(paths) > 0:
361 ui.status(_(b"note: use --no-backup to avoid creating .orig files\n\n"))
361 ui.status(_(b"note: use --no-backup to avoid creating .orig files\n\n"))
362 cmd = Command(b'revert')
362 cmd = Command(b'revert')
363 if opts.get(b'patch'):
363 if opts.get(b'patch'):
364 cmd[b'-i'] = None
364 cmd[b'-i'] = None
365 if rev:
365 if rev:
366 cmd[b'-r'] = rev
366 cmd[b'-r'] = rev
367 cmd.extend(paths)
367 cmd.extend(paths)
368 elif rev:
368 elif rev:
369 if opts.get(b'patch'):
369 if opts.get(b'patch'):
370 cmd[b'-r'] = rev
370 cmd[b'-r'] = rev
371 else:
371 else:
372 cmd.append(rev)
372 cmd.append(rev)
373 elif opts.get(b'force'):
373 elif opts.get(b'force'):
374 cmd = Command(b'revert')
374 cmd = Command(b'revert')
375 cmd[b'--all'] = None
375 cmd[b'--all'] = None
376 else:
376 else:
377 raise error.Abort(_(b"a commit must be specified"))
377 raise error.Abort(_(b"a commit must be specified"))
378
378
379 ui.status((bytes(cmd)), b"\n")
379 ui.status((bytes(cmd)), b"\n")
380
380
381
381
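Examples of the resulting mappings (rev and file names are placeholders):

git checkout -b topic           =>  hg bookmark topic
git checkout -b topic <rev>     =>  hg update <rev> && hg bookmark topic
git checkout -- <file>          =>  hg revert <file>   (with a note about .orig backups)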
382 def cherrypick(ui, repo, *args, **kwargs):
382 def cherrypick(ui, repo, *args, **kwargs):
383 cmdoptions = [
383 cmdoptions = [
384 (b'', b'continue', None, b''),
384 (b'', b'continue', None, b''),
385 (b'', b'abort', None, b''),
385 (b'', b'abort', None, b''),
386 (b'e', b'edit', None, b''),
386 (b'e', b'edit', None, b''),
387 ]
387 ]
388 args, opts = parseoptions(ui, cmdoptions, args)
388 args, opts = parseoptions(ui, cmdoptions, args)
389
389
390 cmd = Command(b'graft')
390 cmd = Command(b'graft')
391
391
392 if opts.get(b'edit'):
392 if opts.get(b'edit'):
393 cmd[b'--edit'] = None
393 cmd[b'--edit'] = None
394 if opts.get(b'continue'):
394 if opts.get(b'continue'):
395 cmd[b'--continue'] = None
395 cmd[b'--continue'] = None
396 elif opts.get(b'abort'):
396 elif opts.get(b'abort'):
397 ui.status(_(b"note: hg graft does not have --abort\n\n"))
397 ui.status(_(b"note: hg graft does not have --abort\n\n"))
398 return
398 return
399 else:
399 else:
400 cmd.extend(args)
400 cmd.extend(args)
401
401
402 ui.status((bytes(cmd)), b"\n")
402 ui.status((bytes(cmd)), b"\n")
403
403
404
404
405 def clean(ui, repo, *args, **kwargs):
405 def clean(ui, repo, *args, **kwargs):
406 cmdoptions = [
406 cmdoptions = [
407 (b'd', b'd', None, b''),
407 (b'd', b'd', None, b''),
408 (b'f', b'force', None, b''),
408 (b'f', b'force', None, b''),
409 (b'x', b'x', None, b''),
409 (b'x', b'x', None, b''),
410 ]
410 ]
411 args, opts = parseoptions(ui, cmdoptions, args)
411 args, opts = parseoptions(ui, cmdoptions, args)
412
412
413 cmd = Command(b'purge')
413 cmd = Command(b'purge')
414 if opts.get(b'x'):
414 if opts.get(b'x'):
415 cmd[b'--all'] = None
415 cmd[b'--all'] = None
416 cmd.extend(args)
416 cmd.extend(args)
417
417
418 ui.status((bytes(cmd)), b"\n")
418 ui.status((bytes(cmd)), b"\n")
419
419
420
420
421 def clone(ui, repo, *args, **kwargs):
421 def clone(ui, repo, *args, **kwargs):
422 cmdoptions = [
422 cmdoptions = [
423 (b'', b'bare', None, b''),
423 (b'', b'bare', None, b''),
424 (b'n', b'no-checkout', None, b''),
424 (b'n', b'no-checkout', None, b''),
425 (b'b', b'branch', b'', b''),
425 (b'b', b'branch', b'', b''),
426 ]
426 ]
427 args, opts = parseoptions(ui, cmdoptions, args)
427 args, opts = parseoptions(ui, cmdoptions, args)
428
428
429 if len(args) == 0:
429 if len(args) == 0:
430 raise error.Abort(_(b"a repository to clone must be specified"))
430 raise error.Abort(_(b"a repository to clone must be specified"))
431
431
432 cmd = Command(b'clone')
432 cmd = Command(b'clone')
433 cmd.append(args[0])
433 cmd.append(args[0])
434 if len(args) > 1:
434 if len(args) > 1:
435 cmd.append(args[1])
435 cmd.append(args[1])
436
436
437 if opts.get(b'bare'):
437 if opts.get(b'bare'):
438 cmd[b'-U'] = None
438 cmd[b'-U'] = None
439 ui.status(
439 ui.status(
440 _(
440 _(
441 b"note: Mercurial does not have bare clones. "
441 b"note: Mercurial does not have bare clones. "
442 b"-U will clone the repo without checking out a commit\n\n"
442 b"-U will clone the repo without checking out a commit\n\n"
443 )
443 )
444 )
444 )
445 elif opts.get(b'no_checkout'):
445 elif opts.get(b'no_checkout'):
446 cmd[b'-U'] = None
446 cmd[b'-U'] = None
447
447
448 if opts.get(b'branch'):
448 if opts.get(b'branch'):
449 cocmd = Command(b"update")
449 cocmd = Command(b"update")
450 cocmd.append(opts.get(b'branch'))
450 cocmd.append(opts.get(b'branch'))
451 cmd = cmd & cocmd
451 cmd = cmd & cocmd
452
452
453 ui.status((bytes(cmd)), b"\n")
453 ui.status((bytes(cmd)), b"\n")
454
454
455
455
456 def commit(ui, repo, *args, **kwargs):
456 def commit(ui, repo, *args, **kwargs):
457 cmdoptions = [
457 cmdoptions = [
458 (b'a', b'all', None, b''),
458 (b'a', b'all', None, b''),
459 (b'm', b'message', b'', b''),
459 (b'm', b'message', b'', b''),
460 (b'p', b'patch', None, b''),
460 (b'p', b'patch', None, b''),
461 (b'C', b'reuse-message', b'', b''),
461 (b'C', b'reuse-message', b'', b''),
462 (b'F', b'file', b'', b''),
462 (b'F', b'file', b'', b''),
463 (b'', b'author', b'', b''),
463 (b'', b'author', b'', b''),
464 (b'', b'date', b'', b''),
464 (b'', b'date', b'', b''),
465 (b'', b'amend', None, b''),
465 (b'', b'amend', None, b''),
466 (b'', b'no-edit', None, b''),
466 (b'', b'no-edit', None, b''),
467 ]
467 ]
468 args, opts = parseoptions(ui, cmdoptions, args)
468 args, opts = parseoptions(ui, cmdoptions, args)
469
469
470 cmd = Command(b'commit')
470 cmd = Command(b'commit')
471 if opts.get(b'patch'):
471 if opts.get(b'patch'):
472 cmd = Command(b'commit --interactive')
472 cmd = Command(b'commit --interactive')
473
473
474 if opts.get(b'amend'):
474 if opts.get(b'amend'):
475 if opts.get(b'no_edit'):
475 if opts.get(b'no_edit'):
476 cmd = Command(b'amend')
476 cmd = Command(b'amend')
477 else:
477 else:
478 cmd[b'--amend'] = None
478 cmd[b'--amend'] = None
479
479
480 if opts.get(b'reuse_message'):
480 if opts.get(b'reuse_message'):
481 cmd[b'-M'] = opts.get(b'reuse_message')
481 cmd[b'-M'] = opts.get(b'reuse_message')
482
482
483 if opts.get(b'message'):
483 if opts.get(b'message'):
484 cmd[b'-m'] = b"'%s'" % (opts.get(b'message'),)
484 cmd[b'-m'] = b"'%s'" % (opts.get(b'message'),)
485
485
486 if opts.get(b'all'):
486 if opts.get(b'all'):
487 ui.status(
487 ui.status(
488 _(
488 _(
489 b"note: Mercurial doesn't have a staging area, "
489 b"note: Mercurial doesn't have a staging area, "
490 b"so there is no --all. -A will add and remove files "
490 b"so there is no --all. -A will add and remove files "
491 b"for you though.\n\n"
491 b"for you though.\n\n"
492 )
492 )
493 )
493 )
494
494
495 if opts.get(b'file'):
495 if opts.get(b'file'):
496 cmd[b'-l'] = opts.get(b'file')
496 cmd[b'-l'] = opts.get(b'file')
497
497
498 if opts.get(b'author'):
498 if opts.get(b'author'):
499 cmd[b'-u'] = opts.get(b'author')
499 cmd[b'-u'] = opts.get(b'author')
500
500
501 if opts.get(b'date'):
501 if opts.get(b'date'):
502 cmd[b'-d'] = opts.get(b'date')
502 cmd[b'-d'] = opts.get(b'date')
503
503
504 cmd.extend(args)
504 cmd.extend(args)
505
505
506 ui.status((bytes(cmd)), b"\n")
506 ui.status((bytes(cmd)), b"\n")
507
507
508
508
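Sample mappings produced by the commit handler above:

git commit -m "fix bug"        =>  hg commit -m 'fix bug'
git commit --amend             =>  hg commit --amend
git commit --amend --no-edit   =>  hg amend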
509 def deprecated(ui, repo, *args, **kwargs):
509 def deprecated(ui, repo, *args, **kwargs):
510 ui.warn(
510 ui.warn(
511 _(
511 _(
512 b'this command has been deprecated in the git project, '
512 b'this command has been deprecated in the git project, '
513 b'and thus isn\'t supported by this tool\n\n'
513 b'and thus isn\'t supported by this tool\n\n'
514 )
514 )
515 )
515 )
516
516
517
517
518 def diff(ui, repo, *args, **kwargs):
518 def diff(ui, repo, *args, **kwargs):
519 cmdoptions = [
519 cmdoptions = [
520 (b'a', b'all', None, b''),
520 (b'a', b'all', None, b''),
521 (b'', b'cached', None, b''),
521 (b'', b'cached', None, b''),
522 (b'R', b'reverse', None, b''),
522 (b'R', b'reverse', None, b''),
523 ]
523 ]
524 args, opts = parseoptions(ui, cmdoptions, args)
524 args, opts = parseoptions(ui, cmdoptions, args)
525
525
526 cmd = Command(b'diff')
526 cmd = Command(b'diff')
527
527
528 if opts.get(b'cached'):
528 if opts.get(b'cached'):
529 ui.status(
529 ui.status(
530 _(
530 _(
531 b'note: Mercurial has no concept of a staging area, '
531 b'note: Mercurial has no concept of a staging area, '
532 b'so --cached does nothing\n\n'
532 b'so --cached does nothing\n\n'
533 )
533 )
534 )
534 )
535
535
536 if opts.get(b'reverse'):
536 if opts.get(b'reverse'):
537 cmd[b'--reverse'] = None
537 cmd[b'--reverse'] = None
538
538
539 for a in list(args):
539 for a in list(args):
540 args.remove(a)
540 args.remove(a)
541 try:
541 try:
542 repo.revs(a)
542 repo.revs(a)
543 cmd[b'-r'] = a
543 cmd[b'-r'] = a
544 except Exception:
544 except Exception:
545 cmd.append(a)
545 cmd.append(a)
546
546
547 ui.status((bytes(cmd)), b"\n")
547 ui.status((bytes(cmd)), b"\n")
548
548
549
549
550 def difftool(ui, repo, *args, **kwargs):
550 def difftool(ui, repo, *args, **kwargs):
551 ui.status(
551 ui.status(
552 _(
552 _(
553 b'Mercurial does not enable external difftool by default. You '
553 b'Mercurial does not enable external difftool by default. You '
554 b'need to enable the extdiff extension in your .hgrc file by adding\n'
554 b'need to enable the extdiff extension in your .hgrc file by adding\n'
555 b'extdiff =\n'
555 b'extdiff =\n'
556 b'to the [extensions] section and then running\n\n'
556 b'to the [extensions] section and then running\n\n'
557 b'hg extdiff -p <program>\n\n'
557 b'hg extdiff -p <program>\n\n'
558 b'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
558 b'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
559 b'information.\n'
559 b'information.\n'
560 )
560 )
561 )
561 )
562
562
563
563
564 def fetch(ui, repo, *args, **kwargs):
564 def fetch(ui, repo, *args, **kwargs):
565 cmdoptions = [
565 cmdoptions = [
566 (b'', b'all', None, b''),
566 (b'', b'all', None, b''),
567 (b'f', b'force', None, b''),
567 (b'f', b'force', None, b''),
568 ]
568 ]
569 args, opts = parseoptions(ui, cmdoptions, args)
569 args, opts = parseoptions(ui, cmdoptions, args)
570
570
571 cmd = Command(b'pull')
571 cmd = Command(b'pull')
572
572
573 if len(args) > 0:
573 if len(args) > 0:
574 cmd.append(args[0])
574 cmd.append(args[0])
575 if len(args) > 1:
575 if len(args) > 1:
576 ui.status(
576 ui.status(
577 _(
577 _(
578 b"note: Mercurial doesn't have refspecs. "
578 b"note: Mercurial doesn't have refspecs. "
579 b"-r can be used to specify which commits you want to "
579 b"-r can be used to specify which commits you want to "
580 b"pull. -B can be used to specify which bookmark you "
580 b"pull. -B can be used to specify which bookmark you "
581 b"want to pull.\n\n"
581 b"want to pull.\n\n"
582 )
582 )
583 )
583 )
584 for v in args[1:]:
584 for v in args[1:]:
585 if v in repo._bookmarks:
585 if v in repo._bookmarks:
586 cmd[b'-B'] = v
586 cmd[b'-B'] = v
587 else:
587 else:
588 cmd[b'-r'] = v
588 cmd[b'-r'] = v
589
589
590 ui.status((bytes(cmd)), b"\n")
590 ui.status((bytes(cmd)), b"\n")
591
591
592
592
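Sample mappings from the fetch handler above (assuming somebook is a known bookmark):

git fetch upstream             =>  hg pull upstream
git fetch upstream somebook    =>  hg pull upstream -B somebook
git fetch upstream <rev>       =>  hg pull upstream -r <rev>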
593 def grep(ui, repo, *args, **kwargs):
593 def grep(ui, repo, *args, **kwargs):
594 cmdoptions = []
594 cmdoptions = []
595 args, opts = parseoptions(ui, cmdoptions, args)
595 args, opts = parseoptions(ui, cmdoptions, args)
596
596
597 cmd = Command(b'grep')
597 cmd = Command(b'grep')
598
598
599 # For basic usage, git grep and hg grep are the same. They both have the
599 # For basic usage, git grep and hg grep are the same. They both have the
600 # pattern first, followed by paths.
600 # pattern first, followed by paths.
601 cmd.extend(args)
601 cmd.extend(args)
602
602
603 ui.status((bytes(cmd)), b"\n")
603 ui.status((bytes(cmd)), b"\n")
604
604
605
605
606 def init(ui, repo, *args, **kwargs):
606 def init(ui, repo, *args, **kwargs):
607 cmdoptions = []
607 cmdoptions = []
608 args, opts = parseoptions(ui, cmdoptions, args)
608 args, opts = parseoptions(ui, cmdoptions, args)
609
609
610 cmd = Command(b'init')
610 cmd = Command(b'init')
611
611
612 if len(args) > 0:
612 if len(args) > 0:
613 cmd.append(args[0])
613 cmd.append(args[0])
614
614
615 ui.status((bytes(cmd)), b"\n")
615 ui.status((bytes(cmd)), b"\n")
616
616
617
617
618 def log(ui, repo, *args, **kwargs):
618 def log(ui, repo, *args, **kwargs):
619 cmdoptions = [
619 cmdoptions = [
620 (b'', b'follow', None, b''),
620 (b'', b'follow', None, b''),
621 (b'', b'decorate', None, b''),
621 (b'', b'decorate', None, b''),
622 (b'n', b'number', b'', b''),
622 (b'n', b'number', b'', b''),
623 (b'1', b'1', None, b''),
623 (b'1', b'1', None, b''),
624 (b'', b'pretty', b'', b''),
624 (b'', b'pretty', b'', b''),
625 (b'', b'format', b'', b''),
625 (b'', b'format', b'', b''),
626 (b'', b'oneline', None, b''),
626 (b'', b'oneline', None, b''),
627 (b'', b'stat', None, b''),
627 (b'', b'stat', None, b''),
628 (b'', b'graph', None, b''),
628 (b'', b'graph', None, b''),
629 (b'p', b'patch', None, b''),
629 (b'p', b'patch', None, b''),
630 (b'G', b'grep-diff', b'', b''),
630 (b'G', b'grep-diff', b'', b''),
631 (b'S', b'pickaxe-regex', b'', b''),
631 (b'S', b'pickaxe-regex', b'', b''),
632 ]
632 ]
633 args, opts = parseoptions(ui, cmdoptions, args)
633 args, opts = parseoptions(ui, cmdoptions, args)
634 grep_pat = opts.get(b'grep_diff') or opts.get(b'pickaxe_regex')
634 grep_pat = opts.get(b'grep_diff') or opts.get(b'pickaxe_regex')
635 if grep_pat:
635 if grep_pat:
636 cmd = Command(b'grep')
636 cmd = Command(b'grep')
637 cmd[b'--diff'] = grep_pat
637 cmd[b'--diff'] = grep_pat
638 ui.status(b'%s\n' % bytes(cmd))
638 ui.status(b'%s\n' % bytes(cmd))
639 return
639 return
640
640
641 ui.status(
641 ui.status(
642 _(
642 _(
643 b'note: -v prints the entire commit message like Git does. To '
643 b'note: -v prints the entire commit message like Git does. To '
644 b'print just the first line, drop the -v.\n\n'
644 b'print just the first line, drop the -v.\n\n'
645 )
645 )
646 )
646 )
647 ui.status(
647 ui.status(
648 _(
648 _(
649 b"note: see hg help revset for information on how to filter "
649 b"note: see hg help revset for information on how to filter "
650 b"log output\n\n"
650 b"log output\n\n"
651 )
651 )
652 )
652 )
653
653
654 cmd = Command(b'log')
654 cmd = Command(b'log')
655 cmd[b'-v'] = None
655 cmd[b'-v'] = None
656
656
657 if opts.get(b'number'):
657 if opts.get(b'number'):
658 cmd[b'-l'] = opts.get(b'number')
658 cmd[b'-l'] = opts.get(b'number')
659 if opts.get(b'1'):
659 if opts.get(b'1'):
660 cmd[b'-l'] = b'1'
660 cmd[b'-l'] = b'1'
661 if opts.get(b'stat'):
661 if opts.get(b'stat'):
662 cmd[b'--stat'] = None
662 cmd[b'--stat'] = None
663 if opts.get(b'graph'):
663 if opts.get(b'graph'):
664 cmd[b'-G'] = None
664 cmd[b'-G'] = None
665 if opts.get(b'patch'):
665 if opts.get(b'patch'):
666 cmd[b'-p'] = None
666 cmd[b'-p'] = None
667
667
668 if opts.get(b'pretty') or opts.get(b'format') or opts.get(b'oneline'):
668 if opts.get(b'pretty') or opts.get(b'format') or opts.get(b'oneline'):
669 format = opts.get(b'format', b'')
669 format = opts.get(b'format', b'')
670 if b'format:' in format:
670 if b'format:' in format:
671 ui.status(
671 ui.status(
672 _(
672 _(
673 b"note: --format format:??? equates to Mercurial's "
673 b"note: --format format:??? equates to Mercurial's "
674 b"--template. See hg help templates for more info.\n\n"
674 b"--template. See hg help templates for more info.\n\n"
675 )
675 )
676 )
676 )
677 cmd[b'--template'] = b'???'
677 cmd[b'--template'] = b'???'
678 else:
678 else:
679 ui.status(
679 ui.status(
680 _(
680 _(
681 b"note: --pretty/format/oneline equate to Mercurial's "
681 b"note: --pretty/format/oneline equate to Mercurial's "
682 b"--style or --template. See hg help templates for "
682 b"--style or --template. See hg help templates for "
683 b"more info.\n\n"
683 b"more info.\n\n"
684 )
684 )
685 )
685 )
686 cmd[b'--style'] = b'???'
686 cmd[b'--style'] = b'???'
687
687
688 if len(args) > 0:
688 if len(args) > 0:
689 if b'..' in args[0]:
689 if b'..' in args[0]:
690 since, until = args[0].split(b'..')
690 since, until = args[0].split(b'..')
691 cmd[b'-r'] = b"'%s::%s'" % (since, until)
691 cmd[b'-r'] = b"'%s::%s'" % (since, until)
692 del args[0]
692 del args[0]
693 cmd.extend(args)
693 cmd.extend(args)
694
694
695 ui.status((bytes(cmd)), b"\n")
695 ui.status((bytes(cmd)), b"\n")
696
696
697
697
698 def lsfiles(ui, repo, *args, **kwargs):
698 def lsfiles(ui, repo, *args, **kwargs):
699 cmdoptions = [
699 cmdoptions = [
700 (b'c', b'cached', None, b''),
700 (b'c', b'cached', None, b''),
701 (b'd', b'deleted', None, b''),
701 (b'd', b'deleted', None, b''),
702 (b'm', b'modified', None, b''),
702 (b'm', b'modified', None, b''),
703 (b'o', b'others', None, b''),
703 (b'o', b'others', None, b''),
704 (b'i', b'ignored', None, b''),
704 (b'i', b'ignored', None, b''),
705 (b's', b'stage', None, b''),
705 (b's', b'stage', None, b''),
706 (b'z', b'_zero', None, b''),
706 (b'z', b'_zero', None, b''),
707 ]
707 ]
708 args, opts = parseoptions(ui, cmdoptions, args)
708 args, opts = parseoptions(ui, cmdoptions, args)
709
709
710 if (
710 if (
711 opts.get(b'modified')
711 opts.get(b'modified')
712 or opts.get(b'deleted')
712 or opts.get(b'deleted')
713 or opts.get(b'others')
713 or opts.get(b'others')
714 or opts.get(b'ignored')
714 or opts.get(b'ignored')
715 ):
715 ):
716 cmd = Command(b'status')
716 cmd = Command(b'status')
717 if opts.get(b'deleted'):
717 if opts.get(b'deleted'):
718 cmd[b'-d'] = None
718 cmd[b'-d'] = None
719 if opts.get(b'modified'):
719 if opts.get(b'modified'):
720 cmd[b'-m'] = None
720 cmd[b'-m'] = None
721 if opts.get(b'others'):
721 if opts.get(b'others'):
722 cmd[b'-o'] = None
722 cmd[b'-o'] = None
723 if opts.get(b'ignored'):
723 if opts.get(b'ignored'):
724 cmd[b'-i'] = None
724 cmd[b'-i'] = None
725 else:
725 else:
726 cmd = Command(b'files')
726 cmd = Command(b'files')
727 if opts.get(b'stage'):
727 if opts.get(b'stage'):
728 ui.status(
728 ui.status(
729 _(
729 _(
730 b"note: Mercurial doesn't have a staging area, ignoring "
730 b"note: Mercurial doesn't have a staging area, ignoring "
731 b"--stage\n"
731 b"--stage\n"
732 )
732 )
733 )
733 )
734 if opts.get(b'_zero'):
734 if opts.get(b'_zero'):
735 cmd[b'-0'] = None
735 cmd[b'-0'] = None
736 cmd.append(b'.')
736 cmd.append(b'.')
737 for include in args:
737 for include in args:
738 cmd[b'-I'] = procutil.shellquote(include)
738 cmd[b'-I'] = procutil.shellquote(include)
739
739
740 ui.status((bytes(cmd)), b"\n")
740 ui.status((bytes(cmd)), b"\n")
741
741
742
742
743 def merge(ui, repo, *args, **kwargs):
743 def merge(ui, repo, *args, **kwargs):
744 cmdoptions = []
744 cmdoptions = []
745 args, opts = parseoptions(ui, cmdoptions, args)
745 args, opts = parseoptions(ui, cmdoptions, args)
746
746
747 cmd = Command(b'merge')
747 cmd = Command(b'merge')
748
748
749 if len(args) > 0:
749 if len(args) > 0:
750 cmd.append(args[len(args) - 1])
750 cmd.append(args[len(args) - 1])
751
751
752 ui.status((bytes(cmd)), b"\n")
752 ui.status((bytes(cmd)), b"\n")
753
753
754
754
755 def mergebase(ui, repo, *args, **kwargs):
755 def mergebase(ui, repo, *args, **kwargs):
756 cmdoptions = []
756 cmdoptions = []
757 args, opts = parseoptions(ui, cmdoptions, args)
757 args, opts = parseoptions(ui, cmdoptions, args)
758
758
759 if len(args) != 2:
759 if len(args) != 2:
760 args = [b'A', b'B']
760 args = [b'A', b'B']
761
761
762 cmd = Command(
762 cmd = Command(
763 b"log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (args[0], args[1])
763 b"log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (args[0], args[1])
764 )
764 )
765
765
766 ui.status(
766 ui.status(
767 _(b'note: ancestors() is part of the revset language\n'),
767 _(b'note: ancestors() is part of the revset language\n'),
768 _(b"(learn more about revsets with 'hg help revsets')\n\n"),
768 _(b"(learn more about revsets with 'hg help revsets')\n\n"),
769 )
769 )
770 ui.status((bytes(cmd)), b"\n")
770 ui.status((bytes(cmd)), b"\n")
771
771
772
772
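# Worked example (illustrative, not part of the original file): given
# "git merge-base A B", mergebase() above suggests
#
#   hg log -T '{node}\n' -r 'ancestor(A,B)'

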
def mergetool(ui, repo, *args, **kwargs):
    cmdoptions = []
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b"resolve")

    if len(args) == 0:
        cmd[b'--all'] = None
    cmd.extend(args)
    ui.status((bytes(cmd)), b"\n")


def mv(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'n', b'dry-run', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'mv')
    cmd.extend(args)

    if opts.get(b'force'):
        cmd[b'-f'] = None
    if opts.get(b'dry_run'):
        cmd[b'-n'] = None

    ui.status((bytes(cmd)), b"\n")


def pull(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'f', b'force', None, b''),
        (b'r', b'rebase', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'pull')
    cmd[b'--rebase'] = None

    if len(args) > 0:
        cmd.append(args[0])
        if len(args) > 1:
            ui.status(
                _(
                    b"note: Mercurial doesn't have refspecs. "
                    b"-r can be used to specify which commits you want to "
                    b"pull. -B can be used to specify which bookmark you "
                    b"want to pull.\n\n"
                )
            )
            for v in args[1:]:
                if v in repo._bookmarks:
                    cmd[b'-B'] = v
                else:
                    cmd[b'-r'] = v

    ui.status((bytes(cmd)), b"\n")


def push(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'f', b'force', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'push')

    if len(args) > 0:
        cmd.append(args[0])
        if len(args) > 1:
            ui.status(
                _(
                    b"note: Mercurial doesn't have refspecs. "
                    b"-r can be used to specify which commits you want "
                    b"to push. -B can be used to specify which bookmark "
                    b"you want to push.\n\n"
                )
            )
            for v in args[1:]:
                if v in repo._bookmarks:
                    cmd[b'-B'] = v
                else:
                    cmd[b'-r'] = v

    if opts.get(b'force'):
        cmd[b'-f'] = None

    ui.status((bytes(cmd)), b"\n")


def rebase(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'i', b'interactive', None, b''),
        (b'', b'onto', b'', b''),
        (b'', b'abort', None, b''),
        (b'', b'continue', None, b''),
        (b'', b'skip', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'interactive'):
        ui.status(
            _(
                b"note: hg histedit does not perform a rebase. "
                b"It just edits history.\n\n"
            )
        )
        cmd = Command(b'histedit')
        if len(args) > 0:
            ui.status(
                _(
                    b"also note: 'hg histedit' will automatically detect"
                    b" your stack, so no second argument is necessary\n\n"
                )
            )
        ui.status((bytes(cmd)), b"\n")
        return

    if opts.get(b'skip'):
        cmd = Command(b'revert --all -r .')
        ui.status((bytes(cmd)), b"\n")

    cmd = Command(b'rebase')

    if opts.get(b'continue') or opts.get(b'skip'):
        cmd[b'--continue'] = None
    if opts.get(b'abort'):
        cmd[b'--abort'] = None

    if opts.get(b'onto'):
        ui.status(
            _(
                b"note: if you're trying to lift a commit off one branch, "
                b"try hg rebase -d <destination commit> -s <commit to be "
                b"lifted>\n\n"
            )
        )
        cmd[b'-d'] = convert(opts.get(b'onto'))
        if len(args) < 2:
            raise error.Abort(_(b"expected format: git rebase --onto X Y Z"))
        cmd[b'-s'] = b"'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
    else:
        if len(args) == 1:
            cmd[b'-d'] = convert(args[0])
        elif len(args) == 2:
            cmd[b'-d'] = convert(args[0])
            cmd[b'-b'] = convert(args[1])

    ui.status((bytes(cmd)), b"\n")


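# Worked example (illustrative, not part of the original file): given
# "git rebase --onto X Y Z", rebase() above suggests
#
#   hg rebase -d X -s '::Z - ::Y'
#
# i.e. graft everything reachable from Z but not from Y onto X.

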
def reflog(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'all', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'journal')
    if opts.get(b'all'):
        cmd[b'--all'] = None
    if len(args) > 0:
        cmd.append(args[0])

    ui.status(bytes(cmd), b"\n\n")
    ui.status(
        _(
            b"note: in hg commits can be deleted from repo but we always"
            b" have backups\n"
        )
    )


def reset(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'soft', None, b''),
        (b'', b'hard', None, b''),
        (b'', b'mixed', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    commit = convert(args[0] if len(args) > 0 else b'.')
    hard = opts.get(b'hard')

    if opts.get(b'mixed'):
        ui.status(
            _(
                b'note: --mixed has no meaning since Mercurial has no '
                b'staging area\n\n'
            )
        )
    if opts.get(b'soft'):
        ui.status(
            _(
                b'note: --soft has no meaning since Mercurial has no '
                b'staging area\n\n'
            )
        )

    cmd = Command(b'update')
    if hard:
        cmd.append(b'--clean')

    cmd.append(commit)

    ui.status((bytes(cmd)), b"\n")


def revert(ui, repo, *args, **kwargs):
    cmdoptions = []
    args, opts = parseoptions(ui, cmdoptions, args)

    if len(args) > 1:
        ui.status(
            _(
                b"note: hg backout doesn't support multiple commits at "
                b"once\n\n"
            )
        )

    cmd = Command(b'backout')
    if args:
        cmd.append(args[0])

    ui.status((bytes(cmd)), b"\n")


def revparse(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'show-cdup', None, b''),
        (b'', b'show-toplevel', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'show_cdup') or opts.get(b'show_toplevel'):
        cmd = Command(b'root')
        if opts.get(b'show_cdup'):
            ui.status(_(b"note: hg root prints the root of the repository\n\n"))
        ui.status((bytes(cmd)), b"\n")
    else:
        ui.status(_(b"note: see hg help revset for how to refer to commits\n"))


def rm(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'n', b'dry-run', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'rm')
    cmd.extend(args)

    if opts.get(b'force'):
        cmd[b'-f'] = None
    if opts.get(b'dry_run'):
        cmd[b'-n'] = None

    ui.status((bytes(cmd)), b"\n")


def show(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'name-status', None, b''),
        (b'', b'pretty', b'', b''),
        (b'U', b'unified', int, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'name_status'):
        if opts.get(b'pretty') == b'format:':
            cmd = Command(b'status')
            cmd[b'--change'] = b'.'
        else:
            cmd = Command(b'log')
            cmd.append(b'--style status')
            cmd.append(b'-r .')
    elif len(args) > 0:
        if ispath(repo, args[0]):
            cmd = Command(b'cat')
        else:
            cmd = Command(b'export')
        cmd.extend(args)
        if opts.get(b'unified'):
            cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
    elif opts.get(b'unified'):
        cmd = Command(b'export')
        cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
    else:
        cmd = Command(b'export')

    ui.status((bytes(cmd)), b"\n")


def stash(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'p', b'patch', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'shelve')
    action = args[0] if len(args) > 0 else None

    if action == b'list':
        cmd[b'-l'] = None
        if opts.get(b'patch'):
            cmd[b'-p'] = None
    elif action == b'show':
        if opts.get(b'patch'):
            cmd[b'-p'] = None
        else:
            cmd[b'--stat'] = None
        if len(args) > 1:
            cmd.append(args[1])
    elif action == b'clear':
        cmd[b'--cleanup'] = None
    elif action == b'drop':
        cmd[b'-d'] = None
        if len(args) > 1:
            cmd.append(args[1])
        else:
            cmd.append(b'<shelve name>')
    elif action == b'pop' or action == b'apply':
        cmd = Command(b'unshelve')
        if len(args) > 1:
            cmd.append(args[1])
        if action == b'apply':
            cmd[b'--keep'] = None
    elif action == b'branch' or action == b'create':
        ui.status(
            _(
                b"note: Mercurial doesn't have equivalents to the "
                b"git stash branch or create actions\n\n"
            )
        )
        return
    else:
        if len(args) > 0:
            if args[0] != b'save':
                cmd[b'--name'] = args[0]
            elif len(args) > 1:
                cmd[b'--name'] = args[1]

    ui.status((bytes(cmd)), b"\n")


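# Quick reference (illustrative, not part of the original file) for the
# stash() translations above:
#
#   git stash            -> hg shelve
#   git stash list       -> hg shelve -l
#   git stash pop        -> hg unshelve
#   git stash apply      -> hg unshelve --keep
#   git stash drop NAME  -> hg shelve -d NAME

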
def status(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'', b'ignored', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'status')
    cmd.extend(args)

    if opts.get(b'ignored'):
        cmd[b'-i'] = None

    ui.status((bytes(cmd)), b"\n")


def svn(ui, repo, *args, **kwargs):
    if not args:
        raise error.Abort(_(b'missing svn command'))
    svncmd = args[0]
    if svncmd not in gitsvncommands:
        raise error.Abort(_(b'unknown git svn command "%s"') % svncmd)

    args = args[1:]
    return gitsvncommands[svncmd](ui, repo, *args, **kwargs)


def svndcommit(ui, repo, *args, **kwargs):
    cmdoptions = []
    parseoptions(ui, cmdoptions, args)

    cmd = Command(b'push')

    ui.status((bytes(cmd)), b"\n")


def svnfetch(ui, repo, *args, **kwargs):
    cmdoptions = []
    parseoptions(ui, cmdoptions, args)

    cmd = Command(b'pull')
    cmd.append(b'default-push')

    ui.status((bytes(cmd)), b"\n")


def svnfindrev(ui, repo, *args, **kwargs):
    cmdoptions = []
    args, opts = parseoptions(ui, cmdoptions, args)

    if not args:
        raise error.Abort(_(b'missing find-rev argument'))

    cmd = Command(b'log')
    cmd[b'-r'] = args[0]

    ui.status((bytes(cmd)), b"\n")


def svnrebase(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'l', b'local', None, b''),
    ]
    parseoptions(ui, cmdoptions, args)

    pullcmd = Command(b'pull')
    pullcmd.append(b'default-push')
    rebasecmd = Command(b'rebase')
    rebasecmd.append(b'tip')

    cmd = pullcmd & rebasecmd

    ui.status((bytes(cmd)), b"\n")


def tag(ui, repo, *args, **kwargs):
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'l', b'list', None, b''),
        (b'd', b'delete', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'list'):
        cmd = Command(b'tags')
    else:
        cmd = Command(b'tag')

        if not args:
            raise error.Abort(_(b'missing tag argument'))

        cmd.append(args[0])
        if len(args) > 1:
            cmd[b'-r'] = args[1]

        if opts.get(b'delete'):
            cmd[b'--remove'] = None

        if opts.get(b'force'):
            cmd[b'-f'] = None

    ui.status((bytes(cmd)), b"\n")


gitcommands = {
    b'add': add,
    b'am': am,
    b'apply': apply,
    b'bisect': bisect,
    b'blame': blame,
    b'branch': branch,
    b'checkout': checkout,
    b'cherry-pick': cherrypick,
    b'clean': clean,
    b'clone': clone,
    b'commit': commit,
    b'diff': diff,
    b'difftool': difftool,
    b'fetch': fetch,
    b'grep': grep,
    b'init': init,
    b'log': log,
    b'ls-files': lsfiles,
    b'merge': merge,
    b'merge-base': mergebase,
    b'mergetool': mergetool,
    b'mv': mv,
    b'pull': pull,
    b'push': push,
    b'rebase': rebase,
    b'reflog': reflog,
    b'reset': reset,
    b'revert': revert,
    b'rev-parse': revparse,
    b'rm': rm,
    b'show': show,
    b'stash': stash,
    b'status': status,
    b'svn': svn,
    b'tag': tag,
    b'whatchanged': deprecated,
}

gitsvncommands = {
    b'dcommit': svndcommit,
    b'fetch': svnfetch,
    b'find-rev': svnfindrev,
    b'rebase': svnrebase,
}
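

# Editor's sketch (illustrative, not part of the original file): every
# handler above builds a Command, assigns options, appends positional
# arguments, and prints bytes(cmd). The real Command class is defined
# earlier in githelp.py and is not shown in this hunk; the minimal
# re-implementation below (_SketchCommand and _cmd are hypothetical
# names) only assumes the behaviour visible here: append()/extend() for
# positionals, item assignment for options, and None meaning a bare
# flag. The real class may order or quote parts differently.
class _SketchCommand:
    def __init__(self, name):
        self.name = name
        self.args = []
        self.opts = []

    def append(self, value):
        # positional argument, e.g. a remote name or a revision
        self.args.append(value)

    def extend(self, values):
        self.args.extend(values)

    def __setitem__(self, key, value):
        # option; a value of None renders as a bare flag like --rebase
        self.opts.append((key, value))

    def __bytes__(self):
        parts = [b'hg', self.name]
        for k, v in self.opts:
            parts.append(k if v is None else b'%s %s' % (k, v))
        parts.extend(self.args)
        return b' '.join(parts)


# Usage, mirroring what the pull() handler above would suggest for
# "git pull upstream":
_cmd = _SketchCommand(b'pull')
_cmd[b'--rebase'] = None
_cmd.append(b'upstream')
assert bytes(_cmd) == b'hg pull --rebase upstream'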
@@ -1,387 +1,385 @@
# Minimal support for git commands on an hg repository
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''browse the repository in a graphical way

The hgk extension allows browsing the history of a repository in a
graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
distributed with Mercurial.)

hgk consists of two parts: a Tcl script that does the displaying and
querying of information, and an extension to Mercurial named hgk.py,
which provides hooks for hgk to get information. hgk can be found in
the contrib directory, and the extension is shipped in the hgext
repository, and needs to be enabled.

The :hg:`view` command will launch the hgk Tcl script. For this command
to work, hgk must be in your search path. Alternately, you can specify
the path to hgk in your configuration file::

  [hgk]
  path = /location/of/hgk

hgk can make use of the extdiff extension to visualize revisions.
Assuming you had already configured extdiff vdiff command, just add::

  [hgk]
  vdiff=vdiff

Revisions context menu will now display additional entries to fire
vdiff on hovered and selected revisions.
'''
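
# A minimal hgrc sketch for the setup described in the docstring above
# (illustrative; the [hgk] values are examples, and the bare "hgk ="
# entry is the standard way to enable a bundled extension):
#
#   [extensions]
#   hgk =
#
#   [hgk]
#   path = /location/of/hgk
#   vdiff = vdiff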


import os

from mercurial.i18n import _
from mercurial.node import (
    nullrev,
    short,
)
from mercurial import (
    commands,
    obsolete,
    patch,
    pycompat,
    registrar,
    scmutil,
)

cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

configtable = {}
configitem = registrar.configitem(configtable)

configitem(
    b'hgk',
    b'path',
    default=b'hgk',
)


@command(
    b'debug-diff-tree',
    [
        (b'p', b'patch', None, _(b'generate patch')),
        (b'r', b'recursive', None, _(b'recursive')),
        (b'P', b'pretty', None, _(b'pretty')),
        (b's', b'stdin', None, _(b'stdin')),
        (b'C', b'copy', None, _(b'detect copies')),
        (b'S', b'search', b"", _(b'search')),
    ],
    b'[OPTION]... NODE1 NODE2 [FILE]...',
    inferrepo=True,
)
def difftree(ui, repo, node1=None, node2=None, *files, **opts):
    """diff trees from two commits"""

    def __difftree(repo, node1, node2, files=None):
        assert node2 is not None
        if files is None:
            files = []
        mmap = repo[node1].manifest()
        mmap2 = repo[node2].manifest()
        m = scmutil.match(repo[node1], files)
        st = repo.status(node1, node2, m)
        empty = short(repo.nullid)

        for f in st.modified:
            # TODO get file permissions
            ui.writenoi18n(
                b":100664 100664 %s %s M\t%s\t%s\n"
                % (short(mmap[f]), short(mmap2[f]), f, f)
            )
        for f in st.added:
            ui.writenoi18n(
                b":000000 100664 %s %s N\t%s\t%s\n"
                % (empty, short(mmap2[f]), f, f)
            )
        for f in st.removed:
            ui.writenoi18n(
                b":100664 000000 %s %s D\t%s\t%s\n"
                % (short(mmap[f]), empty, f, f)
            )

    ##

    while True:
        if opts['stdin']:
            line = ui.fin.readline()
            if not line:
                break
            line = line.rstrip(pycompat.oslinesep).split(b' ')
            node1 = line[0]
            if len(line) > 1:
                node2 = line[1]
            else:
                node2 = None
        node1 = repo.lookup(node1)
        if node2:
            node2 = repo.lookup(node2)
        else:
            node2 = node1
            node1 = repo.changelog.parents(node1)[0]
        if opts['patch']:
            if opts['pretty']:
                catcommit(ui, repo, node2, b"")
            m = scmutil.match(repo[node1], files)
            diffopts = patch.difffeatureopts(ui)
            diffopts.git = True
            chunks = patch.diff(repo, node1, node2, match=m, opts=diffopts)
            for chunk in chunks:
                ui.write(chunk)
        else:
            __difftree(repo, node1, node2, files=files)
        if not opts['stdin']:
            break


def catcommit(ui, repo, n, prefix, ctx=None):
    nlprefix = b'\n' + prefix
    if ctx is None:
        ctx = repo[n]
    # use ctx.node() instead ??
    ui.write((b"tree %s\n" % short(ctx.changeset()[0])))
    for p in ctx.parents():
        ui.write((b"parent %s\n" % p))

    date = ctx.date()
    description = ctx.description().replace(b"\0", b"")
    ui.write((b"author %s %d %d\n" % (ctx.user(), int(date[0]), date[1])))

    if b'committer' in ctx.extra():
        ui.write((b"committer %s\n" % ctx.extra()[b'committer']))

    ui.write((b"revision %d\n" % ctx.rev()))
    ui.write((b"branch %s\n" % ctx.branch()))
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        if ctx.obsolete():
            ui.writenoi18n(b"obsolete\n")
    ui.write((b"phase %s\n\n" % ctx.phasestr()))

    if prefix != b"":
        ui.write(
            b"%s%s\n" % (prefix, description.replace(b'\n', nlprefix).strip())
        )
    else:
        ui.write(description + b"\n")
    if prefix:
        ui.write(b'\0')


@command(b'debug-merge-base', [], _(b'REV REV'))
def base(ui, repo, node1, node2):
    """output common ancestor information"""
    node1 = repo.lookup(node1)
    node2 = repo.lookup(node2)
    n = repo.changelog.ancestor(node1, node2)
    ui.write(short(n) + b"\n")


@command(
    b'debug-cat-file',
    [(b's', b'stdin', None, _(b'stdin'))],
    _(b'[OPTION]... TYPE FILE'),
    inferrepo=True,
)
def catfile(ui, repo, type=None, r=None, **opts):
    """cat a specific revision"""
    # in stdin mode, every line except the commit is prefixed with two
    # spaces. This way our caller can find the commit without magic
    # strings
    #
    prefix = b""
    if opts['stdin']:
        line = ui.fin.readline()
        if not line:
            return
        (type, r) = line.rstrip(pycompat.oslinesep).split(b' ')
        prefix = b"  "
    else:
        if not type or not r:
            ui.warn(_(b"cat-file: type or revision not supplied\n"))
            commands.help_(ui, b'cat-file')

    while r:
        if type != b"commit":
            ui.warn(_(b"aborting hg cat-file only understands commits\n"))
            return 1
        n = repo.lookup(r)
        catcommit(ui, repo, n, prefix)
        if opts['stdin']:
            line = ui.fin.readline()
            if not line:
                break
            (type, r) = line.rstrip(pycompat.oslinesep).split(b' ')
        else:
            break


# git rev-tree is a confusing thing. You can supply a number of
# commit sha1s on the command line, and it walks the commit history
# telling you which commits are reachable from the supplied ones via
# a bitmask based on arg position.
# you can specify a commit to stop at by starting the sha1 with ^
def revtree(ui, args, repo, full=b"tree", maxnr=0, parents=False):
    def chlogwalk():
        count = len(repo)
        i = count
        l = [0] * 100
        chunk = 100
        while True:
            if chunk > i:
                chunk = i
                i = 0
            else:
                i -= chunk

            for x in pycompat.xrange(chunk):
                if i + x >= count:
                    l[chunk - x :] = [0] * (chunk - x)
                    break
                if full is not None:
                    if (i + x) in repo:
                        l[x] = repo[i + x]
                        l[x].changeset()  # force reading
                else:
                    if (i + x) in repo:
                        l[x] = 1
            for x in pycompat.xrange(chunk - 1, -1, -1):
                if l[x] != 0:
                    yield (i + x, full is not None and l[x] or None)
            if i == 0:
                break

    # calculate and return the reachability bitmask for sha
    def is_reachable(ar, reachable, sha):
        if len(ar) == 0:
            return 1
        mask = 0
        for i in pycompat.xrange(len(ar)):
            if sha in reachable[i]:
                mask |= 1 << i

        return mask

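    # Worked example (illustrative comment, not in the original file):
    # with want_sha1 = [A, B], a commit reachable only from A gets mask
    # 0b01 = 1, one reachable only from B gets 0b10 = 2, a common
    # ancestor of both gets 0b11 = 3, and mask 0 means the commit is
    # reachable from none of the supplied roots.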
    reachable = []
    stop_sha1 = []
    want_sha1 = []
    count = 0

    # figure out which commits they are asking for and which ones they
    # want us to stop on
    for i, arg in enumerate(args):
        if arg.startswith(b'^'):
            s = repo.lookup(arg[1:])
            stop_sha1.append(s)
            want_sha1.append(s)
        elif arg != b'HEAD':
            want_sha1.append(repo.lookup(arg))

    # calculate the graph for the supplied commits
    for i, n in enumerate(want_sha1):
        reachable.append(set())
        visit = [n]
        reachable[i].add(n)
        while visit:
            n = visit.pop(0)
            if n in stop_sha1:
                continue
            for p in repo.changelog.parents(n):
                if p not in reachable[i]:
                    reachable[i].add(p)
                    visit.append(p)
                if p in stop_sha1:
                    continue

    # walk the repository looking for commits that are in our
    # reachability graph
    for i, ctx in chlogwalk():
        if i not in repo:
            continue
        n = repo.changelog.node(i)
        mask = is_reachable(want_sha1, reachable, n)
        if mask:
            parentstr = b""
            if parents:
                pp = repo.changelog.parents(n)
                if pp[0] != repo.nullid:
                    parentstr += b" " + short(pp[0])
                if pp[1] != repo.nullid:
                    parentstr += b" " + short(pp[1])
            if not full:
                ui.write(b"%s%s\n" % (short(n), parentstr))
            elif full == b"commit":
                ui.write(b"%s%s\n" % (short(n), parentstr))
                catcommit(ui, repo, n, b'  ', ctx)
            else:
                (p1, p2) = repo.changelog.parents(n)
                (h, h1, h2) = map(short, (n, p1, p2))
                (i1, i2) = map(repo.changelog.rev, (p1, p2))

                date = ctx.date()[0]
                ui.write(b"%s %s:%s" % (date, h, mask))
                mask = is_reachable(want_sha1, reachable, p1)
                if i1 != nullrev and mask > 0:
                    ui.write(b"%s:%s " % (h1, mask)),
                mask = is_reachable(want_sha1, reachable, p2)
                if i2 != nullrev and mask > 0:
                    ui.write(b"%s:%s " % (h2, mask))
                ui.write(b"\n")
            if maxnr and count >= maxnr:
                break
            count += 1


# git rev-list tries to order things by date, and has the ability to stop
# at a given commit without walking the whole repo. TODO add the stop
# parameter
@command(
    b'debug-rev-list',
    [
        (b'H', b'header', None, _(b'header')),
        (b't', b'topo-order', None, _(b'topo-order')),
        (b'p', b'parents', None, _(b'parents')),
        (b'n', b'max-count', 0, _(b'max-count')),
    ],
    b'[OPTION]... REV...',
)
def revlist(ui, repo, *revs, **opts):
    """print revisions"""
    if opts['header']:
        full = b"commit"
    else:
        full = None
    copy = [x for x in revs]
    revtree(ui, copy, repo, full, opts['max_count'], opts[r'parents'])


@command(
    b'view',
    [(b'l', b'limit', b'', _(b'limit number of changes displayed'), _(b'NUM'))],
    _(b'[-l LIMIT] [REVRANGE]'),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def view(ui, repo, *etc, **opts):
    """start interactive history viewer"""
    opts = pycompat.byteskwargs(opts)
    os.chdir(repo.root)
-    optstr = b' '.join(
-        [b'--%s %s' % (k, v) for k, v in pycompat.iteritems(opts) if v]
-    )
+    optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.items() if v])
    if repo.filtername is None:
        optstr += b'--hidden'

    cmd = ui.config(b"hgk", b"path") + b" %s %s" % (optstr, b" ".join(etc))
    ui.debug(b"running %s\n" % cmd)
    ui.system(cmd, blockedtag=b'hgk_view')
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now