absorb: port partway to Python 3...
Augie Fackler
r39023:9204445a default
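The two absorb.py changes shown below (file lines 248 and 289) are small Python 3 compatibility fixes: dict.iterkeys() does not exist on Python 3, where iterating a dict already yields its keys, and map() returns a lazy iterator instead of a list, so a result that is indexed later must be materialized (Mercurial's pycompat.maplist does this). A minimal illustration of both differences, not taken from the patch itself:

    d = {'path1': 'content1', 'path2': 'content2'}
    d.iterkeys()                       # Python 2 only; AttributeError on Python 3
    set(['extra']).union(d)            # portable: iterating a dict yields its keys
    m = map(len, ['a\n', 'bc\n'])
    m[0]                               # list on Python 2; TypeError on Python 3 (map is lazy)
    list(map(len, ['a\n', 'bc\n']))[0] # portable; what pycompat.maplist provides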
@@ -1,547 +1,549 @@
1 test-abort-checkin.t
1 test-abort-checkin.t
2 test-absorb-phase.t
3 test-absorb-strip.t
2 test-add.t
4 test-add.t
3 test-addremove-similar.t
5 test-addremove-similar.t
4 test-addremove.t
6 test-addremove.t
5 test-alias.t
7 test-alias.t
6 test-amend-subrepo.t
8 test-amend-subrepo.t
7 test-amend.t
9 test-amend.t
8 test-ancestor.py
10 test-ancestor.py
9 test-annotate.py
11 test-annotate.py
10 test-annotate.t
12 test-annotate.t
11 test-archive-symlinks.t
13 test-archive-symlinks.t
12 test-atomictempfile.py
14 test-atomictempfile.py
13 test-audit-path.t
15 test-audit-path.t
14 test-audit-subrepo.t
16 test-audit-subrepo.t
15 test-automv.t
17 test-automv.t
16 test-backout.t
18 test-backout.t
17 test-backwards-remove.t
19 test-backwards-remove.t
18 test-bad-pull.t
20 test-bad-pull.t
19 test-basic.t
21 test-basic.t
20 test-bheads.t
22 test-bheads.t
21 test-bisect.t
23 test-bisect.t
22 test-bisect2.t
24 test-bisect2.t
23 test-bisect3.t
25 test-bisect3.t
24 test-blackbox.t
26 test-blackbox.t
25 test-bookmarks-current.t
27 test-bookmarks-current.t
26 test-bookmarks-merge.t
28 test-bookmarks-merge.t
27 test-bookmarks-pushpull.t
29 test-bookmarks-pushpull.t
28 test-bookmarks-rebase.t
30 test-bookmarks-rebase.t
29 test-bookmarks-strip.t
31 test-bookmarks-strip.t
30 test-bookmarks.t
32 test-bookmarks.t
31 test-branch-change.t
33 test-branch-change.t
32 test-branch-option.t
34 test-branch-option.t
33 test-branch-tag-confict.t
35 test-branch-tag-confict.t
34 test-branches.t
36 test-branches.t
35 test-bundle-phases.t
37 test-bundle-phases.t
36 test-bundle-r.t
38 test-bundle-r.t
37 test-bundle-type.t
39 test-bundle-type.t
38 test-bundle-vs-outgoing.t
40 test-bundle-vs-outgoing.t
39 test-bundle.t
41 test-bundle.t
40 test-bundle2-exchange.t
42 test-bundle2-exchange.t
41 test-bundle2-format.t
43 test-bundle2-format.t
42 test-bundle2-multiple-changegroups.t
44 test-bundle2-multiple-changegroups.t
43 test-bundle2-pushback.t
45 test-bundle2-pushback.t
44 test-bundle2-remote-changegroup.t
46 test-bundle2-remote-changegroup.t
45 test-cappedreader.py
47 test-cappedreader.py
46 test-casecollision.t
48 test-casecollision.t
47 test-cat.t
49 test-cat.t
48 test-cbor.py
50 test-cbor.py
49 test-censor.t
51 test-censor.t
50 test-changelog-exec.t
52 test-changelog-exec.t
51 test-check-commit.t
53 test-check-commit.t
52 test-check-execute.t
54 test-check-execute.t
53 test-check-interfaces.py
55 test-check-interfaces.py
54 test-check-module-imports.t
56 test-check-module-imports.t
55 test-check-pyflakes.t
57 test-check-pyflakes.t
56 test-check-pylint.t
58 test-check-pylint.t
57 test-check-shbang.t
59 test-check-shbang.t
58 test-children.t
60 test-children.t
59 test-clone-cgi.t
61 test-clone-cgi.t
60 test-clone-pull-corruption.t
62 test-clone-pull-corruption.t
61 test-clone-r.t
63 test-clone-r.t
62 test-clone-update-order.t
64 test-clone-update-order.t
63 test-clonebundles.t
65 test-clonebundles.t
64 test-commit-amend.t
66 test-commit-amend.t
65 test-commit-interactive.t
67 test-commit-interactive.t
66 test-commit-multiple.t
68 test-commit-multiple.t
67 test-commit-unresolved.t
69 test-commit-unresolved.t
68 test-commit.t
70 test-commit.t
69 test-committer.t
71 test-committer.t
70 test-completion.t
72 test-completion.t
71 test-config-env.py
73 test-config-env.py
72 test-config.t
74 test-config.t
73 test-conflict.t
75 test-conflict.t
74 test-confused-revert.t
76 test-confused-revert.t
75 test-context.py
77 test-context.py
76 test-contrib-check-code.t
78 test-contrib-check-code.t
77 test-contrib-check-commit.t
79 test-contrib-check-commit.t
78 test-convert-authormap.t
80 test-convert-authormap.t
79 test-convert-clonebranches.t
81 test-convert-clonebranches.t
80 test-convert-cvs-branch.t
82 test-convert-cvs-branch.t
81 test-convert-cvs-detectmerge.t
83 test-convert-cvs-detectmerge.t
82 test-convert-cvs-synthetic.t
84 test-convert-cvs-synthetic.t
83 test-convert-cvs.t
85 test-convert-cvs.t
84 test-convert-cvsnt-mergepoints.t
86 test-convert-cvsnt-mergepoints.t
85 test-convert-datesort.t
87 test-convert-datesort.t
86 test-convert-filemap.t
88 test-convert-filemap.t
87 test-convert-hg-sink.t
89 test-convert-hg-sink.t
88 test-convert-hg-source.t
90 test-convert-hg-source.t
89 test-convert-hg-startrev.t
91 test-convert-hg-startrev.t
90 test-convert-splicemap.t
92 test-convert-splicemap.t
91 test-convert-tagsbranch-topology.t
93 test-convert-tagsbranch-topology.t
92 test-copy-move-merge.t
94 test-copy-move-merge.t
93 test-copy.t
95 test-copy.t
94 test-copytrace-heuristics.t
96 test-copytrace-heuristics.t
95 test-debugbuilddag.t
97 test-debugbuilddag.t
96 test-debugbundle.t
98 test-debugbundle.t
97 test-debugextensions.t
99 test-debugextensions.t
98 test-debugindexdot.t
100 test-debugindexdot.t
99 test-debugrename.t
101 test-debugrename.t
100 test-default-push.t
102 test-default-push.t
101 test-diff-antipatience.t
103 test-diff-antipatience.t
102 test-diff-binary-file.t
104 test-diff-binary-file.t
103 test-diff-change.t
105 test-diff-change.t
104 test-diff-copy-depth.t
106 test-diff-copy-depth.t
105 test-diff-hashes.t
107 test-diff-hashes.t
106 test-diff-ignore-whitespace.t
108 test-diff-ignore-whitespace.t
107 test-diff-indent-heuristic.t
109 test-diff-indent-heuristic.t
108 test-diff-issue2761.t
110 test-diff-issue2761.t
109 test-diff-newlines.t
111 test-diff-newlines.t
110 test-diff-reverse.t
112 test-diff-reverse.t
111 test-diff-subdir.t
113 test-diff-subdir.t
112 test-diff-unified.t
114 test-diff-unified.t
113 test-diff-upgrade.t
115 test-diff-upgrade.t
114 test-diffdir.t
116 test-diffdir.t
115 test-diffstat.t
117 test-diffstat.t
116 test-directaccess.t
118 test-directaccess.t
117 test-dirstate-backup.t
119 test-dirstate-backup.t
118 test-dirstate-nonnormalset.t
120 test-dirstate-nonnormalset.t
119 test-dirstate.t
121 test-dirstate.t
120 test-dispatch.py
122 test-dispatch.py
121 test-doctest.py
123 test-doctest.py
122 test-double-merge.t
124 test-double-merge.t
123 test-drawdag.t
125 test-drawdag.t
124 test-duplicateoptions.py
126 test-duplicateoptions.py
125 test-editor-filename.t
127 test-editor-filename.t
126 test-empty-dir.t
128 test-empty-dir.t
127 test-empty-file.t
129 test-empty-file.t
128 test-empty-group.t
130 test-empty-group.t
129 test-empty.t
131 test-empty.t
130 test-encode.t
132 test-encode.t
131 test-encoding-func.py
133 test-encoding-func.py
132 test-encoding.t
134 test-encoding.t
133 test-eol-add.t
135 test-eol-add.t
134 test-eol-clone.t
136 test-eol-clone.t
135 test-eol-hook.t
137 test-eol-hook.t
136 test-eol-patch.t
138 test-eol-patch.t
137 test-eol-tag.t
139 test-eol-tag.t
138 test-eol-update.t
140 test-eol-update.t
139 test-eol.t
141 test-eol.t
140 test-eolfilename.t
142 test-eolfilename.t
141 test-excessive-merge.t
143 test-excessive-merge.t
142 test-exchange-obsmarkers-case-A1.t
144 test-exchange-obsmarkers-case-A1.t
143 test-exchange-obsmarkers-case-A2.t
145 test-exchange-obsmarkers-case-A2.t
144 test-exchange-obsmarkers-case-A3.t
146 test-exchange-obsmarkers-case-A3.t
145 test-exchange-obsmarkers-case-A4.t
147 test-exchange-obsmarkers-case-A4.t
146 test-exchange-obsmarkers-case-A5.t
148 test-exchange-obsmarkers-case-A5.t
147 test-exchange-obsmarkers-case-A6.t
149 test-exchange-obsmarkers-case-A6.t
148 test-exchange-obsmarkers-case-A7.t
150 test-exchange-obsmarkers-case-A7.t
149 test-exchange-obsmarkers-case-B1.t
151 test-exchange-obsmarkers-case-B1.t
150 test-exchange-obsmarkers-case-B2.t
152 test-exchange-obsmarkers-case-B2.t
151 test-exchange-obsmarkers-case-B3.t
153 test-exchange-obsmarkers-case-B3.t
152 test-exchange-obsmarkers-case-B4.t
154 test-exchange-obsmarkers-case-B4.t
153 test-exchange-obsmarkers-case-B5.t
155 test-exchange-obsmarkers-case-B5.t
154 test-exchange-obsmarkers-case-B6.t
156 test-exchange-obsmarkers-case-B6.t
155 test-exchange-obsmarkers-case-B7.t
157 test-exchange-obsmarkers-case-B7.t
156 test-exchange-obsmarkers-case-C1.t
158 test-exchange-obsmarkers-case-C1.t
157 test-exchange-obsmarkers-case-C2.t
159 test-exchange-obsmarkers-case-C2.t
158 test-exchange-obsmarkers-case-C3.t
160 test-exchange-obsmarkers-case-C3.t
159 test-exchange-obsmarkers-case-C4.t
161 test-exchange-obsmarkers-case-C4.t
160 test-exchange-obsmarkers-case-D1.t
162 test-exchange-obsmarkers-case-D1.t
161 test-exchange-obsmarkers-case-D2.t
163 test-exchange-obsmarkers-case-D2.t
162 test-exchange-obsmarkers-case-D3.t
164 test-exchange-obsmarkers-case-D3.t
163 test-exchange-obsmarkers-case-D4.t
165 test-exchange-obsmarkers-case-D4.t
164 test-execute-bit.t
166 test-execute-bit.t
165 test-export.t
167 test-export.t
166 test-extdata.t
168 test-extdata.t
167 test-extdiff.t
169 test-extdiff.t
168 test-extensions-afterloaded.t
170 test-extensions-afterloaded.t
169 test-extensions-wrapfunction.py
171 test-extensions-wrapfunction.py
170 test-extra-filelog-entry.t
172 test-extra-filelog-entry.t
171 test-fetch.t
173 test-fetch.t
172 test-filebranch.t
174 test-filebranch.t
173 test-filecache.py
175 test-filecache.py
174 test-filelog.py
176 test-filelog.py
175 test-fileset-generated.t
177 test-fileset-generated.t
176 test-fileset.t
178 test-fileset.t
177 test-fix-topology.t
179 test-fix-topology.t
178 test-flags.t
180 test-flags.t
179 test-generaldelta.t
181 test-generaldelta.t
180 test-getbundle.t
182 test-getbundle.t
181 test-git-export.t
183 test-git-export.t
182 test-glog-topological.t
184 test-glog-topological.t
183 test-gpg.t
185 test-gpg.t
184 test-graft.t
186 test-graft.t
185 test-hg-parseurl.py
187 test-hg-parseurl.py
186 test-hghave.t
188 test-hghave.t
187 test-hgignore.t
189 test-hgignore.t
188 test-hgk.t
190 test-hgk.t
189 test-hgrc.t
191 test-hgrc.t
190 test-hgweb-bundle.t
192 test-hgweb-bundle.t
191 test-hgweb-descend-empties.t
193 test-hgweb-descend-empties.t
192 test-hgweb-empty.t
194 test-hgweb-empty.t
193 test-hgweb-removed.t
195 test-hgweb-removed.t
194 test-hgwebdir-paths.py
196 test-hgwebdir-paths.py
195 test-hgwebdirsym.t
197 test-hgwebdirsym.t
196 test-histedit-arguments.t
198 test-histedit-arguments.t
197 test-histedit-base.t
199 test-histedit-base.t
198 test-histedit-bookmark-motion.t
200 test-histedit-bookmark-motion.t
199 test-histedit-commute.t
201 test-histedit-commute.t
200 test-histedit-drop.t
202 test-histedit-drop.t
201 test-histedit-edit.t
203 test-histedit-edit.t
202 test-histedit-fold-non-commute.t
204 test-histedit-fold-non-commute.t
203 test-histedit-fold.t
205 test-histedit-fold.t
204 test-histedit-no-backup.t
206 test-histedit-no-backup.t
205 test-histedit-no-change.t
207 test-histedit-no-change.t
206 test-histedit-non-commute-abort.t
208 test-histedit-non-commute-abort.t
207 test-histedit-non-commute.t
209 test-histedit-non-commute.t
208 test-histedit-obsolete.t
210 test-histedit-obsolete.t
209 test-histedit-outgoing.t
211 test-histedit-outgoing.t
210 test-histedit-templates.t
212 test-histedit-templates.t
211 test-http-branchmap.t
213 test-http-branchmap.t
212 test-http-bundle1.t
214 test-http-bundle1.t
213 test-http-clone-r.t
215 test-http-clone-r.t
214 test-http.t
216 test-http.t
215 test-hybridencode.py
217 test-hybridencode.py
216 test-identify.t
218 test-identify.t
217 test-impexp-branch.t
219 test-impexp-branch.t
218 test-import-bypass.t
220 test-import-bypass.t
219 test-import-eol.t
221 test-import-eol.t
220 test-import-merge.t
222 test-import-merge.t
221 test-import-unknown.t
223 test-import-unknown.t
222 test-import.t
224 test-import.t
223 test-imports-checker.t
225 test-imports-checker.t
224 test-incoming-outgoing.t
226 test-incoming-outgoing.t
225 test-inherit-mode.t
227 test-inherit-mode.t
226 test-init.t
228 test-init.t
227 test-issue1089.t
229 test-issue1089.t
228 test-issue1102.t
230 test-issue1102.t
229 test-issue1175.t
231 test-issue1175.t
230 test-issue1306.t
232 test-issue1306.t
231 test-issue1438.t
233 test-issue1438.t
232 test-issue1502.t
234 test-issue1502.t
233 test-issue1802.t
235 test-issue1802.t
234 test-issue1877.t
236 test-issue1877.t
235 test-issue1993.t
237 test-issue1993.t
236 test-issue2137.t
238 test-issue2137.t
237 test-issue3084.t
239 test-issue3084.t
238 test-issue4074.t
240 test-issue4074.t
239 test-issue522.t
241 test-issue522.t
240 test-issue586.t
242 test-issue586.t
241 test-issue612.t
243 test-issue612.t
242 test-issue619.t
244 test-issue619.t
243 test-issue660.t
245 test-issue660.t
244 test-issue672.t
246 test-issue672.t
245 test-issue842.t
247 test-issue842.t
246 test-journal-exists.t
248 test-journal-exists.t
247 test-journal-share.t
249 test-journal-share.t
248 test-journal.t
250 test-journal.t
249 test-known.t
251 test-known.t
250 test-largefiles-cache.t
252 test-largefiles-cache.t
251 test-largefiles-misc.t
253 test-largefiles-misc.t
252 test-largefiles-small-disk.t
254 test-largefiles-small-disk.t
253 test-largefiles-update.t
255 test-largefiles-update.t
254 test-largefiles.t
256 test-largefiles.t
255 test-lfs-largefiles.t
257 test-lfs-largefiles.t
256 test-lfs-pointer.py
258 test-lfs-pointer.py
257 test-linelog.py
259 test-linelog.py
258 test-linerange.py
260 test-linerange.py
259 test-locate.t
261 test-locate.t
260 test-lock-badness.t
262 test-lock-badness.t
261 test-log-linerange.t
263 test-log-linerange.t
262 test-log.t
264 test-log.t
263 test-logexchange.t
265 test-logexchange.t
264 test-lrucachedict.py
266 test-lrucachedict.py
265 test-mactext.t
267 test-mactext.t
266 test-mailmap.t
268 test-mailmap.t
267 test-manifest-merging.t
269 test-manifest-merging.t
268 test-manifest.py
270 test-manifest.py
269 test-manifest.t
271 test-manifest.t
270 test-match.py
272 test-match.py
271 test-mdiff.py
273 test-mdiff.py
272 test-merge-changedelete.t
274 test-merge-changedelete.t
273 test-merge-closedheads.t
275 test-merge-closedheads.t
274 test-merge-commit.t
276 test-merge-commit.t
275 test-merge-criss-cross.t
277 test-merge-criss-cross.t
276 test-merge-default.t
278 test-merge-default.t
277 test-merge-force.t
279 test-merge-force.t
278 test-merge-halt.t
280 test-merge-halt.t
279 test-merge-internal-tools-pattern.t
281 test-merge-internal-tools-pattern.t
280 test-merge-local.t
282 test-merge-local.t
281 test-merge-remove.t
283 test-merge-remove.t
282 test-merge-revert.t
284 test-merge-revert.t
283 test-merge-revert2.t
285 test-merge-revert2.t
284 test-merge-subrepos.t
286 test-merge-subrepos.t
285 test-merge-symlinks.t
287 test-merge-symlinks.t
286 test-merge-tools.t
288 test-merge-tools.t
287 test-merge-types.t
289 test-merge-types.t
288 test-merge1.t
290 test-merge1.t
289 test-merge10.t
291 test-merge10.t
290 test-merge2.t
292 test-merge2.t
291 test-merge4.t
293 test-merge4.t
292 test-merge5.t
294 test-merge5.t
293 test-merge6.t
295 test-merge6.t
294 test-merge7.t
296 test-merge7.t
295 test-merge8.t
297 test-merge8.t
296 test-merge9.t
298 test-merge9.t
297 test-minifileset.py
299 test-minifileset.py
298 test-minirst.py
300 test-minirst.py
299 test-mq-git.t
301 test-mq-git.t
300 test-mq-header-date.t
302 test-mq-header-date.t
301 test-mq-header-from.t
303 test-mq-header-from.t
302 test-mq-merge.t
304 test-mq-merge.t
303 test-mq-pull-from-bundle.t
305 test-mq-pull-from-bundle.t
304 test-mq-qclone-http.t
306 test-mq-qclone-http.t
305 test-mq-qdelete.t
307 test-mq-qdelete.t
306 test-mq-qdiff.t
308 test-mq-qdiff.t
307 test-mq-qfold.t
309 test-mq-qfold.t
308 test-mq-qgoto.t
310 test-mq-qgoto.t
309 test-mq-qimport-fail-cleanup.t
311 test-mq-qimport-fail-cleanup.t
310 test-mq-qnew.t
312 test-mq-qnew.t
311 test-mq-qpush-exact.t
313 test-mq-qpush-exact.t
312 test-mq-qqueue.t
314 test-mq-qqueue.t
313 test-mq-qrefresh-interactive.t
315 test-mq-qrefresh-interactive.t
314 test-mq-qrefresh-replace-log-message.t
316 test-mq-qrefresh-replace-log-message.t
315 test-mq-qrefresh.t
317 test-mq-qrefresh.t
316 test-mq-qrename.t
318 test-mq-qrename.t
317 test-mq-qsave.t
319 test-mq-qsave.t
318 test-mq-safety.t
320 test-mq-safety.t
319 test-mq-subrepo.t
321 test-mq-subrepo.t
320 test-mq-symlinks.t
322 test-mq-symlinks.t
321 test-mv-cp-st-diff.t
323 test-mv-cp-st-diff.t
322 test-narrow-acl.t
324 test-narrow-acl.t
323 test-narrow-archive.t
325 test-narrow-archive.t
324 test-narrow-clone-no-ellipsis.t
326 test-narrow-clone-no-ellipsis.t
325 test-narrow-clone-non-narrow-server.t
327 test-narrow-clone-non-narrow-server.t
326 test-narrow-clone-nonlinear.t
328 test-narrow-clone-nonlinear.t
327 test-narrow-clone.t
329 test-narrow-clone.t
328 test-narrow-commit.t
330 test-narrow-commit.t
329 test-narrow-copies.t
331 test-narrow-copies.t
330 test-narrow-debugcommands.t
332 test-narrow-debugcommands.t
331 test-narrow-debugrebuilddirstate.t
333 test-narrow-debugrebuilddirstate.t
332 test-narrow-exchange-merges.t
334 test-narrow-exchange-merges.t
333 test-narrow-exchange.t
335 test-narrow-exchange.t
334 test-narrow-expanddirstate.t
336 test-narrow-expanddirstate.t
335 test-narrow-merge.t
337 test-narrow-merge.t
336 test-narrow-patch.t
338 test-narrow-patch.t
337 test-narrow-patterns.t
339 test-narrow-patterns.t
338 test-narrow-pull.t
340 test-narrow-pull.t
339 test-narrow-rebase.t
341 test-narrow-rebase.t
340 test-narrow-shallow-merges.t
342 test-narrow-shallow-merges.t
341 test-narrow-shallow.t
343 test-narrow-shallow.t
342 test-narrow-strip.t
344 test-narrow-strip.t
343 test-narrow-update.t
345 test-narrow-update.t
344 test-narrow-widen.t
346 test-narrow-widen.t
345 test-narrow.t
347 test-narrow.t
346 test-nested-repo.t
348 test-nested-repo.t
347 test-newbranch.t
349 test-newbranch.t
348 test-nointerrupt.t
350 test-nointerrupt.t
349 test-obshistory.t
351 test-obshistory.t
350 test-obsmarker-template.t
352 test-obsmarker-template.t
351 test-obsmarkers-effectflag.t
353 test-obsmarkers-effectflag.t
352 test-obsolete-bundle-strip.t
354 test-obsolete-bundle-strip.t
353 test-obsolete-changeset-exchange.t
355 test-obsolete-changeset-exchange.t
354 test-obsolete-checkheads.t
356 test-obsolete-checkheads.t
355 test-obsolete-distributed.t
357 test-obsolete-distributed.t
356 test-obsolete-divergent.t
358 test-obsolete-divergent.t
357 test-obsolete-tag-cache.t
359 test-obsolete-tag-cache.t
358 test-pager.t
360 test-pager.t
359 test-parents.t
361 test-parents.t
360 test-parseindex2.py
362 test-parseindex2.py
361 test-patch-offset.t
363 test-patch-offset.t
362 test-patch.t
364 test-patch.t
363 test-pathconflicts-merge.t
365 test-pathconflicts-merge.t
364 test-pathconflicts-update.t
366 test-pathconflicts-update.t
365 test-pathencode.py
367 test-pathencode.py
366 test-pending.t
368 test-pending.t
367 test-permissions.t
369 test-permissions.t
368 test-phases.t
370 test-phases.t
369 test-pull-branch.t
371 test-pull-branch.t
370 test-pull-http.t
372 test-pull-http.t
371 test-pull-permission.t
373 test-pull-permission.t
372 test-pull-pull-corruption.t
374 test-pull-pull-corruption.t
373 test-pull-r.t
375 test-pull-r.t
374 test-pull-update.t
376 test-pull-update.t
375 test-pull.t
377 test-pull.t
376 test-purge.t
378 test-purge.t
377 test-push-checkheads-partial-C1.t
379 test-push-checkheads-partial-C1.t
378 test-push-checkheads-partial-C2.t
380 test-push-checkheads-partial-C2.t
379 test-push-checkheads-partial-C3.t
381 test-push-checkheads-partial-C3.t
380 test-push-checkheads-partial-C4.t
382 test-push-checkheads-partial-C4.t
381 test-push-checkheads-pruned-B1.t
383 test-push-checkheads-pruned-B1.t
382 test-push-checkheads-pruned-B2.t
384 test-push-checkheads-pruned-B2.t
383 test-push-checkheads-pruned-B3.t
385 test-push-checkheads-pruned-B3.t
384 test-push-checkheads-pruned-B4.t
386 test-push-checkheads-pruned-B4.t
385 test-push-checkheads-pruned-B5.t
387 test-push-checkheads-pruned-B5.t
386 test-push-checkheads-pruned-B6.t
388 test-push-checkheads-pruned-B6.t
387 test-push-checkheads-pruned-B7.t
389 test-push-checkheads-pruned-B7.t
388 test-push-checkheads-pruned-B8.t
390 test-push-checkheads-pruned-B8.t
389 test-push-checkheads-superceed-A1.t
391 test-push-checkheads-superceed-A1.t
390 test-push-checkheads-superceed-A2.t
392 test-push-checkheads-superceed-A2.t
391 test-push-checkheads-superceed-A3.t
393 test-push-checkheads-superceed-A3.t
392 test-push-checkheads-superceed-A4.t
394 test-push-checkheads-superceed-A4.t
393 test-push-checkheads-superceed-A5.t
395 test-push-checkheads-superceed-A5.t
394 test-push-checkheads-superceed-A6.t
396 test-push-checkheads-superceed-A6.t
395 test-push-checkheads-superceed-A7.t
397 test-push-checkheads-superceed-A7.t
396 test-push-checkheads-superceed-A8.t
398 test-push-checkheads-superceed-A8.t
397 test-push-checkheads-unpushed-D1.t
399 test-push-checkheads-unpushed-D1.t
398 test-push-checkheads-unpushed-D2.t
400 test-push-checkheads-unpushed-D2.t
399 test-push-checkheads-unpushed-D3.t
401 test-push-checkheads-unpushed-D3.t
400 test-push-checkheads-unpushed-D4.t
402 test-push-checkheads-unpushed-D4.t
401 test-push-checkheads-unpushed-D5.t
403 test-push-checkheads-unpushed-D5.t
402 test-push-checkheads-unpushed-D6.t
404 test-push-checkheads-unpushed-D6.t
403 test-push-checkheads-unpushed-D7.t
405 test-push-checkheads-unpushed-D7.t
404 test-push-http.t
406 test-push-http.t
405 test-push-warn.t
407 test-push-warn.t
406 test-push.t
408 test-push.t
407 test-pushvars.t
409 test-pushvars.t
408 test-qrecord.t
410 test-qrecord.t
409 test-rebase-abort.t
411 test-rebase-abort.t
410 test-rebase-backup.t
412 test-rebase-backup.t
411 test-rebase-base-flag.t
413 test-rebase-base-flag.t
412 test-rebase-bookmarks.t
414 test-rebase-bookmarks.t
413 test-rebase-brute-force.t
415 test-rebase-brute-force.t
414 test-rebase-cache.t
416 test-rebase-cache.t
415 test-rebase-check-restore.t
417 test-rebase-check-restore.t
416 test-rebase-collapse.t
418 test-rebase-collapse.t
417 test-rebase-conflicts.t
419 test-rebase-conflicts.t
418 test-rebase-dest.t
420 test-rebase-dest.t
419 test-rebase-detach.t
421 test-rebase-detach.t
420 test-rebase-emptycommit.t
422 test-rebase-emptycommit.t
421 test-rebase-inmemory.t
423 test-rebase-inmemory.t
422 test-rebase-interruptions.t
424 test-rebase-interruptions.t
423 test-rebase-issue-noparam-single-rev.t
425 test-rebase-issue-noparam-single-rev.t
424 test-rebase-legacy.t
426 test-rebase-legacy.t
425 test-rebase-mq-skip.t
427 test-rebase-mq-skip.t
426 test-rebase-mq.t
428 test-rebase-mq.t
427 test-rebase-named-branches.t
429 test-rebase-named-branches.t
428 test-rebase-newancestor.t
430 test-rebase-newancestor.t
429 test-rebase-obsolete.t
431 test-rebase-obsolete.t
430 test-rebase-parameters.t
432 test-rebase-parameters.t
431 test-rebase-partial.t
433 test-rebase-partial.t
432 test-rebase-pull.t
434 test-rebase-pull.t
433 test-rebase-rename.t
435 test-rebase-rename.t
434 test-rebase-scenario-global.t
436 test-rebase-scenario-global.t
435 test-rebase-templates.t
437 test-rebase-templates.t
436 test-rebase-transaction.t
438 test-rebase-transaction.t
437 test-rebuildstate.t
439 test-rebuildstate.t
438 test-record.t
440 test-record.t
439 test-relink.t
441 test-relink.t
440 test-remove.t
442 test-remove.t
441 test-removeemptydirs.t
443 test-removeemptydirs.t
442 test-rename-after-merge.t
444 test-rename-after-merge.t
443 test-rename-dir-merge.t
445 test-rename-dir-merge.t
444 test-rename-merge1.t
446 test-rename-merge1.t
445 test-rename.t
447 test-rename.t
446 test-repair-strip.t
448 test-repair-strip.t
447 test-repo-compengines.t
449 test-repo-compengines.t
448 test-resolve.t
450 test-resolve.t
449 test-revert-flags.t
451 test-revert-flags.t
450 test-revert-interactive.t
452 test-revert-interactive.t
451 test-revert-unknown.t
453 test-revert-unknown.t
452 test-revisions.t
454 test-revisions.t
453 test-revlog-ancestry.py
455 test-revlog-ancestry.py
454 test-revlog-group-emptyiter.t
456 test-revlog-group-emptyiter.t
455 test-revlog-mmapindex.t
457 test-revlog-mmapindex.t
456 test-revlog-packentry.t
458 test-revlog-packentry.t
457 test-revlog-raw.py
459 test-revlog-raw.py
458 test-revlog-v2.t
460 test-revlog-v2.t
459 test-revset-dirstate-parents.t
461 test-revset-dirstate-parents.t
460 test-revset-legacy-lookup.t
462 test-revset-legacy-lookup.t
461 test-revset-outgoing.t
463 test-revset-outgoing.t
462 test-rollback.t
464 test-rollback.t
463 test-run-tests.py
465 test-run-tests.py
464 test-run-tests.t
466 test-run-tests.t
465 test-schemes.t
467 test-schemes.t
466 test-serve.t
468 test-serve.t
467 test-setdiscovery.t
469 test-setdiscovery.t
468 test-share.t
470 test-share.t
469 test-shelve.t
471 test-shelve.t
470 test-show-stack.t
472 test-show-stack.t
471 test-show-work.t
473 test-show-work.t
472 test-show.t
474 test-show.t
473 test-simple-update.t
475 test-simple-update.t
474 test-simplekeyvaluefile.py
476 test-simplekeyvaluefile.py
475 test-simplemerge.py
477 test-simplemerge.py
476 test-single-head.t
478 test-single-head.t
477 test-sparse-clear.t
479 test-sparse-clear.t
478 test-sparse-clone.t
480 test-sparse-clone.t
479 test-sparse-import.t
481 test-sparse-import.t
480 test-sparse-merges.t
482 test-sparse-merges.t
481 test-sparse-profiles.t
483 test-sparse-profiles.t
482 test-sparse-requirement.t
484 test-sparse-requirement.t
483 test-sparse-verbose-json.t
485 test-sparse-verbose-json.t
484 test-sparse.t
486 test-sparse.t
485 test-split.t
487 test-split.t
486 test-ssh-bundle1.t
488 test-ssh-bundle1.t
487 test-ssh-clone-r.t
489 test-ssh-clone-r.t
488 test-ssh-proto-unbundle.t
490 test-ssh-proto-unbundle.t
489 test-ssh-proto.t
491 test-ssh-proto.t
490 test-ssh.t
492 test-ssh.t
491 test-sshserver.py
493 test-sshserver.py
492 test-stack.t
494 test-stack.t
493 test-status-inprocess.py
495 test-status-inprocess.py
494 test-status-rev.t
496 test-status-rev.t
495 test-status-terse.t
497 test-status-terse.t
496 test-strict.t
498 test-strict.t
497 test-strip-cross.t
499 test-strip-cross.t
498 test-strip.t
500 test-strip.t
499 test-subrepo-deep-nested-change.t
501 test-subrepo-deep-nested-change.t
500 test-subrepo-missing.t
502 test-subrepo-missing.t
501 test-subrepo-paths.t
503 test-subrepo-paths.t
502 test-subrepo-recursion.t
504 test-subrepo-recursion.t
503 test-subrepo-relative-path.t
505 test-subrepo-relative-path.t
504 test-subrepo.t
506 test-subrepo.t
505 test-symlink-os-yes-fs-no.py
507 test-symlink-os-yes-fs-no.py
506 test-symlink-placeholder.t
508 test-symlink-placeholder.t
507 test-symlinks.t
509 test-symlinks.t
508 test-tag.t
510 test-tag.t
509 test-tags.t
511 test-tags.t
510 test-template-basic.t
512 test-template-basic.t
511 test-template-functions.t
513 test-template-functions.t
512 test-template-keywords.t
514 test-template-keywords.t
513 test-template-map.t
515 test-template-map.t
514 test-transplant.t
516 test-transplant.t
515 test-treemanifest.t
517 test-treemanifest.t
516 test-ui-color.py
518 test-ui-color.py
517 test-ui-config.py
519 test-ui-config.py
518 test-ui-verbosity.py
520 test-ui-verbosity.py
519 test-unamend.t
521 test-unamend.t
520 test-unbundlehash.t
522 test-unbundlehash.t
521 test-uncommit.t
523 test-uncommit.t
522 test-unified-test.t
524 test-unified-test.t
523 test-unionrepo.t
525 test-unionrepo.t
524 test-unrelated-pull.t
526 test-unrelated-pull.t
525 test-up-local-change.t
527 test-up-local-change.t
526 test-update-branches.t
528 test-update-branches.t
527 test-update-dest.t
529 test-update-dest.t
528 test-update-issue1456.t
530 test-update-issue1456.t
529 test-update-names.t
531 test-update-names.t
530 test-update-reverse.t
532 test-update-reverse.t
531 test-upgrade-repo.t
533 test-upgrade-repo.t
532 test-url-download.t
534 test-url-download.t
533 test-url-rev.t
535 test-url-rev.t
534 test-url.py
536 test-url.py
535 test-username-newline.t
537 test-username-newline.t
536 test-util.py
538 test-util.py
537 test-verify.t
539 test-verify.t
538 test-walk.t
540 test-walk.t
539 test-walkrepo.py
541 test-walkrepo.py
540 test-websub.t
542 test-websub.t
541 test-win32text.t
543 test-win32text.t
542 test-wireproto-clientreactor.py
544 test-wireproto-clientreactor.py
543 test-wireproto-framing.py
545 test-wireproto-framing.py
544 test-wireproto-serverreactor.py
546 test-wireproto-serverreactor.py
545 test-wireproto.py
547 test-wireproto.py
546 test-wsgirequest.py
548 test-wsgirequest.py
547 test-xdg.t
549 test-xdg.t
@@ -1,977 +1,977 @@
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 max-stack-size = 50
17 max-stack-size = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 add-noise = 1
19 add-noise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amend-flag = correlated
21 amend-flag = correlated
22
22
23 [color]
23 [color]
24 absorb.node = blue bold
24 absorb.node = blue bold
25 absorb.path = bold
25 absorb.path = bold
26 """
26 """
27
27
28 # TODO:
28 # TODO:
29 # * Rename config items to [commands] namespace
29 # * Rename config items to [commands] namespace
30 # * Converge getdraftstack() with other code in core
30 # * Converge getdraftstack() with other code in core
31 # * move many attributes on fixupstate to be private
31 # * move many attributes on fixupstate to be private
32
32
33 from __future__ import absolute_import
33 from __future__ import absolute_import
34
34
35 import collections
35 import collections
36
36
37 from mercurial.i18n import _
37 from mercurial.i18n import _
38 from mercurial import (
38 from mercurial import (
39 cmdutil,
39 cmdutil,
40 commands,
40 commands,
41 context,
41 context,
42 crecord,
42 crecord,
43 error,
43 error,
44 linelog,
44 linelog,
45 mdiff,
45 mdiff,
46 node,
46 node,
47 obsolete,
47 obsolete,
48 patch,
48 patch,
49 phases,
49 phases,
50 pycompat,
50 pycompat,
51 registrar,
51 registrar,
52 repair,
52 repair,
53 scmutil,
53 scmutil,
54 util,
54 util,
55 )
55 )
56 from mercurial.utils import (
56 from mercurial.utils import (
57 stringutil,
57 stringutil,
58 )
58 )
59
59
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # be specifying the version(s) of Mercurial they are tested with, or
62 # be specifying the version(s) of Mercurial they are tested with, or
63 # leave the attribute unspecified.
63 # leave the attribute unspecified.
64 testedwith = 'ships-with-hg-core'
64 testedwith = 'ships-with-hg-core'
65
65
66 cmdtable = {}
66 cmdtable = {}
67 command = registrar.command(cmdtable)
67 command = registrar.command(cmdtable)
68
68
69 configtable = {}
69 configtable = {}
70 configitem = registrar.configitem(configtable)
70 configitem = registrar.configitem(configtable)
71
71
72 configitem('absorb', 'add-noise', default=True)
72 configitem('absorb', 'add-noise', default=True)
73 configitem('absorb', 'amend-flag', default=None)
73 configitem('absorb', 'amend-flag', default=None)
74 configitem('absorb', 'max-stack-size', default=50)
74 configitem('absorb', 'max-stack-size', default=50)
75
75
76 colortable = {
76 colortable = {
77 'absorb.node': 'blue bold',
77 'absorb.node': 'blue bold',
78 'absorb.path': 'bold',
78 'absorb.path': 'bold',
79 }
79 }
80
80
81 defaultdict = collections.defaultdict
81 defaultdict = collections.defaultdict
82
82
83 class nullui(object):
83 class nullui(object):
84 """blank ui object doing nothing"""
84 """blank ui object doing nothing"""
85 debugflag = False
85 debugflag = False
86 verbose = False
86 verbose = False
87 quiet = True
87 quiet = True
88
88
89 def __getitem__(name):
89 def __getitem__(name):
90 def nullfunc(*args, **kwds):
90 def nullfunc(*args, **kwds):
91 return
91 return
92 return nullfunc
92 return nullfunc
93
93
94 class emptyfilecontext(object):
94 class emptyfilecontext(object):
95 """minimal filecontext representing an empty file"""
95 """minimal filecontext representing an empty file"""
96 def data(self):
96 def data(self):
97 return ''
97 return ''
98
98
99 def node(self):
99 def node(self):
100 return node.nullid
100 return node.nullid
101
101
102 def uniq(lst):
102 def uniq(lst):
103 """list -> list. remove duplicated items without changing the order"""
103 """list -> list. remove duplicated items without changing the order"""
104 seen = set()
104 seen = set()
105 result = []
105 result = []
106 for x in lst:
106 for x in lst:
107 if x not in seen:
107 if x not in seen:
108 seen.add(x)
108 seen.add(x)
109 result.append(x)
109 result.append(x)
110 return result
110 return result
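# Illustrative example (not from the original source): uniq([3, 1, 3, 2, 1])
# returns [3, 1, 2]; the first occurrence of each item keeps its position.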
111
111
112 def getdraftstack(headctx, limit=None):
112 def getdraftstack(headctx, limit=None):
113 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
113 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
114
114
115 changesets are sorted in topo order, oldest first.
115 changesets are sorted in topo order, oldest first.
116 return at most limit items, if limit is a positive number.
116 return at most limit items, if limit is a positive number.
117
117
118 merges are considered as non-draft as well. i.e. every commit
118 merges are considered as non-draft as well. i.e. every commit
119 returned has and only has 1 parent.
119 returned has and only has 1 parent.
120 """
120 """
121 ctx = headctx
121 ctx = headctx
122 result = []
122 result = []
123 while ctx.phase() != phases.public:
123 while ctx.phase() != phases.public:
124 if limit and len(result) >= limit:
124 if limit and len(result) >= limit:
125 break
125 break
126 parents = ctx.parents()
126 parents = ctx.parents()
127 if len(parents) != 1:
127 if len(parents) != 1:
128 break
128 break
129 result.append(ctx)
129 result.append(ctx)
130 ctx = parents[0]
130 ctx = parents[0]
131 result.reverse()
131 result.reverse()
132 return result
132 return result
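# Illustrative example with hypothetical revisions (not from the original
# source): with a public changeset P and draft commits D1 -> D2 -> D3 on top,
# getdraftstack(repo[D3]) returns [D1, D2, D3] (oldest first) and stops at P;
# getdraftstack(repo[D3], limit=2) returns [D2, D3]; a merge anywhere in the
# chain ends the walk at that point, since merges are treated as non-draft.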
133
133
134 def getfilestack(stack, path, seenfctxs=None):
134 def getfilestack(stack, path, seenfctxs=None):
135 """([ctx], str, set) -> [fctx], {ctx: fctx}
135 """([ctx], str, set) -> [fctx], {ctx: fctx}
136
136
137 stack is a list of contexts, from old to new. usually they are what
137 stack is a list of contexts, from old to new. usually they are what
138 "getdraftstack" returns.
138 "getdraftstack" returns.
139
139
140 follows renames, but not copies.
140 follows renames, but not copies.
141
141
142 seenfctxs is a set of filecontexts that will be considered "immutable".
142 seenfctxs is a set of filecontexts that will be considered "immutable".
143 they are usually what this function returned in earlier calls, useful
143 they are usually what this function returned in earlier calls, useful
144 to avoid issues that a file was "moved" to multiple places and was then
144 to avoid issues that a file was "moved" to multiple places and was then
145 modified differently, like: "a" was copied to "b", "a" was also copied to
145 modified differently, like: "a" was copied to "b", "a" was also copied to
146 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
146 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
147 and we enforce only one of them to be able to affect "a"'s content.
147 and we enforce only one of them to be able to affect "a"'s content.
148
148
149 return an empty list and an empty dict, if the specified path does not
149 return an empty list and an empty dict, if the specified path does not
150 exist in stack[-1] (the top of the stack).
150 exist in stack[-1] (the top of the stack).
151
151
152 otherwise, return a list of de-duplicated filecontexts, and the map to
152 otherwise, return a list of de-duplicated filecontexts, and the map to
153 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
153 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
154 of the list would be outside the stack and should be considered immutable.
154 of the list would be outside the stack and should be considered immutable.
155 the remaining items are within the stack.
155 the remaining items are within the stack.
156
156
157 for example, given the following changelog and corresponding filelog
157 for example, given the following changelog and corresponding filelog
158 revisions:
158 revisions:
159
159
160 changelog: 3----4----5----6----7
160 changelog: 3----4----5----6----7
161 filelog: x 0----1----1----2 (x: no such file yet)
161 filelog: x 0----1----1----2 (x: no such file yet)
162
162
163 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
163 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
164 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
164 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
165 dummy empty filecontext.
165 dummy empty filecontext.
166 - if stack = [2], returns ([], {})
166 - if stack = [2], returns ([], {})
167 - if stack = [7], returns ([1, 2], {7: 2})
167 - if stack = [7], returns ([1, 2], {7: 2})
168 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
168 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
169 removed, since 1 is immutable.
169 removed, since 1 is immutable.
170 """
170 """
171 if seenfctxs is None:
171 if seenfctxs is None:
172 seenfctxs = set()
172 seenfctxs = set()
173 assert stack
173 assert stack
174
174
175 if path not in stack[-1]:
175 if path not in stack[-1]:
176 return [], {}
176 return [], {}
177
177
178 fctxs = []
178 fctxs = []
179 fctxmap = {}
179 fctxmap = {}
180
180
181 pctx = stack[0].p1() # the public (immutable) ctx we stop at
181 pctx = stack[0].p1() # the public (immutable) ctx we stop at
182 for ctx in reversed(stack):
182 for ctx in reversed(stack):
183 if path not in ctx: # the file is added in the next commit
183 if path not in ctx: # the file is added in the next commit
184 pctx = ctx
184 pctx = ctx
185 break
185 break
186 fctx = ctx[path]
186 fctx = ctx[path]
187 fctxs.append(fctx)
187 fctxs.append(fctx)
188 if fctx in seenfctxs: # treat fctx as the immutable one
188 if fctx in seenfctxs: # treat fctx as the immutable one
189 pctx = None # do not add another immutable fctx
189 pctx = None # do not add another immutable fctx
190 break
190 break
191 fctxmap[ctx] = fctx # only for mutable fctxs
191 fctxmap[ctx] = fctx # only for mutable fctxs
192 renamed = fctx.renamed()
192 renamed = fctx.renamed()
193 if renamed:
193 if renamed:
194 path = renamed[0] # follow rename
194 path = renamed[0] # follow rename
195 if path in ctx: # but do not follow copy
195 if path in ctx: # but do not follow copy
196 pctx = ctx.p1()
196 pctx = ctx.p1()
197 break
197 break
198
198
199 if pctx is not None: # need an extra immutable fctx
199 if pctx is not None: # need an extra immutable fctx
200 if path in pctx:
200 if path in pctx:
201 fctxs.append(pctx[path])
201 fctxs.append(pctx[path])
202 else:
202 else:
203 fctxs.append(emptyfilecontext())
203 fctxs.append(emptyfilecontext())
204
204
205 fctxs.reverse()
205 fctxs.reverse()
206 # note: we rely on a property of hg: filerev is not reused for linear
206 # note: we rely on a property of hg: filerev is not reused for linear
207 # history. i.e. it's impossible to have:
207 # history. i.e. it's impossible to have:
208 # changelog: 4----5----6 (linear, no merges)
208 # changelog: 4----5----6 (linear, no merges)
209 # filelog: 1----2----1
209 # filelog: 1----2----1
210 # ^ reuse filerev (impossible)
210 # ^ reuse filerev (impossible)
211 # because parents are part of the hash. if that's not true, we need to
211 # because parents are part of the hash. if that's not true, we need to
212 # remove uniq and find a different way to identify fctxs.
212 # remove uniq and find a different way to identify fctxs.
213 return uniq(fctxs), fctxmap
213 return uniq(fctxs), fctxmap
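# Sketch of the intended use of seenfctxs (hypothetical caller code, not from
# the original source): when several paths of the same stack are processed,
# the fctxs returned for earlier paths are fed back in, so a file that was
# "moved" to two places can only be amended through one of them:
#   seen = set()
#   for path in paths_of_interest:        # hypothetical name
#       fctxs, fctxmap = getfilestack(stack, path, seenfctxs=seen)
#       seen.update(fctxs)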
214
214
215 class overlaystore(patch.filestore):
215 class overlaystore(patch.filestore):
216 """read-only, hybrid store based on a dict and ctx.
216 """read-only, hybrid store based on a dict and ctx.
217 memworkingcopy: {path: content}, overrides file contents.
217 memworkingcopy: {path: content}, overrides file contents.
218 """
218 """
219 def __init__(self, basectx, memworkingcopy):
219 def __init__(self, basectx, memworkingcopy):
220 self.basectx = basectx
220 self.basectx = basectx
221 self.memworkingcopy = memworkingcopy
221 self.memworkingcopy = memworkingcopy
222
222
223 def getfile(self, path):
223 def getfile(self, path):
224 """comply with mercurial.patch.filestore.getfile"""
224 """comply with mercurial.patch.filestore.getfile"""
225 if path not in self.basectx:
225 if path not in self.basectx:
226 return None, None, None
226 return None, None, None
227 fctx = self.basectx[path]
227 fctx = self.basectx[path]
228 if path in self.memworkingcopy:
228 if path in self.memworkingcopy:
229 content = self.memworkingcopy[path]
229 content = self.memworkingcopy[path]
230 else:
230 else:
231 content = fctx.data()
231 content = fctx.data()
232 mode = (fctx.islink(), fctx.isexec())
232 mode = (fctx.islink(), fctx.isexec())
233 renamed = fctx.renamed() # False or (path, node)
233 renamed = fctx.renamed() # False or (path, node)
234 return content, mode, (renamed and renamed[0])
234 return content, mode, (renamed and renamed[0])
235
235
236 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
236 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
237 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
237 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
238 memworkingcopy overrides file contents.
238 memworkingcopy overrides file contents.
239 """
239 """
240 # parents must contain 2 items: (node1, node2)
240 # parents must contain 2 items: (node1, node2)
241 if parents is None:
241 if parents is None:
242 parents = ctx.repo().changelog.parents(ctx.node())
242 parents = ctx.repo().changelog.parents(ctx.node())
243 if extra is None:
243 if extra is None:
244 extra = ctx.extra()
244 extra = ctx.extra()
245 date = ctx.date()
245 date = ctx.date()
246 desc = ctx.description()
246 desc = ctx.description()
247 user = ctx.user()
247 user = ctx.user()
248 files = set(ctx.files()).union(memworkingcopy.iterkeys())
248 files = set(ctx.files()).union(memworkingcopy)
249 store = overlaystore(ctx, memworkingcopy)
249 store = overlaystore(ctx, memworkingcopy)
250 return context.memctx(
250 return context.memctx(
251 repo=ctx.repo(), parents=parents, text=desc,
251 repo=ctx.repo(), parents=parents, text=desc,
252 files=files, filectxfn=store, user=user, date=date,
252 files=files, filectxfn=store, user=user, date=date,
253 branch=None, extra=extra)
253 branch=None, extra=extra)
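# Note on the change at line 248 above: set.union() accepts any iterable, and
# iterating a dict yields its keys on both Python 2 and Python 3, so
# ".union(memworkingcopy)" is a portable replacement for the Python-2-only
# ".union(memworkingcopy.iterkeys())". Illustrative values:
#   set(['old']).union({'new': b'data'}) == {'old', 'new'}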
254
254
255 class filefixupstate(object):
255 class filefixupstate(object):
256 """state needed to apply fixups to a single file
256 """state needed to apply fixups to a single file
257
257
258 internally, it keeps file contents of several revisions and a linelog.
258 internally, it keeps file contents of several revisions and a linelog.
259
259
260 the linelog uses odd revision numbers for original contents (fctxs passed
260 the linelog uses odd revision numbers for original contents (fctxs passed
261 to __init__), and even revision numbers for fixups, like:
261 to __init__), and even revision numbers for fixups, like:
262
262
263 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
263 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
264 linelog rev 2: fixups made to self.fctxs[0]
264 linelog rev 2: fixups made to self.fctxs[0]
265 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
265 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
266 linelog rev 4: fixups made to self.fctxs[1]
266 linelog rev 4: fixups made to self.fctxs[1]
267 ...
267 ...
268
268
269 a typical use is like:
269 a typical use is like:
270
270
271 1. call diffwith, to calculate self.fixups
271 1. call diffwith, to calculate self.fixups
272 2. (optionally), present self.fixups to the user, or change it
272 2. (optionally), present self.fixups to the user, or change it
273 3. call apply, to apply changes
273 3. call apply, to apply changes
274 4. read results from "finalcontents", or call getfinalcontent
274 4. read results from "finalcontents", or call getfinalcontent
275 """
275 """
276
276
277 def __init__(self, fctxs, ui=None, opts=None):
277 def __init__(self, fctxs, ui=None, opts=None):
278 """([fctx], ui or None) -> None
278 """([fctx], ui or None) -> None
279
279
280 fctxs should be linear, and sorted by topo order - oldest first.
280 fctxs should be linear, and sorted by topo order - oldest first.
281 fctxs[0] will be considered as "immutable" and will not be changed.
281 fctxs[0] will be considered as "immutable" and will not be changed.
282 """
282 """
283 self.fctxs = fctxs
283 self.fctxs = fctxs
284 self.ui = ui or nullui()
284 self.ui = ui or nullui()
285 self.opts = opts or {}
285 self.opts = opts or {}
286
286
287 # following fields are built from fctxs. they exist for perf reason
287 # following fields are built from fctxs. they exist for perf reason
288 self.contents = [f.data() for f in fctxs]
288 self.contents = [f.data() for f in fctxs]
289 self.contentlines = map(mdiff.splitnewlines, self.contents)
289 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
290 self.linelog = self._buildlinelog()
290 self.linelog = self._buildlinelog()
291 if self.ui.debugflag:
291 if self.ui.debugflag:
292 assert self._checkoutlinelog() == self.contents
292 assert self._checkoutlinelog() == self.contents
293
293
294 # following fields will be filled later
294 # following fields will be filled later
295 self.chunkstats = [0, 0] # [adopted, total : int]
295 self.chunkstats = [0, 0] # [adopted, total : int]
296 self.targetlines = [] # [str]
296 self.targetlines = [] # [str]
297 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
297 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
298 self.finalcontents = [] # [str]
298 self.finalcontents = [] # [str]
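# Note on the change at line 289 above: on Python 3, map() returns a lazy
# iterator, but self.contentlines is indexed later (contentlines[-1] in
# diffwith, contentlines[i] in _buildlinelog), so it has to be a real list.
# pycompat.maplist(f, xs) behaves like list(map(f, xs)) on both Python
# versions, which keeps that indexing working.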
299
299
300 def diffwith(self, targetfctx, showchanges=False):
300 def diffwith(self, targetfctx, showchanges=False):
301 """calculate fixups needed by examining the differences between
301 """calculate fixups needed by examining the differences between
302 self.fctxs[-1] and targetfctx, chunk by chunk.
302 self.fctxs[-1] and targetfctx, chunk by chunk.
303
303
304 targetfctx is the target state we move towards. we may or may not be
304 targetfctx is the target state we move towards. we may or may not be
305 able to get there because not all modified chunks can be amended into
305 able to get there because not all modified chunks can be amended into
306 a non-public fctx unambiguously.
306 a non-public fctx unambiguously.
307
307
308 call this only once, before apply().
308 call this only once, before apply().
309
309
310 update self.fixups, self.chunkstats, and self.targetlines.
310 update self.fixups, self.chunkstats, and self.targetlines.
311 """
311 """
312 a = self.contents[-1]
312 a = self.contents[-1]
313 alines = self.contentlines[-1]
313 alines = self.contentlines[-1]
314 b = targetfctx.data()
314 b = targetfctx.data()
315 blines = mdiff.splitnewlines(b)
315 blines = mdiff.splitnewlines(b)
316 self.targetlines = blines
316 self.targetlines = blines
317
317
318 self.linelog.annotate(self.linelog.maxrev)
318 self.linelog.annotate(self.linelog.maxrev)
319 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
319 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
320 assert len(annotated) == len(alines)
320 assert len(annotated) == len(alines)
321 # add a dummy end line to make insertion at the end easier
321 # add a dummy end line to make insertion at the end easier
322 if annotated:
322 if annotated:
323 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
323 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
324 annotated.append(dummyendline)
324 annotated.append(dummyendline)
325
325
326 # analyse diff blocks
326 # analyse diff blocks
327 for chunk in self._alldiffchunks(a, b, alines, blines):
327 for chunk in self._alldiffchunks(a, b, alines, blines):
328 newfixups = self._analysediffchunk(chunk, annotated)
328 newfixups = self._analysediffchunk(chunk, annotated)
329 self.chunkstats[0] += bool(newfixups) # 1 or 0
329 self.chunkstats[0] += bool(newfixups) # 1 or 0
330 self.chunkstats[1] += 1
330 self.chunkstats[1] += 1
331 self.fixups += newfixups
331 self.fixups += newfixups
332 if showchanges:
332 if showchanges:
333 self._showchanges(alines, blines, chunk, newfixups)
333 self._showchanges(alines, blines, chunk, newfixups)
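# Illustration of the fixup tuples collected above (hypothetical numbers, not
# from the original source): a fixup (4, 10, 12, 20, 23) means "in linelog
# revision 4, i.e. the fixup slot for self.fctxs[1], replace lines 10:12 of
# the current content with target lines 20:23"; apply() below passes exactly
# these five values to self.linelog.replacelines().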
334
334
335 def apply(self):
335 def apply(self):
336 """apply self.fixups. update self.linelog, self.finalcontents.
336 """apply self.fixups. update self.linelog, self.finalcontents.
337
337
338 call this only once, before getfinalcontent(), after diffwith().
338 call this only once, before getfinalcontent(), after diffwith().
339 """
339 """
340 # the following is unnecessary, as it's done by "diffwith":
340 # the following is unnecessary, as it's done by "diffwith":
341 # self.linelog.annotate(self.linelog.maxrev)
341 # self.linelog.annotate(self.linelog.maxrev)
342 for rev, a1, a2, b1, b2 in reversed(self.fixups):
342 for rev, a1, a2, b1, b2 in reversed(self.fixups):
343 blines = self.targetlines[b1:b2]
343 blines = self.targetlines[b1:b2]
344 if self.ui.debugflag:
344 if self.ui.debugflag:
345 idx = (max(rev - 1, 0)) // 2
345 idx = (max(rev - 1, 0)) // 2
346 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
346 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
347 % (node.short(self.fctxs[idx].node()),
347 % (node.short(self.fctxs[idx].node()),
348 a1, a2, len(blines)))
348 a1, a2, len(blines)))
349 self.linelog.replacelines(rev, a1, a2, b1, b2)
349 self.linelog.replacelines(rev, a1, a2, b1, b2)
350 if self.opts.get('edit_lines', False):
350 if self.opts.get('edit_lines', False):
351 self.finalcontents = self._checkoutlinelogwithedits()
351 self.finalcontents = self._checkoutlinelogwithedits()
352 else:
352 else:
353 self.finalcontents = self._checkoutlinelog()
353 self.finalcontents = self._checkoutlinelog()
354
354
355 def getfinalcontent(self, fctx):
355 def getfinalcontent(self, fctx):
356 """(fctx) -> str. get modified file content for a given filecontext"""
356 """(fctx) -> str. get modified file content for a given filecontext"""
357 idx = self.fctxs.index(fctx)
357 idx = self.fctxs.index(fctx)
358 return self.finalcontents[idx]
358 return self.finalcontents[idx]
359
359
360 def _analysediffchunk(self, chunk, annotated):
360 def _analysediffchunk(self, chunk, annotated):
361 """analyse a different chunk and return new fixups found
361 """analyse a different chunk and return new fixups found
362
362
363 return [] if no lines from the chunk can be safely applied.
363 return [] if no lines from the chunk can be safely applied.
364
364
365 the chunk (or lines) cannot be safely applied, if, for example:
365 the chunk (or lines) cannot be safely applied, if, for example:
366 - the modified (deleted) lines belong to a public changeset
366 - the modified (deleted) lines belong to a public changeset
367 (self.fctxs[0])
367 (self.fctxs[0])
368 - the chunk is a pure insertion and the adjacent lines (at most 2
368 - the chunk is a pure insertion and the adjacent lines (at most 2
369 lines) belong to different non-public changesets, or do not belong
369 lines) belong to different non-public changesets, or do not belong
370 to any non-public changesets.
370 to any non-public changesets.
371 - the chunk is modifying lines from different changesets.
371 - the chunk is modifying lines from different changesets.
372 in this case, if the number of lines deleted equals to the number
372 in this case, if the number of lines deleted equals to the number
373 of lines added, assume it's a simple 1:1 map (could be wrong).
373 of lines added, assume it's a simple 1:1 map (could be wrong).
374 otherwise, give up.
374 otherwise, give up.
375 - the chunk is modifying lines from a single non-public changeset,
375 - the chunk is modifying lines from a single non-public changeset,
376 but other revisions touch the area as well. i.e. the lines are
376 but other revisions touch the area as well. i.e. the lines are
377 not continuous as seen from the linelog.
377 not continuous as seen from the linelog.
378 """
378 """
379 a1, a2, b1, b2 = chunk
379 a1, a2, b1, b2 = chunk
380 # find involved indexes from annotate result
380 # find involved indexes from annotate result
381 involved = annotated[a1:a2]
381 involved = annotated[a1:a2]
382 if not involved and annotated: # a1 == a2 and a is not empty
382 if not involved and annotated: # a1 == a2 and a is not empty
383 # pure insertion, check nearby lines. ignore lines belong
383 # pure insertion, check nearby lines. ignore lines belong
384 # to the public (first) changeset (i.e. annotated[i][0] == 1)
384 # to the public (first) changeset (i.e. annotated[i][0] == 1)
385 nearbylinenums = {a2, max(0, a1 - 1)}
385 nearbylinenums = {a2, max(0, a1 - 1)}
386 involved = [annotated[i]
386 involved = [annotated[i]
387 for i in nearbylinenums if annotated[i][0] != 1]
387 for i in nearbylinenums if annotated[i][0] != 1]
388 involvedrevs = list(set(r for r, l in involved))
388 involvedrevs = list(set(r for r, l in involved))
389 newfixups = []
389 newfixups = []
390 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
390 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
391 # chunk belongs to a single revision
391 # chunk belongs to a single revision
392 rev = involvedrevs[0]
392 rev = involvedrevs[0]
393 if rev > 1:
393 if rev > 1:
394 fixuprev = rev + 1
394 fixuprev = rev + 1
395 newfixups.append((fixuprev, a1, a2, b1, b2))
395 newfixups.append((fixuprev, a1, a2, b1, b2))
396 elif a2 - a1 == b2 - b1 or b1 == b2:
396 elif a2 - a1 == b2 - b1 or b1 == b2:
397 # 1:1 line mapping, or chunk was deleted
397 # 1:1 line mapping, or chunk was deleted
398 for i in pycompat.xrange(a1, a2):
398 for i in pycompat.xrange(a1, a2):
399 rev, linenum = annotated[i]
399 rev, linenum = annotated[i]
400 if rev > 1:
400 if rev > 1:
401 if b1 == b2: # deletion, simply remove that single line
401 if b1 == b2: # deletion, simply remove that single line
402 nb1 = nb2 = 0
402 nb1 = nb2 = 0
403 else: # 1:1 line mapping, change the corresponding rev
403 else: # 1:1 line mapping, change the corresponding rev
404 nb1 = b1 + i - a1
404 nb1 = b1 + i - a1
405 nb2 = nb1 + 1
405 nb2 = nb1 + 1
406 fixuprev = rev + 1
406 fixuprev = rev + 1
407 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
407 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
408 return self._optimizefixups(newfixups)
408 return self._optimizefixups(newfixups)
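# Example of the rules above (hypothetical chunk, not from the original
# source): if a chunk rewrites 3 old lines into 3 new lines and the old lines
# came from two different draft revisions, each old line i is paired with
# target line b1 + (i - a1) and amended into its own revision; if the counts
# differ instead (say 3 old lines -> 2 new lines) and more than one revision
# is involved, no fixup is produced and the chunk is left alone.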
409
409
410 @staticmethod
410 @staticmethod
411 def _alldiffchunks(a, b, alines, blines):
411 def _alldiffchunks(a, b, alines, blines):
412 """like mdiff.allblocks, but only care about differences"""
412 """like mdiff.allblocks, but only care about differences"""
413 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
413 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
414 for chunk, btype in blocks:
414 for chunk, btype in blocks:
415 if btype != '!':
415 if btype != '!':
416 continue
416 continue
417 yield chunk
417 yield chunk
418
418
419 def _buildlinelog(self):
419 def _buildlinelog(self):
420 """calculate the initial linelog based on self.content{,line}s.
420 """calculate the initial linelog based on self.content{,line}s.
421 this is similar to running a partial "annotate".
421 this is similar to running a partial "annotate".
422 """
422 """
423 llog = linelog.linelog()
423 llog = linelog.linelog()
424 a, alines = '', []
424 a, alines = '', []
425 for i in pycompat.xrange(len(self.contents)):
425 for i in pycompat.xrange(len(self.contents)):
426 b, blines = self.contents[i], self.contentlines[i]
426 b, blines = self.contents[i], self.contentlines[i]
427 llrev = i * 2 + 1
427 llrev = i * 2 + 1
            chunks = self._alldiffchunks(a, b, alines, blines)
            for a1, a2, b1, b2 in reversed(list(chunks)):
                llog.replacelines(llrev, a1, a2, b1, b2)
            a, alines = b, blines
        return llog

    def _checkoutlinelog(self):
        """() -> [str]. check out file contents from linelog"""
        contents = []
        for i in pycompat.xrange(len(self.contents)):
            rev = (i + 1) * 2
            self.linelog.annotate(rev)
            content = ''.join(map(self._getline, self.linelog.annotateresult))
            contents.append(content)
        return contents

    def _checkoutlinelogwithedits(self):
        """() -> [str]. prompt all lines for edit"""
        alllines = self.linelog.getalllines()
        # header
        editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
                        'exists in the changeset to the top\nHG:\n')
                      % self.fctxs[-1].path())
        # [(idx, fctx)]. hide the dummy emptyfilecontext
        visiblefctxs = [(i, f)
                        for i, f in enumerate(self.fctxs)
                        if not isinstance(f, emptyfilecontext)]
        for i, (j, f) in enumerate(visiblefctxs):
            editortext += (_('HG: %s/%s %s %s\n') %
                           ('|' * i, '-' * (len(visiblefctxs) - i + 1),
                            node.short(f.node()),
                            f.description().split('\n',1)[0]))
        editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
        # figure out the lifetime of a line, this is relatively inefficient,
        # but probably fine
        lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
        for i, f in visiblefctxs:
            self.linelog.annotate((i + 1) * 2)
            for l in self.linelog.annotateresult:
                lineset[l].add(i)
        # append lines
        for l in alllines:
            editortext += ('    %s : %s' %
                           (''.join([('y' if i in lineset[l] else ' ')
                                     for i, _f in visiblefctxs]),
                            self._getline(l)))
        # run editor
        editedtext = self.ui.edit(editortext, '', action='absorb')
        if not editedtext:
            raise error.Abort(_('empty editor text'))
        # parse edited result
        contents = ['' for i in self.fctxs]
        leftpadpos = 4
        colonpos = leftpadpos + len(visiblefctxs) + 1
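        # each content line written above has the form
        # "    <one y/space column per changeset> : <line content>",
        # so colonpos is the index of the ':' separator on every such line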
        for l in mdiff.splitnewlines(editedtext):
            if l.startswith('HG:'):
                continue
            if l[colonpos - 1:colonpos + 2] != ' : ':
                raise error.Abort(_('malformed line: %s') % l)
            linecontent = l[colonpos + 2:]
            for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
                if ch == 'y':
                    contents[visiblefctxs[i][0]] += linecontent
        # chunkstats is hard to calculate if anything changes, therefore
        # set them to just a simple value (1, 1).
        if editedtext != editortext:
            self.chunkstats = [1, 1]
        return contents

    def _getline(self, lineinfo):
        """((rev, linenum)) -> str. convert rev+line number to line content"""
        rev, linenum = lineinfo
        if rev & 1: # odd: original line taken from fctxs
            return self.contentlines[rev // 2][linenum]
        else: # even: fixup line from targetfctx
            return self.targetlines[linenum]

    def _iscontinuous(self, a1, a2, closedinterval=False):
        """(a1, a2 : int) -> bool

        check if these lines are continuous. i.e. no other insertions or
        deletions (from other revisions) among these lines.

        closedinterval decides whether a2 should be included or not. i.e. is
        it [a1, a2), or [a1, a2] ?
        """
        if a1 >= a2:
            return True
        llog = self.linelog
        offset1 = llog.getoffset(a1)
        offset2 = llog.getoffset(a2) + int(closedinterval)
        linesinbetween = llog.getalllines(offset1, offset2)
        return len(linesinbetween) == a2 - a1 + int(closedinterval)

    def _optimizefixups(self, fixups):
        """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
        merge adjacent fixups to make them less fragmented.
        """
        result = []
        pcurrentchunk = [[-1, -1, -1, -1, -1]]
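        # a single-element list is used as a mutable cell so that pushchunk()
        # below can update the current chunk in place; "nonlocal" is not
        # available in Python 2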

        def pushchunk():
            if pcurrentchunk[0][0] != -1:
                result.append(tuple(pcurrentchunk[0]))

        for i, chunk in enumerate(fixups):
            rev, a1, a2, b1, b2 = chunk
            lastrev = pcurrentchunk[0][0]
            lasta2 = pcurrentchunk[0][2]
            lastb2 = pcurrentchunk[0][4]
            if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
                    self._iscontinuous(max(a1 - 1, 0), a1)):
                # merge into currentchunk
                pcurrentchunk[0][2] = a2
                pcurrentchunk[0][4] = b2
            else:
                pushchunk()
                pcurrentchunk[0] = list(chunk)
        pushchunk()
        return result

    def _showchanges(self, alines, blines, chunk, fixups):
        ui = self.ui

        def label(line, label):
            if line.endswith('\n'):
                line = line[:-1]
            return ui.label(line, label)

        # this is not optimized for perf but _showchanges only gets executed
        # with an extra command-line flag.
        a1, a2, b1, b2 = chunk
        aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
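        # aidxs/bidxs map each displayed old/new line to the index into
        # self.fctxs of the changeset it is attributed to; a fixup targets
        # linelog revision i * 2 + 2, which corresponds to fctxs[i], hence
        # the (max(idx, 1) - 1) // 2 below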
        for idx, fa1, fa2, fb1, fb2 in fixups:
            for i in pycompat.xrange(fa1, fa2):
                aidxs[i - a1] = (max(idx, 1) - 1) // 2
            for i in pycompat.xrange(fb1, fb2):
                bidxs[i - b1] = (max(idx, 1) - 1) // 2

        buf = [] # [(idx, content)]
        buf.append((0, label('@@ -%d,%d +%d,%d @@'
                             % (a1, a2 - a1, b1, b2 - b1), 'diff.hunk')))
        buf += [(aidxs[i - a1], label('-' + alines[i], 'diff.deleted'))
                for i in pycompat.xrange(a1, a2)]
        buf += [(bidxs[i - b1], label('+' + blines[i], 'diff.inserted'))
                for i in pycompat.xrange(b1, b2)]
        for idx, line in buf:
            shortnode = idx and node.short(self.fctxs[idx].node()) or ''
            ui.write(ui.label(shortnode[0:7].ljust(8), 'absorb.node') +
                     line + '\n')

class fixupstate(object):
    """state needed to run absorb

    internally, it keeps paths and filefixupstates.

    a typical use is similar to filefixupstate:

    1. call diffwith, to calculate fixups
    2. (optionally), present fixups to the user, or edit fixups
    3. call apply, to apply changes to memory
    4. call commit, to commit changes to hg database
    """
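    # a minimal usage sketch, mirroring what the absorb() function below does:
    #
    #   stack = getdraftstack(repo['.'], limit)
    #   state = fixupstate(stack, ui=ui, opts={})
    #   state.diffwith(repo[None])    # compute fixups against the working copy
    #   state.apply()                 # rewrite file contents in memory
    #   state.commit()                # create the amended changesets
    #   state.printchunkstats()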

    def __init__(self, stack, ui=None, opts=None):
        """([ctx], ui or None) -> None

        stack: should be linear, and sorted by topo order - oldest first.
        all commits in stack are considered mutable.
        """
        assert stack
        self.ui = ui or nullui()
        self.opts = opts or {}
        self.stack = stack
        self.repo = stack[-1].repo().unfiltered()

        # following fields will be filled later
        self.paths = [] # [str]
        self.status = None # ctx.status output
        self.fctxmap = {} # {path: {ctx: fctx}}
        self.fixupmap = {} # {path: filefixupstate}
        self.replacemap = {} # {oldnode: newnode or None}
        self.finalnode = None # head after all fixups

    def diffwith(self, targetctx, match=None, showchanges=False):
        """diff and prepare fixups. update self.fixupmap, self.paths"""
        # only care about modified files
        self.status = self.stack[-1].status(targetctx, match)
        self.paths = []
        # but if --edit-lines is used, the user may want to edit files
        # even if they are not modified
        editopt = self.opts.get('edit_lines')
        if not self.status.modified and editopt and match:
            interestingpaths = match.files()
        else:
            interestingpaths = self.status.modified
        # prepare the filefixupstate
        seenfctxs = set()
        # sorting is necessary to eliminate ambiguity for the "double move"
        # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
        for path in sorted(interestingpaths):
            self.ui.debug('calculating fixups for %s\n' % path)
            targetfctx = targetctx[path]
            fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
            # ignore symbolic links or binary, or unchanged files
            if any(f.islink() or stringutil.binary(f.data())
                   for f in [targetfctx] + fctxs
                   if not isinstance(f, emptyfilecontext)):
                continue
            if targetfctx.data() == fctxs[-1].data() and not editopt:
                continue
            seenfctxs.update(fctxs[1:])
            self.fctxmap[path] = ctx2fctx
            fstate = filefixupstate(fctxs, ui=self.ui, opts=self.opts)
            if showchanges:
                colorpath = self.ui.label(path, 'absorb.path')
                header = 'showing changes for ' + colorpath
                self.ui.write(header + '\n')
            fstate.diffwith(targetfctx, showchanges=showchanges)
            self.fixupmap[path] = fstate
            self.paths.append(path)

    def apply(self):
        """apply fixups to individual filefixupstates"""
        for path, state in self.fixupmap.iteritems():
            if self.ui.debugflag:
                self.ui.write(_('applying fixups to %s\n') % path)
            state.apply()

    @property
    def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
        return dict((path, state.chunkstats)
                    for path, state in self.fixupmap.iteritems())

    def commit(self):
        """commit changes. update self.finalnode, self.replacemap"""
        with self.repo.wlock(), self.repo.lock():
            with self.repo.transaction('absorb') as tr:
                self._commitstack()
                self._movebookmarks(tr)
                if self.repo['.'].node() in self.replacemap:
                    self._moveworkingdirectoryparent()
                if self._useobsolete:
                    self._obsoleteoldcommits()
            if not self._useobsolete: # strip must be outside transactions
                self._stripoldcommits()
            return self.finalnode

    def printchunkstats(self):
        """print things like '1 of 2 chunk(s) applied'"""
        ui = self.ui
        chunkstats = self.chunkstats
        if ui.verbose:
            # chunkstats for each file
            for path, stat in chunkstats.iteritems():
                if stat[0]:
                    ui.write(_('%s: %d of %d chunk(s) applied\n')
                             % (path, stat[0], stat[1]))
        elif not ui.quiet:
            # a summary for all files
            stats = chunkstats.values()
            applied, total = (sum(s[i] for s in stats) for i in (0, 1))
            ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))

    def _commitstack(self):
        """make new commits. update self.finalnode, self.replacemap.
        it is split from "commit" to avoid too much indentation.
        """
        # last node (20-char) committed by us
        lastcommitted = None
        # p1 which overrides the parent of the next commit, "None" means use
        # the original parent unchanged
        nextp1 = None
        for ctx in self.stack:
            memworkingcopy = self._getnewfilecontents(ctx)
            if not memworkingcopy and not lastcommitted:
                # nothing changed, nothing committed
                nextp1 = ctx
                continue
            msg = ''
            if self._willbecomenoop(memworkingcopy, ctx, nextp1):
                # changeset is no longer necessary
                self.replacemap[ctx.node()] = None
                msg = _('became empty and was dropped')
            else:
                # changeset needs re-commit
                nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
                lastcommitted = self.repo[nodestr]
                nextp1 = lastcommitted
                self.replacemap[ctx.node()] = lastcommitted.node()
                if memworkingcopy:
                    msg = _('%d file(s) changed, became %s') % (
                        len(memworkingcopy), self._ctx2str(lastcommitted))
                else:
                    msg = _('became %s') % self._ctx2str(lastcommitted)
            if self.ui.verbose and msg:
                self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
        self.finalnode = lastcommitted and lastcommitted.node()
    def _ctx2str(self, ctx):
        if self.ui.debugflag:
            return ctx.hex()
        else:
            return node.short(ctx.node())

    def _getnewfilecontents(self, ctx):
        """(ctx) -> {path: str}

        fetch file contents from filefixupstates.
        return the working copy overrides - files different from ctx.
        """
        result = {}
        for path in self.paths:
            ctx2fctx = self.fctxmap[path] # {ctx: fctx}
            if ctx not in ctx2fctx:
                continue
            fctx = ctx2fctx[ctx]
            content = fctx.data()
            newcontent = self.fixupmap[path].getfinalcontent(fctx)
            if content != newcontent:
                result[fctx.path()] = newcontent
        return result

    def _movebookmarks(self, tr):
        repo = self.repo
        needupdate = [(name, self.replacemap[hsh])
                      for name, hsh in repo._bookmarks.iteritems()
                      if hsh in self.replacemap]
        changes = []
        for name, hsh in needupdate:
            if hsh:
                changes.append((name, hsh))
                if self.ui.verbose:
                    self.ui.write(_('moving bookmark %s to %s\n')
                                  % (name, node.hex(hsh)))
            else:
                changes.append((name, None))
                if self.ui.verbose:
                    self.ui.write(_('deleting bookmark %s\n') % name)
        repo._bookmarks.applychanges(repo, tr, changes)

    def _moveworkingdirectoryparent(self):
        if not self.finalnode:
            # Find the latest not-{obsoleted,stripped} parent.
            revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
            ctx = self.repo[revs.first()]
            self.finalnode = ctx.node()
        else:
            ctx = self.repo[self.finalnode]

        dirstate = self.repo.dirstate
        # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
        # be slow. in absorb's case, no need to invalidate fsmonitorstate.
        noop = lambda: 0
        restore = noop
        if util.safehasattr(dirstate, '_fsmonitorstate'):
            bak = dirstate._fsmonitorstate.invalidate
            def restore():
                dirstate._fsmonitorstate.invalidate = bak
            dirstate._fsmonitorstate.invalidate = noop
        try:
            with dirstate.parentchange():
                dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
        finally:
            restore()

    @staticmethod
    def _willbecomenoop(memworkingcopy, ctx, pctx=None):
        """({path: content}, ctx, ctx) -> bool. test if a commit will be noop

        if it will become an empty commit (does not change anything, after the
        memworkingcopy overrides), return True. otherwise return False.
        """
        if not pctx:
            parents = ctx.parents()
            if len(parents) != 1:
                return False
            pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
        if not set(ctx.files()).issubset(set(memworkingcopy)):
            return False
        for path, content in memworkingcopy.iteritems():
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
            pfctx = pctx[path]
            if pfctx.flags() != fctx.flags():
                return False
            if pfctx.data() != content:
                return False
        return True

    def _commitsingle(self, memworkingcopy, ctx, p1=None):
        """({path: content}, ctx, node) -> node. make a single commit

        the commit is a clone from ctx, with an (optionally) different p1, and
        different file contents replaced by memworkingcopy.
        """
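        # when p1 is None the expression below evaluates to None, and
        # overlaycontext then keeps ctx's original parents; otherwise the
        # rewritten p1 becomes the sole parent (with nullid as p2)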
        parents = p1 and (p1, node.nullid)
        extra = ctx.extra()
        if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
            extra['absorb_source'] = ctx.hex()
        mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
        # preserve phase
        with mctx.repo().ui.configoverride({
                ('phases', 'new-commit'): ctx.phase()}):
            return mctx.commit()

    @util.propertycache
    def _useobsolete(self):
        """() -> bool"""
        return obsolete.isenabled(self.repo, obsolete.createmarkersopt)

    def _obsoleteoldcommits(self):
        relations = [(self.repo[k], v and (self.repo[v],) or ())
                     for k, v in self.replacemap.iteritems()]
        if relations:
            obsolete.createmarkers(self.repo, relations)

    def _stripoldcommits(self):
        nodelist = self.replacemap.keys()
        # make sure we don't strip innocent children
        revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
                              nodelist, nodelist)
        tonode = self.repo.changelog.node
        nodelist = [tonode(r) for r in revs]
        if nodelist:
            repair.strip(self.repo.ui, self.repo, nodelist)

def _parsechunk(hunk):
    """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
    if type(hunk) not in (crecord.uihunk, patch.recordhunk):
        return None, None
    path = hunk.header.filename()
    a1 = hunk.fromline + len(hunk.before) - 1
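    # hunk.fromline is the 1-based start of the hunk (including its leading
    # context) in the old file; skipping len(hunk.before) context lines and
    # subtracting one gives the 0-based index of the first changed line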
    # remove before and after context
    hunk.before = hunk.after = []
    buf = util.stringio()
    hunk.write(buf)
    patchlines = mdiff.splitnewlines(buf.getvalue())
    # hunk.prettystr() will update hunk.removed
    a2 = a1 + hunk.removed
    blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
    return path, (a1, a2, blines)

def overlaydiffcontext(ctx, chunks):
    """(ctx, [crecord.uihunk]) -> memctx

    return a memctx with some [1] patches (chunks) applied to ctx.
    [1]: modifications are handled. renames, mode changes, etc. are ignored.
    """
    # sadly the applying-patch logic is hardly reusable, and messy:
    # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
    #    needs a file stream of a patch and will re-parse it, while we have
    #    structured hunk objects at hand.
    # 2. a lot of different implementations about "chunk" (patch.hunk,
    #    patch.recordhunk, crecord.uihunk)
    # as we only care about applying changes to modified files, no mode
    # change, no binary diff, and no renames, it's probably okay to
    # re-invent the logic using much simpler code here.
    memworkingcopy = {} # {path: content}
    patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
    for path, info in map(_parsechunk, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
    for path, patches in patchmap.iteritems():
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
        lines = mdiff.splitnewlines(ctx[path].data())
        for a1, a2, blines in patches:
            lines[a1:a2] = blines
        memworkingcopy[path] = ''.join(lines)
    return overlaycontext(memworkingcopy, ctx)

def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
    """pick fixup chunks from targetctx, apply them to stack.

    if targetctx is None, the working copy context will be used.
    if stack is None, the current draft stack will be used.
    return fixupstate.
    """
    if stack is None:
        limit = ui.configint('absorb', 'max-stack-size')
        stack = getdraftstack(repo['.'], limit)
        if limit and len(stack) >= limit:
            ui.warn(_('absorb: only the recent %d changesets will '
                      'be analysed\n')
                    % limit)
    if not stack:
        raise error.Abort(_('no changeset to change'))
    if targetctx is None: # default to working copy
        targetctx = repo[None]
    if pats is None:
        pats = ()
    if opts is None:
        opts = {}
    state = fixupstate(stack, ui=ui, opts=opts)
    matcher = scmutil.match(targetctx, pats, opts)
    if opts.get('interactive'):
        diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
        origchunks = patch.parsepatch(diff)
        chunks = cmdutil.recordfilter(ui, origchunks)[0]
        targetctx = overlaydiffcontext(stack[-1], chunks)
    state.diffwith(targetctx, matcher, showchanges=opts.get('print_changes'))
    if not opts.get('dry_run'):
        state.apply()
        if state.commit():
            state.printchunkstats()
        elif not ui.quiet:
            ui.write(_('nothing applied\n'))
    return state

@command('^absorb',
         [('p', 'print-changes', None,
           _('print which changesets are modified by which changes')),
          ('i', 'interactive', None,
           _('interactively select which chunks to apply (EXPERIMENTAL)')),
          ('e', 'edit-lines', None,
           _('edit what lines belong to which changesets before commit '
             '(EXPERIMENTAL)')),
         ] + commands.dryrunopts + commands.walkopts,
         _('hg absorb [OPTION] [FILE]...'))
def absorbcmd(ui, repo, *pats, **opts):
    """incorporate corrections into the stack of draft changesets

    absorb analyzes each change in your working directory and attempts to
    amend the changed lines into the changesets in your stack that first
    introduced those lines.

    If absorb cannot find an unambiguous changeset to amend for a change,
    that change will be left in the working directory, untouched. Such
    changes can be observed by :hg:`status` or :hg:`diff` afterwards. In
    other words, absorb does not write to the working directory.

    Changesets outside the revset `::. and not public() and not merge()` will
    not be changed.

    Changesets that become empty after applying the changes will be deleted.

    If in doubt, run :hg:`absorb -pn` to preview what changesets will
    be amended by what changed lines, without actually changing anything.

    Returns 0 on success, 1 if all chunks were ignored and nothing amended.
    """
    state = absorb(ui, repo, pats=pats, opts=opts)
    if sum(s[0] for s in state.chunkstats.values()) == 0:
        return 1