##// END OF EJS Templates
py3: drop b'' from debug message "moving bookmarks"
Yuya Nishihara -
r36854:a00c38b3 default
parent child Browse files
Show More
@@ -1,406 +1,407 b''
1 test-abort-checkin.t
1 test-abort-checkin.t
2 test-add.t
2 test-add.t
3 test-addremove-similar.t
3 test-addremove-similar.t
4 test-addremove.t
4 test-addremove.t
5 test-amend-subrepo.t
5 test-amend-subrepo.t
6 test-amend.t
6 test-amend.t
7 test-ancestor.py
7 test-ancestor.py
8 test-annotate.py
8 test-annotate.py
9 test-annotate.t
9 test-annotate.t
10 test-archive-symlinks.t
10 test-archive-symlinks.t
11 test-atomictempfile.py
11 test-atomictempfile.py
12 test-audit-path.t
12 test-audit-path.t
13 test-audit-subrepo.t
13 test-audit-subrepo.t
14 test-automv.t
14 test-automv.t
15 test-backout.t
15 test-backout.t
16 test-backwards-remove.t
16 test-backwards-remove.t
17 test-basic.t
17 test-basic.t
18 test-bheads.t
18 test-bheads.t
19 test-bisect.t
19 test-bisect.t
20 test-bisect2.t
20 test-bisect2.t
21 test-bisect3.t
21 test-bisect3.t
22 test-blackbox.t
22 test-blackbox.t
23 test-bookmarks-current.t
23 test-bookmarks-current.t
24 test-bookmarks-merge.t
24 test-bookmarks-merge.t
25 test-bookmarks-rebase.t
25 test-bookmarks-rebase.t
26 test-bookmarks-strip.t
26 test-bookmarks-strip.t
27 test-bookmarks.t
27 test-bookmarks.t
28 test-branch-change.t
28 test-branch-option.t
29 test-branch-option.t
29 test-branch-tag-confict.t
30 test-branch-tag-confict.t
30 test-branches.t
31 test-branches.t
31 test-bundle-phases.t
32 test-bundle-phases.t
32 test-bundle-type.t
33 test-bundle-type.t
33 test-bundle-vs-outgoing.t
34 test-bundle-vs-outgoing.t
34 test-bundle2-multiple-changegroups.t
35 test-bundle2-multiple-changegroups.t
35 test-cappedreader.py
36 test-cappedreader.py
36 test-casecollision.t
37 test-casecollision.t
37 test-cat.t
38 test-cat.t
38 test-censor.t
39 test-censor.t
39 test-changelog-exec.t
40 test-changelog-exec.t
40 test-check-commit.t
41 test-check-commit.t
41 test-check-execute.t
42 test-check-execute.t
42 test-check-module-imports.t
43 test-check-module-imports.t
43 test-check-pyflakes.t
44 test-check-pyflakes.t
44 test-check-pylint.t
45 test-check-pylint.t
45 test-check-shbang.t
46 test-check-shbang.t
46 test-children.t
47 test-children.t
47 test-clone-pull-corruption.t
48 test-clone-pull-corruption.t
48 test-clone-r.t
49 test-clone-r.t
49 test-clone-update-order.t
50 test-clone-update-order.t
50 test-command-template.t
51 test-command-template.t
51 test-commit-amend.t
52 test-commit-amend.t
52 test-commit-multiple.t
53 test-commit-multiple.t
53 test-commit-unresolved.t
54 test-commit-unresolved.t
54 test-commit.t
55 test-commit.t
55 test-committer.t
56 test-committer.t
56 test-completion.t
57 test-completion.t
57 test-config-env.py
58 test-config-env.py
58 test-config.t
59 test-config.t
59 test-conflict.t
60 test-conflict.t
60 test-confused-revert.t
61 test-confused-revert.t
61 test-contrib-check-code.t
62 test-contrib-check-code.t
62 test-contrib-check-commit.t
63 test-contrib-check-commit.t
63 test-convert-authormap.t
64 test-convert-authormap.t
64 test-convert-clonebranches.t
65 test-convert-clonebranches.t
65 test-convert-datesort.t
66 test-convert-datesort.t
66 test-convert-filemap.t
67 test-convert-filemap.t
67 test-convert-hg-sink.t
68 test-convert-hg-sink.t
68 test-convert-hg-source.t
69 test-convert-hg-source.t
69 test-convert-hg-startrev.t
70 test-convert-hg-startrev.t
70 test-copy-move-merge.t
71 test-copy-move-merge.t
71 test-copy.t
72 test-copy.t
72 test-copytrace-heuristics.t
73 test-copytrace-heuristics.t
73 test-debugbuilddag.t
74 test-debugbuilddag.t
74 test-debugbundle.t
75 test-debugbundle.t
75 test-debugextensions.t
76 test-debugextensions.t
76 test-debugindexdot.t
77 test-debugindexdot.t
77 test-debugrename.t
78 test-debugrename.t
78 test-default-push.t
79 test-default-push.t
79 test-diff-binary-file.t
80 test-diff-binary-file.t
80 test-diff-change.t
81 test-diff-change.t
81 test-diff-copy-depth.t
82 test-diff-copy-depth.t
82 test-diff-hashes.t
83 test-diff-hashes.t
83 test-diff-issue2761.t
84 test-diff-issue2761.t
84 test-diff-newlines.t
85 test-diff-newlines.t
85 test-diff-reverse.t
86 test-diff-reverse.t
86 test-diff-subdir.t
87 test-diff-subdir.t
87 test-diffdir.t
88 test-diffdir.t
88 test-directaccess.t
89 test-directaccess.t
89 test-dirstate-backup.t
90 test-dirstate-backup.t
90 test-dirstate-nonnormalset.t
91 test-dirstate-nonnormalset.t
91 test-doctest.py
92 test-doctest.py
92 test-double-merge.t
93 test-double-merge.t
93 test-drawdag.t
94 test-drawdag.t
94 test-duplicateoptions.py
95 test-duplicateoptions.py
95 test-empty-dir.t
96 test-empty-dir.t
96 test-empty-file.t
97 test-empty-file.t
97 test-empty-group.t
98 test-empty-group.t
98 test-empty.t
99 test-empty.t
99 test-encode.t
100 test-encode.t
100 test-encoding-func.py
101 test-encoding-func.py
101 test-encoding.t
102 test-encoding.t
102 test-eol-add.t
103 test-eol-add.t
103 test-eol-clone.t
104 test-eol-clone.t
104 test-eol-hook.t
105 test-eol-hook.t
105 test-eol-tag.t
106 test-eol-tag.t
106 test-eol-update.t
107 test-eol-update.t
107 test-excessive-merge.t
108 test-excessive-merge.t
108 test-exchange-obsmarkers-case-A1.t
109 test-exchange-obsmarkers-case-A1.t
109 test-exchange-obsmarkers-case-A2.t
110 test-exchange-obsmarkers-case-A2.t
110 test-exchange-obsmarkers-case-A3.t
111 test-exchange-obsmarkers-case-A3.t
111 test-exchange-obsmarkers-case-A4.t
112 test-exchange-obsmarkers-case-A4.t
112 test-exchange-obsmarkers-case-A5.t
113 test-exchange-obsmarkers-case-A5.t
113 test-exchange-obsmarkers-case-A6.t
114 test-exchange-obsmarkers-case-A6.t
114 test-exchange-obsmarkers-case-A7.t
115 test-exchange-obsmarkers-case-A7.t
115 test-exchange-obsmarkers-case-B1.t
116 test-exchange-obsmarkers-case-B1.t
116 test-exchange-obsmarkers-case-B2.t
117 test-exchange-obsmarkers-case-B2.t
117 test-exchange-obsmarkers-case-B3.t
118 test-exchange-obsmarkers-case-B3.t
118 test-exchange-obsmarkers-case-B4.t
119 test-exchange-obsmarkers-case-B4.t
119 test-exchange-obsmarkers-case-B5.t
120 test-exchange-obsmarkers-case-B5.t
120 test-exchange-obsmarkers-case-B6.t
121 test-exchange-obsmarkers-case-B6.t
121 test-exchange-obsmarkers-case-B7.t
122 test-exchange-obsmarkers-case-B7.t
122 test-exchange-obsmarkers-case-C1.t
123 test-exchange-obsmarkers-case-C1.t
123 test-exchange-obsmarkers-case-C2.t
124 test-exchange-obsmarkers-case-C2.t
124 test-exchange-obsmarkers-case-C3.t
125 test-exchange-obsmarkers-case-C3.t
125 test-exchange-obsmarkers-case-C4.t
126 test-exchange-obsmarkers-case-C4.t
126 test-exchange-obsmarkers-case-D1.t
127 test-exchange-obsmarkers-case-D1.t
127 test-exchange-obsmarkers-case-D2.t
128 test-exchange-obsmarkers-case-D2.t
128 test-exchange-obsmarkers-case-D3.t
129 test-exchange-obsmarkers-case-D3.t
129 test-exchange-obsmarkers-case-D4.t
130 test-exchange-obsmarkers-case-D4.t
130 test-execute-bit.t
131 test-execute-bit.t
131 test-extdiff.t
132 test-extdiff.t
132 test-extra-filelog-entry.t
133 test-extra-filelog-entry.t
133 test-filebranch.t
134 test-filebranch.t
134 test-fileset-generated.t
135 test-fileset-generated.t
135 test-flags.t
136 test-flags.t
136 test-generaldelta.t
137 test-generaldelta.t
137 test-getbundle.t
138 test-getbundle.t
138 test-git-export.t
139 test-git-export.t
139 test-glog-topological.t
140 test-glog-topological.t
140 test-gpg.t
141 test-gpg.t
141 test-graft.t
142 test-graft.t
142 test-hghave.t
143 test-hghave.t
143 test-hgignore.t
144 test-hgignore.t
144 test-hgk.t
145 test-hgk.t
145 test-hgweb-bundle.t
146 test-hgweb-bundle.t
146 test-hgweb-descend-empties.t
147 test-hgweb-descend-empties.t
147 test-hgweb-removed.t
148 test-hgweb-removed.t
148 test-histedit-arguments.t
149 test-histedit-arguments.t
149 test-histedit-base.t
150 test-histedit-base.t
150 test-histedit-bookmark-motion.t
151 test-histedit-bookmark-motion.t
151 test-histedit-commute.t
152 test-histedit-commute.t
152 test-histedit-drop.t
153 test-histedit-drop.t
153 test-histedit-edit.t
154 test-histedit-edit.t
154 test-histedit-fold-non-commute.t
155 test-histedit-fold-non-commute.t
155 test-histedit-fold.t
156 test-histedit-fold.t
156 test-histedit-no-change.t
157 test-histedit-no-change.t
157 test-histedit-non-commute-abort.t
158 test-histedit-non-commute-abort.t
158 test-histedit-non-commute.t
159 test-histedit-non-commute.t
159 test-histedit-obsolete.t
160 test-histedit-obsolete.t
160 test-histedit-outgoing.t
161 test-histedit-outgoing.t
161 test-histedit-templates.t
162 test-histedit-templates.t
162 test-http-branchmap.t
163 test-http-branchmap.t
163 test-http-bundle1.t
164 test-http-bundle1.t
164 test-http-clone-r.t
165 test-http-clone-r.t
165 test-identify.t
166 test-identify.t
166 test-import-unknown.t
167 test-import-unknown.t
167 test-imports-checker.t
168 test-imports-checker.t
168 test-inherit-mode.t
169 test-inherit-mode.t
169 test-issue1089.t
170 test-issue1089.t
170 test-issue1102.t
171 test-issue1102.t
171 test-issue1175.t
172 test-issue1175.t
172 test-issue1306.t
173 test-issue1306.t
173 test-issue1438.t
174 test-issue1438.t
174 test-issue1502.t
175 test-issue1502.t
175 test-issue1802.t
176 test-issue1802.t
176 test-issue1877.t
177 test-issue1877.t
177 test-issue1993.t
178 test-issue1993.t
178 test-issue2137.t
179 test-issue2137.t
179 test-issue3084.t
180 test-issue3084.t
180 test-issue4074.t
181 test-issue4074.t
181 test-issue522.t
182 test-issue522.t
182 test-issue586.t
183 test-issue586.t
183 test-issue612.t
184 test-issue612.t
184 test-issue619.t
185 test-issue619.t
185 test-issue672.t
186 test-issue672.t
186 test-issue842.t
187 test-issue842.t
187 test-journal-exists.t
188 test-journal-exists.t
188 test-largefiles-cache.t
189 test-largefiles-cache.t
189 test-largefiles-misc.t
190 test-largefiles-misc.t
190 test-largefiles-small-disk.t
191 test-largefiles-small-disk.t
191 test-largefiles-update.t
192 test-largefiles-update.t
192 test-lfs-largefiles.t
193 test-lfs-largefiles.t
193 test-locate.t
194 test-locate.t
194 test-lock-badness.t
195 test-lock-badness.t
195 test-log.t
196 test-log.t
196 test-logexchange.t
197 test-logexchange.t
197 test-lrucachedict.py
198 test-lrucachedict.py
198 test-mactext.t
199 test-mactext.t
199 test-manifest-merging.t
200 test-manifest-merging.t
200 test-manifest.py
201 test-manifest.py
201 test-manifest.t
202 test-manifest.t
202 test-match.py
203 test-match.py
203 test-mdiff.py
204 test-mdiff.py
204 test-merge-changedelete.t
205 test-merge-changedelete.t
205 test-merge-closedheads.t
206 test-merge-closedheads.t
206 test-merge-commit.t
207 test-merge-commit.t
207 test-merge-criss-cross.t
208 test-merge-criss-cross.t
208 test-merge-default.t
209 test-merge-default.t
209 test-merge-force.t
210 test-merge-force.t
210 test-merge-halt.t
211 test-merge-halt.t
211 test-merge-internal-tools-pattern.t
212 test-merge-internal-tools-pattern.t
212 test-merge-local.t
213 test-merge-local.t
213 test-merge-remove.t
214 test-merge-remove.t
214 test-merge-revert.t
215 test-merge-revert.t
215 test-merge-revert2.t
216 test-merge-revert2.t
216 test-merge-subrepos.t
217 test-merge-subrepos.t
217 test-merge-symlinks.t
218 test-merge-symlinks.t
218 test-merge-types.t
219 test-merge-types.t
219 test-merge1.t
220 test-merge1.t
220 test-merge10.t
221 test-merge10.t
221 test-merge2.t
222 test-merge2.t
222 test-merge4.t
223 test-merge4.t
223 test-merge5.t
224 test-merge5.t
224 test-merge6.t
225 test-merge6.t
225 test-merge7.t
226 test-merge7.t
226 test-merge8.t
227 test-merge8.t
227 test-merge9.t
228 test-merge9.t
228 test-mq-git.t
229 test-mq-git.t
229 test-mq-header-date.t
230 test-mq-header-date.t
230 test-mq-header-from.t
231 test-mq-header-from.t
231 test-mq-pull-from-bundle.t
232 test-mq-pull-from-bundle.t
232 test-mq-qdiff.t
233 test-mq-qdiff.t
233 test-mq-qfold.t
234 test-mq-qfold.t
234 test-mq-qgoto.t
235 test-mq-qgoto.t
235 test-mq-qimport-fail-cleanup.t
236 test-mq-qimport-fail-cleanup.t
236 test-mq-qpush-exact.t
237 test-mq-qpush-exact.t
237 test-mq-qqueue.t
238 test-mq-qqueue.t
238 test-mq-qrefresh-interactive.t
239 test-mq-qrefresh-interactive.t
239 test-mq-qrefresh-replace-log-message.t
240 test-mq-qrefresh-replace-log-message.t
240 test-mq-qrefresh.t
241 test-mq-qrefresh.t
241 test-mq-qrename.t
242 test-mq-qrename.t
242 test-mq-qsave.t
243 test-mq-qsave.t
243 test-mq-safety.t
244 test-mq-safety.t
244 test-mq-subrepo.t
245 test-mq-subrepo.t
245 test-mq-symlinks.t
246 test-mq-symlinks.t
246 test-mv-cp-st-diff.t
247 test-mv-cp-st-diff.t
247 test-narrow-archive.t
248 test-narrow-archive.t
248 test-narrow-clone-no-ellipsis.t
249 test-narrow-clone-no-ellipsis.t
249 test-narrow-clone-nonlinear.t
250 test-narrow-clone-nonlinear.t
250 test-narrow-clone.t
251 test-narrow-clone.t
251 test-narrow-commit.t
252 test-narrow-commit.t
252 test-narrow-copies.t
253 test-narrow-copies.t
253 test-narrow-debugcommands.t
254 test-narrow-debugcommands.t
254 test-narrow-debugrebuilddirstate.t
255 test-narrow-debugrebuilddirstate.t
255 test-narrow-exchange-merges.t
256 test-narrow-exchange-merges.t
256 test-narrow-exchange.t
257 test-narrow-exchange.t
257 test-narrow-expanddirstate.t
258 test-narrow-expanddirstate.t
258 test-narrow-merge.t
259 test-narrow-merge.t
259 test-narrow-patch.t
260 test-narrow-patch.t
260 test-narrow-patterns.t
261 test-narrow-patterns.t
261 test-narrow-pull.t
262 test-narrow-pull.t
262 test-narrow-rebase.t
263 test-narrow-rebase.t
263 test-narrow-shallow-merges.t
264 test-narrow-shallow-merges.t
264 test-narrow-shallow.t
265 test-narrow-shallow.t
265 test-narrow-strip.t
266 test-narrow-strip.t
266 test-narrow-update.t
267 test-narrow-update.t
267 test-nested-repo.t
268 test-nested-repo.t
268 test-newbranch.t
269 test-newbranch.t
269 test-obshistory.t
270 test-obshistory.t
270 test-obsmarker-template.t
271 test-obsmarker-template.t
271 test-obsmarkers-effectflag.t
272 test-obsmarkers-effectflag.t
272 test-obsolete-bundle-strip.t
273 test-obsolete-bundle-strip.t
273 test-obsolete-changeset-exchange.t
274 test-obsolete-changeset-exchange.t
274 test-obsolete-checkheads.t
275 test-obsolete-checkheads.t
275 test-obsolete-distributed.t
276 test-obsolete-distributed.t
276 test-obsolete-tag-cache.t
277 test-obsolete-tag-cache.t
277 test-parents.t
278 test-parents.t
278 test-pathconflicts-merge.t
279 test-pathconflicts-merge.t
279 test-pathconflicts-update.t
280 test-pathconflicts-update.t
280 test-pending.t
281 test-pending.t
281 test-permissions.t
282 test-permissions.t
282 test-phases.t
283 test-phases.t
283 test-pull-branch.t
284 test-pull-branch.t
284 test-pull-http.t
285 test-pull-http.t
285 test-pull-permission.t
286 test-pull-permission.t
286 test-pull-pull-corruption.t
287 test-pull-pull-corruption.t
287 test-pull-r.t
288 test-pull-r.t
288 test-pull-update.t
289 test-pull-update.t
289 test-purge.t
290 test-purge.t
290 test-push-checkheads-partial-C1.t
291 test-push-checkheads-partial-C1.t
291 test-push-checkheads-partial-C2.t
292 test-push-checkheads-partial-C2.t
292 test-push-checkheads-partial-C3.t
293 test-push-checkheads-partial-C3.t
293 test-push-checkheads-partial-C4.t
294 test-push-checkheads-partial-C4.t
294 test-push-checkheads-pruned-B1.t
295 test-push-checkheads-pruned-B1.t
295 test-push-checkheads-pruned-B2.t
296 test-push-checkheads-pruned-B2.t
296 test-push-checkheads-pruned-B3.t
297 test-push-checkheads-pruned-B3.t
297 test-push-checkheads-pruned-B4.t
298 test-push-checkheads-pruned-B4.t
298 test-push-checkheads-pruned-B5.t
299 test-push-checkheads-pruned-B5.t
299 test-push-checkheads-pruned-B6.t
300 test-push-checkheads-pruned-B6.t
300 test-push-checkheads-pruned-B7.t
301 test-push-checkheads-pruned-B7.t
301 test-push-checkheads-pruned-B8.t
302 test-push-checkheads-pruned-B8.t
302 test-push-checkheads-superceed-A1.t
303 test-push-checkheads-superceed-A1.t
303 test-push-checkheads-superceed-A2.t
304 test-push-checkheads-superceed-A2.t
304 test-push-checkheads-superceed-A3.t
305 test-push-checkheads-superceed-A3.t
305 test-push-checkheads-superceed-A4.t
306 test-push-checkheads-superceed-A4.t
306 test-push-checkheads-superceed-A5.t
307 test-push-checkheads-superceed-A5.t
307 test-push-checkheads-superceed-A6.t
308 test-push-checkheads-superceed-A6.t
308 test-push-checkheads-superceed-A7.t
309 test-push-checkheads-superceed-A7.t
309 test-push-checkheads-superceed-A8.t
310 test-push-checkheads-superceed-A8.t
310 test-push-checkheads-unpushed-D1.t
311 test-push-checkheads-unpushed-D1.t
311 test-push-checkheads-unpushed-D2.t
312 test-push-checkheads-unpushed-D2.t
312 test-push-checkheads-unpushed-D3.t
313 test-push-checkheads-unpushed-D3.t
313 test-push-checkheads-unpushed-D4.t
314 test-push-checkheads-unpushed-D4.t
314 test-push-checkheads-unpushed-D5.t
315 test-push-checkheads-unpushed-D5.t
315 test-push-checkheads-unpushed-D6.t
316 test-push-checkheads-unpushed-D6.t
316 test-push-checkheads-unpushed-D7.t
317 test-push-checkheads-unpushed-D7.t
317 test-push-http.t
318 test-push-http.t
318 test-push-warn.t
319 test-push-warn.t
319 test-pushvars.t
320 test-pushvars.t
320 test-rebase-abort.t
321 test-rebase-abort.t
321 test-rebase-base-flag.t
322 test-rebase-base-flag.t
322 test-rebase-bookmarks.t
323 test-rebase-bookmarks.t
323 test-rebase-brute-force.t
324 test-rebase-brute-force.t
324 test-rebase-cache.t
325 test-rebase-cache.t
325 test-rebase-check-restore.t
326 test-rebase-check-restore.t
326 test-rebase-collapse.t
327 test-rebase-collapse.t
327 test-rebase-dest.t
328 test-rebase-dest.t
328 test-rebase-detach.t
329 test-rebase-detach.t
329 test-rebase-emptycommit.t
330 test-rebase-emptycommit.t
330 test-rebase-inmemory.t
331 test-rebase-inmemory.t
331 test-rebase-interruptions.t
332 test-rebase-interruptions.t
332 test-rebase-issue-noparam-single-rev.t
333 test-rebase-issue-noparam-single-rev.t
333 test-rebase-legacy.t
334 test-rebase-legacy.t
334 test-rebase-mq-skip.t
335 test-rebase-mq-skip.t
335 test-rebase-named-branches.t
336 test-rebase-named-branches.t
336 test-rebase-newancestor.t
337 test-rebase-newancestor.t
337 test-rebase-obsolete.t
338 test-rebase-obsolete.t
338 test-rebase-parameters.t
339 test-rebase-parameters.t
339 test-rebase-partial.t
340 test-rebase-partial.t
340 test-rebase-pull.t
341 test-rebase-pull.t
341 test-rebase-rename.t
342 test-rebase-rename.t
342 test-rebase-scenario-global.t
343 test-rebase-scenario-global.t
343 test-rebase-templates.t
344 test-rebase-templates.t
344 test-rebase-transaction.t
345 test-rebase-transaction.t
345 test-record.t
346 test-record.t
346 test-relink.t
347 test-relink.t
347 test-remove.t
348 test-remove.t
348 test-rename-after-merge.t
349 test-rename-after-merge.t
349 test-rename-dir-merge.t
350 test-rename-dir-merge.t
350 test-rename-merge1.t
351 test-rename-merge1.t
351 test-rename.t
352 test-rename.t
352 test-repair-strip.t
353 test-repair-strip.t
353 test-repo-compengines.t
354 test-repo-compengines.t
354 test-resolve.t
355 test-resolve.t
355 test-revert-flags.t
356 test-revert-flags.t
356 test-revert-unknown.t
357 test-revert-unknown.t
357 test-revlog-ancestry.py
358 test-revlog-ancestry.py
358 test-revlog-group-emptyiter.t
359 test-revlog-group-emptyiter.t
359 test-revlog-mmapindex.t
360 test-revlog-mmapindex.t
360 test-revlog-packentry.t
361 test-revlog-packentry.t
361 test-revset-dirstate-parents.t
362 test-revset-dirstate-parents.t
362 test-revset-outgoing.t
363 test-revset-outgoing.t
363 test-rollback.t
364 test-rollback.t
364 test-run-tests.py
365 test-run-tests.py
365 test-schemes.t
366 test-schemes.t
366 test-serve.t
367 test-serve.t
367 test-share.t
368 test-share.t
368 test-show-stack.t
369 test-show-stack.t
369 test-show-work.t
370 test-show-work.t
370 test-show.t
371 test-show.t
371 test-simple-update.t
372 test-simple-update.t
372 test-single-head.t
373 test-single-head.t
373 test-sparse-clear.t
374 test-sparse-clear.t
374 test-sparse-merges.t
375 test-sparse-merges.t
375 test-sparse-requirement.t
376 test-sparse-requirement.t
376 test-sparse-verbose-json.t
377 test-sparse-verbose-json.t
377 test-ssh-clone-r.t
378 test-ssh-clone-r.t
378 test-ssh-proto.t
379 test-ssh-proto.t
379 test-sshserver.py
380 test-sshserver.py
380 test-status-rev.t
381 test-status-rev.t
381 test-status-terse.t
382 test-status-terse.t
382 test-strip-cross.t
383 test-strip-cross.t
383 test-strip.t
384 test-strip.t
384 test-subrepo-deep-nested-change.t
385 test-subrepo-deep-nested-change.t
385 test-subrepo.t
386 test-subrepo.t
386 test-symlinks.t
387 test-symlinks.t
387 test-tag.t
388 test-tag.t
388 test-tags.t
389 test-tags.t
389 test-treemanifest.t
390 test-treemanifest.t
390 test-unamend.t
391 test-unamend.t
391 test-uncommit.t
392 test-uncommit.t
392 test-unified-test.t
393 test-unified-test.t
393 test-unrelated-pull.t
394 test-unrelated-pull.t
394 test-up-local-change.t
395 test-up-local-change.t
395 test-update-branches.t
396 test-update-branches.t
396 test-update-dest.t
397 test-update-dest.t
397 test-update-issue1456.t
398 test-update-issue1456.t
398 test-update-names.t
399 test-update-names.t
399 test-update-reverse.t
400 test-update-reverse.t
400 test-upgrade-repo.t
401 test-upgrade-repo.t
401 test-url-rev.t
402 test-url-rev.t
402 test-username-newline.t
403 test-username-newline.t
403 test-verify.t
404 test-verify.t
404 test-websub.t
405 test-websub.t
405 test-win32text.t
406 test-win32text.t
406 test-xdg.t
407 test-xdg.t
@@ -1,1425 +1,1426 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 if pycompat.iswindows:
44 if pycompat.iswindows:
45 from . import scmwindows as scmplatform
45 from . import scmwindows as scmplatform
46 else:
46 else:
47 from . import scmposix as scmplatform
47 from . import scmposix as scmplatform
48
48
49 termsize = scmplatform.termsize
49 termsize = scmplatform.termsize
50
50
51 class status(tuple):
51 class status(tuple):
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
53 and 'ignored' properties are only relevant to the working copy.
53 and 'ignored' properties are only relevant to the working copy.
54 '''
54 '''
55
55
56 __slots__ = ()
56 __slots__ = ()
57
57
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
59 clean):
59 clean):
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
61 ignored, clean))
61 ignored, clean))
62
62
63 @property
63 @property
64 def modified(self):
64 def modified(self):
65 '''files that have been modified'''
65 '''files that have been modified'''
66 return self[0]
66 return self[0]
67
67
68 @property
68 @property
69 def added(self):
69 def added(self):
70 '''files that have been added'''
70 '''files that have been added'''
71 return self[1]
71 return self[1]
72
72
73 @property
73 @property
74 def removed(self):
74 def removed(self):
75 '''files that have been removed'''
75 '''files that have been removed'''
76 return self[2]
76 return self[2]
77
77
78 @property
78 @property
79 def deleted(self):
79 def deleted(self):
80 '''files that are in the dirstate, but have been deleted from the
80 '''files that are in the dirstate, but have been deleted from the
81 working copy (aka "missing")
81 working copy (aka "missing")
82 '''
82 '''
83 return self[3]
83 return self[3]
84
84
85 @property
85 @property
86 def unknown(self):
86 def unknown(self):
87 '''files not in the dirstate that are not ignored'''
87 '''files not in the dirstate that are not ignored'''
88 return self[4]
88 return self[4]
89
89
90 @property
90 @property
91 def ignored(self):
91 def ignored(self):
92 '''files not in the dirstate that are ignored (by _dirignore())'''
92 '''files not in the dirstate that are ignored (by _dirignore())'''
93 return self[5]
93 return self[5]
94
94
95 @property
95 @property
96 def clean(self):
96 def clean(self):
97 '''files that have not been modified'''
97 '''files that have not been modified'''
98 return self[6]
98 return self[6]
99
99
100 def __repr__(self, *args, **kwargs):
100 def __repr__(self, *args, **kwargs):
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
102 'unknown=%r, ignored=%r, clean=%r>') % self)
102 'unknown=%r, ignored=%r, clean=%r>') % self)
103
103
104 def itersubrepos(ctx1, ctx2):
104 def itersubrepos(ctx1, ctx2):
105 """find subrepos in ctx1 or ctx2"""
105 """find subrepos in ctx1 or ctx2"""
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
108 # has been modified (in ctx2) but not yet committed (in ctx1).
108 # has been modified (in ctx2) but not yet committed (in ctx1).
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
111
111
112 missing = set()
112 missing = set()
113
113
114 for subpath in ctx2.substate:
114 for subpath in ctx2.substate:
115 if subpath not in ctx1.substate:
115 if subpath not in ctx1.substate:
116 del subpaths[subpath]
116 del subpaths[subpath]
117 missing.add(subpath)
117 missing.add(subpath)
118
118
119 for subpath, ctx in sorted(subpaths.iteritems()):
119 for subpath, ctx in sorted(subpaths.iteritems()):
120 yield subpath, ctx.sub(subpath)
120 yield subpath, ctx.sub(subpath)
121
121
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
123 # status and diff will have an accurate result when it does
123 # status and diff will have an accurate result when it does
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
125 # against itself.
125 # against itself.
126 for subpath in missing:
126 for subpath in missing:
127 yield subpath, ctx2.nullsub(subpath, ctx1)
127 yield subpath, ctx2.nullsub(subpath, ctx1)
128
128
129 def nochangesfound(ui, repo, excluded=None):
129 def nochangesfound(ui, repo, excluded=None):
130 '''Report no changes for push/pull, excluded is None or a list of
130 '''Report no changes for push/pull, excluded is None or a list of
131 nodes excluded from the push/pull.
131 nodes excluded from the push/pull.
132 '''
132 '''
133 secretlist = []
133 secretlist = []
134 if excluded:
134 if excluded:
135 for n in excluded:
135 for n in excluded:
136 ctx = repo[n]
136 ctx = repo[n]
137 if ctx.phase() >= phases.secret and not ctx.extinct():
137 if ctx.phase() >= phases.secret and not ctx.extinct():
138 secretlist.append(n)
138 secretlist.append(n)
139
139
140 if secretlist:
140 if secretlist:
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
142 % len(secretlist))
142 % len(secretlist))
143 else:
143 else:
144 ui.status(_("no changes found\n"))
144 ui.status(_("no changes found\n"))
145
145
146 def callcatch(ui, func):
146 def callcatch(ui, func):
147 """call func() with global exception handling
147 """call func() with global exception handling
148
148
149 return func() if no exception happens. otherwise do some error handling
149 return func() if no exception happens. otherwise do some error handling
150 and return an exit code accordingly. does not handle all exceptions.
150 and return an exit code accordingly. does not handle all exceptions.
151 """
151 """
152 try:
152 try:
153 try:
153 try:
154 return func()
154 return func()
155 except: # re-raises
155 except: # re-raises
156 ui.traceback()
156 ui.traceback()
157 raise
157 raise
158 # Global exception handling, alphabetically
158 # Global exception handling, alphabetically
159 # Mercurial-specific first, followed by built-in and library exceptions
159 # Mercurial-specific first, followed by built-in and library exceptions
160 except error.LockHeld as inst:
160 except error.LockHeld as inst:
161 if inst.errno == errno.ETIMEDOUT:
161 if inst.errno == errno.ETIMEDOUT:
162 reason = _('timed out waiting for lock held by %r') % inst.locker
162 reason = _('timed out waiting for lock held by %r') % inst.locker
163 else:
163 else:
164 reason = _('lock held by %r') % inst.locker
164 reason = _('lock held by %r') % inst.locker
165 ui.warn(_("abort: %s: %s\n")
165 ui.warn(_("abort: %s: %s\n")
166 % (inst.desc or util.forcebytestr(inst.filename), reason))
166 % (inst.desc or util.forcebytestr(inst.filename), reason))
167 if not inst.locker:
167 if not inst.locker:
168 ui.warn(_("(lock might be very busy)\n"))
168 ui.warn(_("(lock might be very busy)\n"))
169 except error.LockUnavailable as inst:
169 except error.LockUnavailable as inst:
170 ui.warn(_("abort: could not lock %s: %s\n") %
170 ui.warn(_("abort: could not lock %s: %s\n") %
171 (inst.desc or util.forcebytestr(inst.filename),
171 (inst.desc or util.forcebytestr(inst.filename),
172 encoding.strtolocal(inst.strerror)))
172 encoding.strtolocal(inst.strerror)))
173 except error.OutOfBandError as inst:
173 except error.OutOfBandError as inst:
174 if inst.args:
174 if inst.args:
175 msg = _("abort: remote error:\n")
175 msg = _("abort: remote error:\n")
176 else:
176 else:
177 msg = _("abort: remote error\n")
177 msg = _("abort: remote error\n")
178 ui.warn(msg)
178 ui.warn(msg)
179 if inst.args:
179 if inst.args:
180 ui.warn(''.join(inst.args))
180 ui.warn(''.join(inst.args))
181 if inst.hint:
181 if inst.hint:
182 ui.warn('(%s)\n' % inst.hint)
182 ui.warn('(%s)\n' % inst.hint)
183 except error.RepoError as inst:
183 except error.RepoError as inst:
184 ui.warn(_("abort: %s!\n") % inst)
184 ui.warn(_("abort: %s!\n") % inst)
185 if inst.hint:
185 if inst.hint:
186 ui.warn(_("(%s)\n") % inst.hint)
186 ui.warn(_("(%s)\n") % inst.hint)
187 except error.ResponseError as inst:
187 except error.ResponseError as inst:
188 ui.warn(_("abort: %s") % inst.args[0])
188 ui.warn(_("abort: %s") % inst.args[0])
189 msg = inst.args[1]
189 msg = inst.args[1]
190 if isinstance(msg, type(u'')):
190 if isinstance(msg, type(u'')):
191 msg = pycompat.sysbytes(msg)
191 msg = pycompat.sysbytes(msg)
192 if not isinstance(msg, bytes):
192 if not isinstance(msg, bytes):
193 ui.warn(" %r\n" % (msg,))
193 ui.warn(" %r\n" % (msg,))
194 elif not msg:
194 elif not msg:
195 ui.warn(_(" empty string\n"))
195 ui.warn(_(" empty string\n"))
196 else:
196 else:
197 ui.warn("\n%r\n" % util.ellipsis(msg))
197 ui.warn("\n%r\n" % util.ellipsis(msg))
198 except error.CensoredNodeError as inst:
198 except error.CensoredNodeError as inst:
199 ui.warn(_("abort: file censored %s!\n") % inst)
199 ui.warn(_("abort: file censored %s!\n") % inst)
200 except error.RevlogError as inst:
200 except error.RevlogError as inst:
201 ui.warn(_("abort: %s!\n") % inst)
201 ui.warn(_("abort: %s!\n") % inst)
202 except error.InterventionRequired as inst:
202 except error.InterventionRequired as inst:
203 ui.warn("%s\n" % inst)
203 ui.warn("%s\n" % inst)
204 if inst.hint:
204 if inst.hint:
205 ui.warn(_("(%s)\n") % inst.hint)
205 ui.warn(_("(%s)\n") % inst.hint)
206 return 1
206 return 1
207 except error.WdirUnsupported:
207 except error.WdirUnsupported:
208 ui.warn(_("abort: working directory revision cannot be specified\n"))
208 ui.warn(_("abort: working directory revision cannot be specified\n"))
209 except error.Abort as inst:
209 except error.Abort as inst:
210 ui.warn(_("abort: %s\n") % inst)
210 ui.warn(_("abort: %s\n") % inst)
211 if inst.hint:
211 if inst.hint:
212 ui.warn(_("(%s)\n") % inst.hint)
212 ui.warn(_("(%s)\n") % inst.hint)
213 except ImportError as inst:
213 except ImportError as inst:
214 ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
214 ui.warn(_("abort: %s!\n") % util.forcebytestr(inst))
215 m = util.forcebytestr(inst).split()[-1]
215 m = util.forcebytestr(inst).split()[-1]
216 if m in "mpatch bdiff".split():
216 if m in "mpatch bdiff".split():
217 ui.warn(_("(did you forget to compile extensions?)\n"))
217 ui.warn(_("(did you forget to compile extensions?)\n"))
218 elif m in "zlib".split():
218 elif m in "zlib".split():
219 ui.warn(_("(is your Python install correct?)\n"))
219 ui.warn(_("(is your Python install correct?)\n"))
220 except IOError as inst:
220 except IOError as inst:
221 if util.safehasattr(inst, "code"):
221 if util.safehasattr(inst, "code"):
222 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
222 ui.warn(_("abort: %s\n") % util.forcebytestr(inst))
223 elif util.safehasattr(inst, "reason"):
223 elif util.safehasattr(inst, "reason"):
224 try: # usually it is in the form (errno, strerror)
224 try: # usually it is in the form (errno, strerror)
225 reason = inst.reason.args[1]
225 reason = inst.reason.args[1]
226 except (AttributeError, IndexError):
226 except (AttributeError, IndexError):
227 # it might be anything, for example a string
227 # it might be anything, for example a string
228 reason = inst.reason
228 reason = inst.reason
229 if isinstance(reason, unicode):
229 if isinstance(reason, unicode):
230 # SSLError of Python 2.7.9 contains a unicode
230 # SSLError of Python 2.7.9 contains a unicode
231 reason = encoding.unitolocal(reason)
231 reason = encoding.unitolocal(reason)
232 ui.warn(_("abort: error: %s\n") % reason)
232 ui.warn(_("abort: error: %s\n") % reason)
233 elif (util.safehasattr(inst, "args")
233 elif (util.safehasattr(inst, "args")
234 and inst.args and inst.args[0] == errno.EPIPE):
234 and inst.args and inst.args[0] == errno.EPIPE):
235 pass
235 pass
236 elif getattr(inst, "strerror", None):
236 elif getattr(inst, "strerror", None):
237 if getattr(inst, "filename", None):
237 if getattr(inst, "filename", None):
238 ui.warn(_("abort: %s: %s\n") % (
238 ui.warn(_("abort: %s: %s\n") % (
239 encoding.strtolocal(inst.strerror),
239 encoding.strtolocal(inst.strerror),
240 util.forcebytestr(inst.filename)))
240 util.forcebytestr(inst.filename)))
241 else:
241 else:
242 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
242 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
243 else:
243 else:
244 raise
244 raise
245 except OSError as inst:
245 except OSError as inst:
246 if getattr(inst, "filename", None) is not None:
246 if getattr(inst, "filename", None) is not None:
247 ui.warn(_("abort: %s: '%s'\n") % (
247 ui.warn(_("abort: %s: '%s'\n") % (
248 encoding.strtolocal(inst.strerror),
248 encoding.strtolocal(inst.strerror),
249 util.forcebytestr(inst.filename)))
249 util.forcebytestr(inst.filename)))
250 else:
250 else:
251 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
251 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
252 except MemoryError:
252 except MemoryError:
253 ui.warn(_("abort: out of memory\n"))
253 ui.warn(_("abort: out of memory\n"))
254 except SystemExit as inst:
254 except SystemExit as inst:
255 # Commands shouldn't sys.exit directly, but give a return code.
255 # Commands shouldn't sys.exit directly, but give a return code.
256 # Just in case catch this and and pass exit code to caller.
256 # Just in case catch this and and pass exit code to caller.
257 return inst.code
257 return inst.code
258 except socket.error as inst:
258 except socket.error as inst:
259 ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))
259 ui.warn(_("abort: %s\n") % util.forcebytestr(inst.args[-1]))
260
260
261 return -1
261 return -1
262
262
263 def checknewlabel(repo, lbl, kind):
263 def checknewlabel(repo, lbl, kind):
264 # Do not use the "kind" parameter in ui output.
264 # Do not use the "kind" parameter in ui output.
265 # It makes strings difficult to translate.
265 # It makes strings difficult to translate.
266 if lbl in ['tip', '.', 'null']:
266 if lbl in ['tip', '.', 'null']:
267 raise error.Abort(_("the name '%s' is reserved") % lbl)
267 raise error.Abort(_("the name '%s' is reserved") % lbl)
268 for c in (':', '\0', '\n', '\r'):
268 for c in (':', '\0', '\n', '\r'):
269 if c in lbl:
269 if c in lbl:
270 raise error.Abort(
270 raise error.Abort(
271 _("%r cannot be used in a name") % pycompat.bytestr(c))
271 _("%r cannot be used in a name") % pycompat.bytestr(c))
272 try:
272 try:
273 int(lbl)
273 int(lbl)
274 raise error.Abort(_("cannot use an integer as a name"))
274 raise error.Abort(_("cannot use an integer as a name"))
275 except ValueError:
275 except ValueError:
276 pass
276 pass
277 if lbl.strip() != lbl:
277 if lbl.strip() != lbl:
278 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
278 raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
279
279
280 def checkfilename(f):
280 def checkfilename(f):
281 '''Check that the filename f is an acceptable filename for a tracked file'''
281 '''Check that the filename f is an acceptable filename for a tracked file'''
282 if '\r' in f or '\n' in f:
282 if '\r' in f or '\n' in f:
283 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
283 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
284
284
285 def checkportable(ui, f):
285 def checkportable(ui, f):
286 '''Check if filename f is portable and warn or abort depending on config'''
286 '''Check if filename f is portable and warn or abort depending on config'''
287 checkfilename(f)
287 checkfilename(f)
288 abort, warn = checkportabilityalert(ui)
288 abort, warn = checkportabilityalert(ui)
289 if abort or warn:
289 if abort or warn:
290 msg = util.checkwinfilename(f)
290 msg = util.checkwinfilename(f)
291 if msg:
291 if msg:
292 msg = "%s: %s" % (msg, util.shellquote(f))
292 msg = "%s: %s" % (msg, util.shellquote(f))
293 if abort:
293 if abort:
294 raise error.Abort(msg)
294 raise error.Abort(msg)
295 ui.warn(_("warning: %s\n") % msg)
295 ui.warn(_("warning: %s\n") % msg)
296
296
297 def checkportabilityalert(ui):
297 def checkportabilityalert(ui):
298 '''check if the user's config requests nothing, a warning, or abort for
298 '''check if the user's config requests nothing, a warning, or abort for
299 non-portable filenames'''
299 non-portable filenames'''
300 val = ui.config('ui', 'portablefilenames')
300 val = ui.config('ui', 'portablefilenames')
301 lval = val.lower()
301 lval = val.lower()
302 bval = util.parsebool(val)
302 bval = util.parsebool(val)
303 abort = pycompat.iswindows or lval == 'abort'
303 abort = pycompat.iswindows or lval == 'abort'
304 warn = bval or lval == 'warn'
304 warn = bval or lval == 'warn'
305 if bval is None and not (warn or abort or lval == 'ignore'):
305 if bval is None and not (warn or abort or lval == 'ignore'):
306 raise error.ConfigError(
306 raise error.ConfigError(
307 _("ui.portablefilenames value is invalid ('%s')") % val)
307 _("ui.portablefilenames value is invalid ('%s')") % val)
308 return abort, warn
308 return abort, warn
309
309
310 class casecollisionauditor(object):
310 class casecollisionauditor(object):
311 def __init__(self, ui, abort, dirstate):
311 def __init__(self, ui, abort, dirstate):
312 self._ui = ui
312 self._ui = ui
313 self._abort = abort
313 self._abort = abort
314 allfiles = '\0'.join(dirstate._map)
314 allfiles = '\0'.join(dirstate._map)
315 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
315 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
316 self._dirstate = dirstate
316 self._dirstate = dirstate
317 # The purpose of _newfiles is so that we don't complain about
317 # The purpose of _newfiles is so that we don't complain about
318 # case collisions if someone were to call this object with the
318 # case collisions if someone were to call this object with the
319 # same filename twice.
319 # same filename twice.
320 self._newfiles = set()
320 self._newfiles = set()
321
321
322 def __call__(self, f):
322 def __call__(self, f):
323 if f in self._newfiles:
323 if f in self._newfiles:
324 return
324 return
325 fl = encoding.lower(f)
325 fl = encoding.lower(f)
326 if fl in self._loweredfiles and f not in self._dirstate:
326 if fl in self._loweredfiles and f not in self._dirstate:
327 msg = _('possible case-folding collision for %s') % f
327 msg = _('possible case-folding collision for %s') % f
328 if self._abort:
328 if self._abort:
329 raise error.Abort(msg)
329 raise error.Abort(msg)
330 self._ui.warn(_("warning: %s\n") % msg)
330 self._ui.warn(_("warning: %s\n") % msg)
331 self._loweredfiles.add(fl)
331 self._loweredfiles.add(fl)
332 self._newfiles.add(f)
332 self._newfiles.add(f)
333
333
334 def filteredhash(repo, maxrev):
334 def filteredhash(repo, maxrev):
335 """build hash of filtered revisions in the current repoview.
335 """build hash of filtered revisions in the current repoview.
336
336
337 Multiple caches perform up-to-date validation by checking that the
337 Multiple caches perform up-to-date validation by checking that the
338 tiprev and tipnode stored in the cache file match the current repository.
338 tiprev and tipnode stored in the cache file match the current repository.
339 However, this is not sufficient for validating repoviews because the set
339 However, this is not sufficient for validating repoviews because the set
340 of revisions in the view may change without the repository tiprev and
340 of revisions in the view may change without the repository tiprev and
341 tipnode changing.
341 tipnode changing.
342
342
343 This function hashes all the revs filtered from the view and returns
343 This function hashes all the revs filtered from the view and returns
344 that SHA-1 digest.
344 that SHA-1 digest.
345 """
345 """
346 cl = repo.changelog
346 cl = repo.changelog
347 if not cl.filteredrevs:
347 if not cl.filteredrevs:
348 return None
348 return None
349 key = None
349 key = None
350 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
350 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
351 if revs:
351 if revs:
352 s = hashlib.sha1()
352 s = hashlib.sha1()
353 for rev in revs:
353 for rev in revs:
354 s.update('%d;' % rev)
354 s.update('%d;' % rev)
355 key = s.digest()
355 key = s.digest()
356 return key
356 return key
357
357
358 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
358 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
359 '''yield every hg repository under path, always recursively.
359 '''yield every hg repository under path, always recursively.
360 The recurse flag will only control recursion into repo working dirs'''
360 The recurse flag will only control recursion into repo working dirs'''
361 def errhandler(err):
361 def errhandler(err):
362 if err.filename == path:
362 if err.filename == path:
363 raise err
363 raise err
364 samestat = getattr(os.path, 'samestat', None)
364 samestat = getattr(os.path, 'samestat', None)
365 if followsym and samestat is not None:
365 if followsym and samestat is not None:
366 def adddir(dirlst, dirname):
366 def adddir(dirlst, dirname):
367 dirstat = os.stat(dirname)
367 dirstat = os.stat(dirname)
368 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
368 match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
369 if not match:
369 if not match:
370 dirlst.append(dirstat)
370 dirlst.append(dirstat)
371 return not match
371 return not match
372 else:
372 else:
373 followsym = False
373 followsym = False
374
374
375 if (seen_dirs is None) and followsym:
375 if (seen_dirs is None) and followsym:
376 seen_dirs = []
376 seen_dirs = []
377 adddir(seen_dirs, path)
377 adddir(seen_dirs, path)
378 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
378 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
379 dirs.sort()
379 dirs.sort()
380 if '.hg' in dirs:
380 if '.hg' in dirs:
381 yield root # found a repository
381 yield root # found a repository
382 qroot = os.path.join(root, '.hg', 'patches')
382 qroot = os.path.join(root, '.hg', 'patches')
383 if os.path.isdir(os.path.join(qroot, '.hg')):
383 if os.path.isdir(os.path.join(qroot, '.hg')):
384 yield qroot # we have a patch queue repo here
384 yield qroot # we have a patch queue repo here
385 if recurse:
385 if recurse:
386 # avoid recursing inside the .hg directory
386 # avoid recursing inside the .hg directory
387 dirs.remove('.hg')
387 dirs.remove('.hg')
388 else:
388 else:
389 dirs[:] = [] # don't descend further
389 dirs[:] = [] # don't descend further
390 elif followsym:
390 elif followsym:
391 newdirs = []
391 newdirs = []
392 for d in dirs:
392 for d in dirs:
393 fname = os.path.join(root, d)
393 fname = os.path.join(root, d)
394 if adddir(seen_dirs, fname):
394 if adddir(seen_dirs, fname):
395 if os.path.islink(fname):
395 if os.path.islink(fname):
396 for hgname in walkrepos(fname, True, seen_dirs):
396 for hgname in walkrepos(fname, True, seen_dirs):
397 yield hgname
397 yield hgname
398 else:
398 else:
399 newdirs.append(d)
399 newdirs.append(d)
400 dirs[:] = newdirs
400 dirs[:] = newdirs
401
401
402 def binnode(ctx):
402 def binnode(ctx):
403 """Return binary node id for a given basectx"""
403 """Return binary node id for a given basectx"""
404 node = ctx.node()
404 node = ctx.node()
405 if node is None:
405 if node is None:
406 return wdirid
406 return wdirid
407 return node
407 return node
408
408
409 def intrev(ctx):
409 def intrev(ctx):
410 """Return integer for a given basectx that can be used in comparison or
410 """Return integer for a given basectx that can be used in comparison or
411 arithmetic operation"""
411 arithmetic operation"""
412 rev = ctx.rev()
412 rev = ctx.rev()
413 if rev is None:
413 if rev is None:
414 return wdirrev
414 return wdirrev
415 return rev
415 return rev
416
416
417 def formatchangeid(ctx):
417 def formatchangeid(ctx):
418 """Format changectx as '{rev}:{node|formatnode}', which is the default
418 """Format changectx as '{rev}:{node|formatnode}', which is the default
419 template provided by logcmdutil.changesettemplater"""
419 template provided by logcmdutil.changesettemplater"""
420 repo = ctx.repo()
420 repo = ctx.repo()
421 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
421 return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
422
422
423 def formatrevnode(ui, rev, node):
423 def formatrevnode(ui, rev, node):
424 """Format given revision and node depending on the current verbosity"""
424 """Format given revision and node depending on the current verbosity"""
425 if ui.debugflag:
425 if ui.debugflag:
426 hexfunc = hex
426 hexfunc = hex
427 else:
427 else:
428 hexfunc = short
428 hexfunc = short
429 return '%d:%s' % (rev, hexfunc(node))
429 return '%d:%s' % (rev, hexfunc(node))
430
430
431 def revsingle(repo, revspec, default='.', localalias=None):
431 def revsingle(repo, revspec, default='.', localalias=None):
432 if not revspec and revspec != 0:
432 if not revspec and revspec != 0:
433 return repo[default]
433 return repo[default]
434
434
435 l = revrange(repo, [revspec], localalias=localalias)
435 l = revrange(repo, [revspec], localalias=localalias)
436 if not l:
436 if not l:
437 raise error.Abort(_('empty revision set'))
437 raise error.Abort(_('empty revision set'))
438 return repo[l.last()]
438 return repo[l.last()]
439
439
440 def _pairspec(revspec):
440 def _pairspec(revspec):
441 tree = revsetlang.parse(revspec)
441 tree = revsetlang.parse(revspec)
442 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
442 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
443
443
444 def revpair(repo, revs):
444 def revpair(repo, revs):
445 if not revs:
445 if not revs:
446 return repo.dirstate.p1(), None
446 return repo.dirstate.p1(), None
447
447
448 l = revrange(repo, revs)
448 l = revrange(repo, revs)
449
449
450 if not l:
450 if not l:
451 first = second = None
451 first = second = None
452 elif l.isascending():
452 elif l.isascending():
453 first = l.min()
453 first = l.min()
454 second = l.max()
454 second = l.max()
455 elif l.isdescending():
455 elif l.isdescending():
456 first = l.max()
456 first = l.max()
457 second = l.min()
457 second = l.min()
458 else:
458 else:
459 first = l.first()
459 first = l.first()
460 second = l.last()
460 second = l.last()
461
461
462 if first is None:
462 if first is None:
463 raise error.Abort(_('empty revision range'))
463 raise error.Abort(_('empty revision range'))
464 if (first == second and len(revs) >= 2
464 if (first == second and len(revs) >= 2
465 and not all(revrange(repo, [r]) for r in revs)):
465 and not all(revrange(repo, [r]) for r in revs)):
466 raise error.Abort(_('empty revision on one side of range'))
466 raise error.Abort(_('empty revision on one side of range'))
467
467
468 # if top-level is range expression, the result must always be a pair
468 # if top-level is range expression, the result must always be a pair
469 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
469 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
470 return repo.lookup(first), None
470 return repo.lookup(first), None
471
471
472 return repo.lookup(first), repo.lookup(second)
472 return repo.lookup(first), repo.lookup(second)
473
473
474 def revrange(repo, specs, localalias=None):
474 def revrange(repo, specs, localalias=None):
475 """Execute 1 to many revsets and return the union.
475 """Execute 1 to many revsets and return the union.
476
476
477 This is the preferred mechanism for executing revsets using user-specified
477 This is the preferred mechanism for executing revsets using user-specified
478 config options, such as revset aliases.
478 config options, such as revset aliases.
479
479
480 The revsets specified by ``specs`` will be executed via a chained ``OR``
480 The revsets specified by ``specs`` will be executed via a chained ``OR``
481 expression. If ``specs`` is empty, an empty result is returned.
481 expression. If ``specs`` is empty, an empty result is returned.
482
482
483 ``specs`` can contain integers, in which case they are assumed to be
483 ``specs`` can contain integers, in which case they are assumed to be
484 revision numbers.
484 revision numbers.
485
485
486 It is assumed the revsets are already formatted. If you have arguments
486 It is assumed the revsets are already formatted. If you have arguments
487 that need to be expanded in the revset, call ``revsetlang.formatspec()``
487 that need to be expanded in the revset, call ``revsetlang.formatspec()``
488 and pass the result as an element of ``specs``.
488 and pass the result as an element of ``specs``.
489
489
490 Specifying a single revset is allowed.
490 Specifying a single revset is allowed.
491
491
492 Returns a ``revset.abstractsmartset`` which is a list-like interface over
492 Returns a ``revset.abstractsmartset`` which is a list-like interface over
493 integer revisions.
493 integer revisions.
494 """
494 """
495 allspecs = []
495 allspecs = []
496 for spec in specs:
496 for spec in specs:
497 if isinstance(spec, int):
497 if isinstance(spec, int):
498 spec = revsetlang.formatspec('rev(%d)', spec)
498 spec = revsetlang.formatspec('rev(%d)', spec)
499 allspecs.append(spec)
499 allspecs.append(spec)
500 return repo.anyrevs(allspecs, user=True, localalias=localalias)
500 return repo.anyrevs(allspecs, user=True, localalias=localalias)
501
501
502 def meaningfulparents(repo, ctx):
502 def meaningfulparents(repo, ctx):
503 """Return list of meaningful (or all if debug) parentrevs for rev.
503 """Return list of meaningful (or all if debug) parentrevs for rev.
504
504
505 For merges (two non-nullrev revisions) both parents are meaningful.
505 For merges (two non-nullrev revisions) both parents are meaningful.
506 Otherwise the first parent revision is considered meaningful if it
506 Otherwise the first parent revision is considered meaningful if it
507 is not the preceding revision.
507 is not the preceding revision.
508 """
508 """
509 parents = ctx.parents()
509 parents = ctx.parents()
510 if len(parents) > 1:
510 if len(parents) > 1:
511 return parents
511 return parents
512 if repo.ui.debugflag:
512 if repo.ui.debugflag:
513 return [parents[0], repo['null']]
513 return [parents[0], repo['null']]
514 if parents[0].rev() >= intrev(ctx) - 1:
514 if parents[0].rev() >= intrev(ctx) - 1:
515 return []
515 return []
516 return parents
516 return parents
517
517
518 def expandpats(pats):
518 def expandpats(pats):
519 '''Expand bare globs when running on windows.
519 '''Expand bare globs when running on windows.
520 On posix we assume it already has already been done by sh.'''
520 On posix we assume it already has already been done by sh.'''
521 if not util.expandglobs:
521 if not util.expandglobs:
522 return list(pats)
522 return list(pats)
523 ret = []
523 ret = []
524 for kindpat in pats:
524 for kindpat in pats:
525 kind, pat = matchmod._patsplit(kindpat, None)
525 kind, pat = matchmod._patsplit(kindpat, None)
526 if kind is None:
526 if kind is None:
527 try:
527 try:
528 globbed = glob.glob(pat)
528 globbed = glob.glob(pat)
529 except re.error:
529 except re.error:
530 globbed = [pat]
530 globbed = [pat]
531 if globbed:
531 if globbed:
532 ret.extend(globbed)
532 ret.extend(globbed)
533 continue
533 continue
534 ret.append(kindpat)
534 ret.append(kindpat)
535 return ret
535 return ret
536
536
537 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
537 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
538 badfn=None):
538 badfn=None):
539 '''Return a matcher and the patterns that were used.
539 '''Return a matcher and the patterns that were used.
540 The matcher will warn about bad matches, unless an alternate badfn callback
540 The matcher will warn about bad matches, unless an alternate badfn callback
541 is provided.'''
541 is provided.'''
542 if pats == ("",):
542 if pats == ("",):
543 pats = []
543 pats = []
544 if opts is None:
544 if opts is None:
545 opts = {}
545 opts = {}
546 if not globbed and default == 'relpath':
546 if not globbed and default == 'relpath':
547 pats = expandpats(pats or [])
547 pats = expandpats(pats or [])
548
548
549 def bad(f, msg):
549 def bad(f, msg):
550 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
550 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
551
551
552 if badfn is None:
552 if badfn is None:
553 badfn = bad
553 badfn = bad
554
554
555 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
555 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
556 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
556 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
557
557
558 if m.always():
558 if m.always():
559 pats = []
559 pats = []
560 return m, pats
560 return m, pats
561
561
562 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
562 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
563 badfn=None):
563 badfn=None):
564 '''Return a matcher that will warn about bad matches.'''
564 '''Return a matcher that will warn about bad matches.'''
565 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
565 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
566
566
567 def matchall(repo):
567 def matchall(repo):
568 '''Return a matcher that will efficiently match everything.'''
568 '''Return a matcher that will efficiently match everything.'''
569 return matchmod.always(repo.root, repo.getcwd())
569 return matchmod.always(repo.root, repo.getcwd())
570
570
571 def matchfiles(repo, files, badfn=None):
571 def matchfiles(repo, files, badfn=None):
572 '''Return a matcher that will efficiently match exactly these files.'''
572 '''Return a matcher that will efficiently match exactly these files.'''
573 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
573 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
574
574
575 def parsefollowlinespattern(repo, rev, pat, msg):
575 def parsefollowlinespattern(repo, rev, pat, msg):
576 """Return a file name from `pat` pattern suitable for usage in followlines
576 """Return a file name from `pat` pattern suitable for usage in followlines
577 logic.
577 logic.
578 """
578 """
579 if not matchmod.patkind(pat):
579 if not matchmod.patkind(pat):
580 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
580 return pathutil.canonpath(repo.root, repo.getcwd(), pat)
581 else:
581 else:
582 ctx = repo[rev]
582 ctx = repo[rev]
583 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
583 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
584 files = [f for f in ctx if m(f)]
584 files = [f for f in ctx if m(f)]
585 if len(files) != 1:
585 if len(files) != 1:
586 raise error.ParseError(msg)
586 raise error.ParseError(msg)
587 return files[0]
587 return files[0]
588
588
589 def origpath(ui, repo, filepath):
589 def origpath(ui, repo, filepath):
590 '''customize where .orig files are created
590 '''customize where .orig files are created
591
591
592 Fetch user defined path from config file: [ui] origbackuppath = <path>
592 Fetch user defined path from config file: [ui] origbackuppath = <path>
593 Fall back to default (filepath with .orig suffix) if not specified
593 Fall back to default (filepath with .orig suffix) if not specified
594 '''
594 '''
595 origbackuppath = ui.config('ui', 'origbackuppath')
595 origbackuppath = ui.config('ui', 'origbackuppath')
596 if not origbackuppath:
596 if not origbackuppath:
597 return filepath + ".orig"
597 return filepath + ".orig"
598
598
599 # Convert filepath from an absolute path into a path inside the repo.
599 # Convert filepath from an absolute path into a path inside the repo.
600 filepathfromroot = util.normpath(os.path.relpath(filepath,
600 filepathfromroot = util.normpath(os.path.relpath(filepath,
601 start=repo.root))
601 start=repo.root))
602
602
603 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
603 origvfs = vfs.vfs(repo.wjoin(origbackuppath))
604 origbackupdir = origvfs.dirname(filepathfromroot)
604 origbackupdir = origvfs.dirname(filepathfromroot)
605 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
605 if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
606 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
606 ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))
607
607
608 # Remove any files that conflict with the backup file's path
608 # Remove any files that conflict with the backup file's path
609 for f in reversed(list(util.finddirs(filepathfromroot))):
609 for f in reversed(list(util.finddirs(filepathfromroot))):
610 if origvfs.isfileorlink(f):
610 if origvfs.isfileorlink(f):
611 ui.note(_('removing conflicting file: %s\n')
611 ui.note(_('removing conflicting file: %s\n')
612 % origvfs.join(f))
612 % origvfs.join(f))
613 origvfs.unlink(f)
613 origvfs.unlink(f)
614 break
614 break
615
615
616 origvfs.makedirs(origbackupdir)
616 origvfs.makedirs(origbackupdir)
617
617
618 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
618 if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
619 ui.note(_('removing conflicting directory: %s\n')
619 ui.note(_('removing conflicting directory: %s\n')
620 % origvfs.join(filepathfromroot))
620 % origvfs.join(filepathfromroot))
621 origvfs.rmtree(filepathfromroot, forcibly=True)
621 origvfs.rmtree(filepathfromroot, forcibly=True)
622
622
623 return origvfs.join(filepathfromroot)
623 return origvfs.join(filepathfromroot)
624
624
625 class _containsnode(object):
625 class _containsnode(object):
626 """proxy __contains__(node) to container.__contains__ which accepts revs"""
626 """proxy __contains__(node) to container.__contains__ which accepts revs"""
627
627
628 def __init__(self, repo, revcontainer):
628 def __init__(self, repo, revcontainer):
629 self._torev = repo.changelog.rev
629 self._torev = repo.changelog.rev
630 self._revcontains = revcontainer.__contains__
630 self._revcontains = revcontainer.__contains__
631
631
632 def __contains__(self, node):
632 def __contains__(self, node):
633 return self._revcontains(self._torev(node))
633 return self._revcontains(self._torev(node))
634
634
635 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
635 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
636 """do common cleanups when old nodes are replaced by new nodes
636 """do common cleanups when old nodes are replaced by new nodes
637
637
638 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
638 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
639 (we might also want to move working directory parent in the future)
639 (we might also want to move working directory parent in the future)
640
640
641 By default, bookmark moves are calculated automatically from 'replacements',
641 By default, bookmark moves are calculated automatically from 'replacements',
642 but 'moves' can be used to override that. Also, 'moves' may include
642 but 'moves' can be used to override that. Also, 'moves' may include
643 additional bookmark moves that should not have associated obsmarkers.
643 additional bookmark moves that should not have associated obsmarkers.
644
644
645 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
645 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
646 have replacements. operation is a string, like "rebase".
646 have replacements. operation is a string, like "rebase".
647
647
648 metadata is dictionary containing metadata to be stored in obsmarker if
648 metadata is dictionary containing metadata to be stored in obsmarker if
649 obsolescence is enabled.
649 obsolescence is enabled.
650 """
650 """
651 if not replacements and not moves:
651 if not replacements and not moves:
652 return
652 return
653
653
654 # translate mapping's other forms
654 # translate mapping's other forms
655 if not util.safehasattr(replacements, 'items'):
655 if not util.safehasattr(replacements, 'items'):
656 replacements = {n: () for n in replacements}
656 replacements = {n: () for n in replacements}
657
657
658 # Calculate bookmark movements
658 # Calculate bookmark movements
659 if moves is None:
659 if moves is None:
660 moves = {}
660 moves = {}
661 # Unfiltered repo is needed since nodes in replacements might be hidden.
661 # Unfiltered repo is needed since nodes in replacements might be hidden.
662 unfi = repo.unfiltered()
662 unfi = repo.unfiltered()
663 for oldnode, newnodes in replacements.items():
663 for oldnode, newnodes in replacements.items():
664 if oldnode in moves:
664 if oldnode in moves:
665 continue
665 continue
666 if len(newnodes) > 1:
666 if len(newnodes) > 1:
667 # usually a split, take the one with biggest rev number
667 # usually a split, take the one with biggest rev number
668 newnode = next(unfi.set('max(%ln)', newnodes)).node()
668 newnode = next(unfi.set('max(%ln)', newnodes)).node()
669 elif len(newnodes) == 0:
669 elif len(newnodes) == 0:
670 # move bookmark backwards
670 # move bookmark backwards
671 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
671 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
672 list(replacements)))
672 list(replacements)))
673 if roots:
673 if roots:
674 newnode = roots[0].node()
674 newnode = roots[0].node()
675 else:
675 else:
676 newnode = nullid
676 newnode = nullid
677 else:
677 else:
678 newnode = newnodes[0]
678 newnode = newnodes[0]
679 moves[oldnode] = newnode
679 moves[oldnode] = newnode
680
680
681 with repo.transaction('cleanup') as tr:
681 with repo.transaction('cleanup') as tr:
682 # Move bookmarks
682 # Move bookmarks
683 bmarks = repo._bookmarks
683 bmarks = repo._bookmarks
684 bmarkchanges = []
684 bmarkchanges = []
685 allnewnodes = [n for ns in replacements.values() for n in ns]
685 allnewnodes = [n for ns in replacements.values() for n in ns]
686 for oldnode, newnode in moves.items():
686 for oldnode, newnode in moves.items():
687 oldbmarks = repo.nodebookmarks(oldnode)
687 oldbmarks = repo.nodebookmarks(oldnode)
688 if not oldbmarks:
688 if not oldbmarks:
689 continue
689 continue
690 from . import bookmarks # avoid import cycle
690 from . import bookmarks # avoid import cycle
691 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
691 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
692 (oldbmarks, hex(oldnode), hex(newnode)))
692 (util.rapply(pycompat.maybebytestr, oldbmarks),
693 hex(oldnode), hex(newnode)))
693 # Delete divergent bookmarks being parents of related newnodes
694 # Delete divergent bookmarks being parents of related newnodes
694 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
695 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
695 allnewnodes, newnode, oldnode)
696 allnewnodes, newnode, oldnode)
696 deletenodes = _containsnode(repo, deleterevs)
697 deletenodes = _containsnode(repo, deleterevs)
697 for name in oldbmarks:
698 for name in oldbmarks:
698 bmarkchanges.append((name, newnode))
699 bmarkchanges.append((name, newnode))
699 for b in bookmarks.divergent2delete(repo, deletenodes, name):
700 for b in bookmarks.divergent2delete(repo, deletenodes, name):
700 bmarkchanges.append((b, None))
701 bmarkchanges.append((b, None))
701
702
702 if bmarkchanges:
703 if bmarkchanges:
703 bmarks.applychanges(repo, tr, bmarkchanges)
704 bmarks.applychanges(repo, tr, bmarkchanges)
704
705
705 # Obsolete or strip nodes
706 # Obsolete or strip nodes
706 if obsolete.isenabled(repo, obsolete.createmarkersopt):
707 if obsolete.isenabled(repo, obsolete.createmarkersopt):
707 # If a node is already obsoleted, and we want to obsolete it
708 # If a node is already obsoleted, and we want to obsolete it
708 # without a successor, skip that obssolete request since it's
709 # without a successor, skip that obssolete request since it's
709 # unnecessary. That's the "if s or not isobs(n)" check below.
710 # unnecessary. That's the "if s or not isobs(n)" check below.
710 # Also sort the node in topology order, that might be useful for
711 # Also sort the node in topology order, that might be useful for
711 # some obsstore logic.
712 # some obsstore logic.
712 # NOTE: the filtering and sorting might belong to createmarkers.
713 # NOTE: the filtering and sorting might belong to createmarkers.
713 isobs = unfi.obsstore.successors.__contains__
714 isobs = unfi.obsstore.successors.__contains__
714 torev = unfi.changelog.rev
715 torev = unfi.changelog.rev
715 sortfunc = lambda ns: torev(ns[0])
716 sortfunc = lambda ns: torev(ns[0])
716 rels = [(unfi[n], tuple(unfi[m] for m in s))
717 rels = [(unfi[n], tuple(unfi[m] for m in s))
717 for n, s in sorted(replacements.items(), key=sortfunc)
718 for n, s in sorted(replacements.items(), key=sortfunc)
718 if s or not isobs(n)]
719 if s or not isobs(n)]
719 if rels:
720 if rels:
720 obsolete.createmarkers(repo, rels, operation=operation,
721 obsolete.createmarkers(repo, rels, operation=operation,
721 metadata=metadata)
722 metadata=metadata)
722 else:
723 else:
723 from . import repair # avoid import cycle
724 from . import repair # avoid import cycle
724 tostrip = list(replacements)
725 tostrip = list(replacements)
725 if tostrip:
726 if tostrip:
726 repair.delayedstrip(repo.ui, repo, tostrip, operation)
727 repair.delayedstrip(repo.ui, repo, tostrip, operation)
727
728
728 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
729 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
729 if opts is None:
730 if opts is None:
730 opts = {}
731 opts = {}
731 m = matcher
732 m = matcher
732 if dry_run is None:
733 if dry_run is None:
733 dry_run = opts.get('dry_run')
734 dry_run = opts.get('dry_run')
734 if similarity is None:
735 if similarity is None:
735 similarity = float(opts.get('similarity') or 0)
736 similarity = float(opts.get('similarity') or 0)
736
737
737 ret = 0
738 ret = 0
738 join = lambda f: os.path.join(prefix, f)
739 join = lambda f: os.path.join(prefix, f)
739
740
740 wctx = repo[None]
741 wctx = repo[None]
741 for subpath in sorted(wctx.substate):
742 for subpath in sorted(wctx.substate):
742 submatch = matchmod.subdirmatcher(subpath, m)
743 submatch = matchmod.subdirmatcher(subpath, m)
743 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
744 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
744 sub = wctx.sub(subpath)
745 sub = wctx.sub(subpath)
745 try:
746 try:
746 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
747 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
747 ret = 1
748 ret = 1
748 except error.LookupError:
749 except error.LookupError:
749 repo.ui.status(_("skipping missing subrepository: %s\n")
750 repo.ui.status(_("skipping missing subrepository: %s\n")
750 % join(subpath))
751 % join(subpath))
751
752
752 rejected = []
753 rejected = []
753 def badfn(f, msg):
754 def badfn(f, msg):
754 if f in m.files():
755 if f in m.files():
755 m.bad(f, msg)
756 m.bad(f, msg)
756 rejected.append(f)
757 rejected.append(f)
757
758
758 badmatch = matchmod.badmatch(m, badfn)
759 badmatch = matchmod.badmatch(m, badfn)
759 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
760 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
760 badmatch)
761 badmatch)
761
762
762 unknownset = set(unknown + forgotten)
763 unknownset = set(unknown + forgotten)
763 toprint = unknownset.copy()
764 toprint = unknownset.copy()
764 toprint.update(deleted)
765 toprint.update(deleted)
765 for abs in sorted(toprint):
766 for abs in sorted(toprint):
766 if repo.ui.verbose or not m.exact(abs):
767 if repo.ui.verbose or not m.exact(abs):
767 if abs in unknownset:
768 if abs in unknownset:
768 status = _('adding %s\n') % m.uipath(abs)
769 status = _('adding %s\n') % m.uipath(abs)
769 else:
770 else:
770 status = _('removing %s\n') % m.uipath(abs)
771 status = _('removing %s\n') % m.uipath(abs)
771 repo.ui.status(status)
772 repo.ui.status(status)
772
773
773 renames = _findrenames(repo, m, added + unknown, removed + deleted,
774 renames = _findrenames(repo, m, added + unknown, removed + deleted,
774 similarity)
775 similarity)
775
776
776 if not dry_run:
777 if not dry_run:
777 _markchanges(repo, unknown + forgotten, deleted, renames)
778 _markchanges(repo, unknown + forgotten, deleted, renames)
778
779
779 for f in rejected:
780 for f in rejected:
780 if f in m.files():
781 if f in m.files():
781 return 1
782 return 1
782 return ret
783 return ret
783
784
784 def marktouched(repo, files, similarity=0.0):
785 def marktouched(repo, files, similarity=0.0):
785 '''Assert that files have somehow been operated upon. files are relative to
786 '''Assert that files have somehow been operated upon. files are relative to
786 the repo root.'''
787 the repo root.'''
787 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
788 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
788 rejected = []
789 rejected = []
789
790
790 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
791 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
791
792
792 if repo.ui.verbose:
793 if repo.ui.verbose:
793 unknownset = set(unknown + forgotten)
794 unknownset = set(unknown + forgotten)
794 toprint = unknownset.copy()
795 toprint = unknownset.copy()
795 toprint.update(deleted)
796 toprint.update(deleted)
796 for abs in sorted(toprint):
797 for abs in sorted(toprint):
797 if abs in unknownset:
798 if abs in unknownset:
798 status = _('adding %s\n') % abs
799 status = _('adding %s\n') % abs
799 else:
800 else:
800 status = _('removing %s\n') % abs
801 status = _('removing %s\n') % abs
801 repo.ui.status(status)
802 repo.ui.status(status)
802
803
803 renames = _findrenames(repo, m, added + unknown, removed + deleted,
804 renames = _findrenames(repo, m, added + unknown, removed + deleted,
804 similarity)
805 similarity)
805
806
806 _markchanges(repo, unknown + forgotten, deleted, renames)
807 _markchanges(repo, unknown + forgotten, deleted, renames)
807
808
808 for f in rejected:
809 for f in rejected:
809 if f in m.files():
810 if f in m.files():
810 return 1
811 return 1
811 return 0
812 return 0
812
813
813 def _interestingfiles(repo, matcher):
814 def _interestingfiles(repo, matcher):
814 '''Walk dirstate with matcher, looking for files that addremove would care
815 '''Walk dirstate with matcher, looking for files that addremove would care
815 about.
816 about.
816
817
817 This is different from dirstate.status because it doesn't care about
818 This is different from dirstate.status because it doesn't care about
818 whether files are modified or clean.'''
819 whether files are modified or clean.'''
819 added, unknown, deleted, removed, forgotten = [], [], [], [], []
820 added, unknown, deleted, removed, forgotten = [], [], [], [], []
820 audit_path = pathutil.pathauditor(repo.root, cached=True)
821 audit_path = pathutil.pathauditor(repo.root, cached=True)
821
822
822 ctx = repo[None]
823 ctx = repo[None]
823 dirstate = repo.dirstate
824 dirstate = repo.dirstate
824 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
825 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
825 unknown=True, ignored=False, full=False)
826 unknown=True, ignored=False, full=False)
826 for abs, st in walkresults.iteritems():
827 for abs, st in walkresults.iteritems():
827 dstate = dirstate[abs]
828 dstate = dirstate[abs]
828 if dstate == '?' and audit_path.check(abs):
829 if dstate == '?' and audit_path.check(abs):
829 unknown.append(abs)
830 unknown.append(abs)
830 elif dstate != 'r' and not st:
831 elif dstate != 'r' and not st:
831 deleted.append(abs)
832 deleted.append(abs)
832 elif dstate == 'r' and st:
833 elif dstate == 'r' and st:
833 forgotten.append(abs)
834 forgotten.append(abs)
834 # for finding renames
835 # for finding renames
835 elif dstate == 'r' and not st:
836 elif dstate == 'r' and not st:
836 removed.append(abs)
837 removed.append(abs)
837 elif dstate == 'a':
838 elif dstate == 'a':
838 added.append(abs)
839 added.append(abs)
839
840
840 return added, unknown, deleted, removed, forgotten
841 return added, unknown, deleted, removed, forgotten
841
842
842 def _findrenames(repo, matcher, added, removed, similarity):
843 def _findrenames(repo, matcher, added, removed, similarity):
843 '''Find renames from removed files to added ones.'''
844 '''Find renames from removed files to added ones.'''
844 renames = {}
845 renames = {}
845 if similarity > 0:
846 if similarity > 0:
846 for old, new, score in similar.findrenames(repo, added, removed,
847 for old, new, score in similar.findrenames(repo, added, removed,
847 similarity):
848 similarity):
848 if (repo.ui.verbose or not matcher.exact(old)
849 if (repo.ui.verbose or not matcher.exact(old)
849 or not matcher.exact(new)):
850 or not matcher.exact(new)):
850 repo.ui.status(_('recording removal of %s as rename to %s '
851 repo.ui.status(_('recording removal of %s as rename to %s '
851 '(%d%% similar)\n') %
852 '(%d%% similar)\n') %
852 (matcher.rel(old), matcher.rel(new),
853 (matcher.rel(old), matcher.rel(new),
853 score * 100))
854 score * 100))
854 renames[new] = old
855 renames[new] = old
855 return renames
856 return renames
856
857
857 def _markchanges(repo, unknown, deleted, renames):
858 def _markchanges(repo, unknown, deleted, renames):
858 '''Marks the files in unknown as added, the files in deleted as removed,
859 '''Marks the files in unknown as added, the files in deleted as removed,
859 and the files in renames as copied.'''
860 and the files in renames as copied.'''
860 wctx = repo[None]
861 wctx = repo[None]
861 with repo.wlock():
862 with repo.wlock():
862 wctx.forget(deleted)
863 wctx.forget(deleted)
863 wctx.add(unknown)
864 wctx.add(unknown)
864 for new, old in renames.iteritems():
865 for new, old in renames.iteritems():
865 wctx.copy(old, new)
866 wctx.copy(old, new)
866
867
867 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
868 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
868 """Update the dirstate to reflect the intent of copying src to dst. For
869 """Update the dirstate to reflect the intent of copying src to dst. For
869 different reasons it might not end with dst being marked as copied from src.
870 different reasons it might not end with dst being marked as copied from src.
870 """
871 """
871 origsrc = repo.dirstate.copied(src) or src
872 origsrc = repo.dirstate.copied(src) or src
872 if dst == origsrc: # copying back a copy?
873 if dst == origsrc: # copying back a copy?
873 if repo.dirstate[dst] not in 'mn' and not dryrun:
874 if repo.dirstate[dst] not in 'mn' and not dryrun:
874 repo.dirstate.normallookup(dst)
875 repo.dirstate.normallookup(dst)
875 else:
876 else:
876 if repo.dirstate[origsrc] == 'a' and origsrc == src:
877 if repo.dirstate[origsrc] == 'a' and origsrc == src:
877 if not ui.quiet:
878 if not ui.quiet:
878 ui.warn(_("%s has not been committed yet, so no copy "
879 ui.warn(_("%s has not been committed yet, so no copy "
879 "data will be stored for %s.\n")
880 "data will be stored for %s.\n")
880 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
881 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
881 if repo.dirstate[dst] in '?r' and not dryrun:
882 if repo.dirstate[dst] in '?r' and not dryrun:
882 wctx.add([dst])
883 wctx.add([dst])
883 elif not dryrun:
884 elif not dryrun:
884 wctx.copy(origsrc, dst)
885 wctx.copy(origsrc, dst)
885
886
886 def readrequires(opener, supported):
887 def readrequires(opener, supported):
887 '''Reads and parses .hg/requires and checks if all entries found
888 '''Reads and parses .hg/requires and checks if all entries found
888 are in the list of supported features.'''
889 are in the list of supported features.'''
889 requirements = set(opener.read("requires").splitlines())
890 requirements = set(opener.read("requires").splitlines())
890 missings = []
891 missings = []
891 for r in requirements:
892 for r in requirements:
892 if r not in supported:
893 if r not in supported:
893 if not r or not r[0:1].isalnum():
894 if not r or not r[0:1].isalnum():
894 raise error.RequirementError(_(".hg/requires file is corrupt"))
895 raise error.RequirementError(_(".hg/requires file is corrupt"))
895 missings.append(r)
896 missings.append(r)
896 missings.sort()
897 missings.sort()
897 if missings:
898 if missings:
898 raise error.RequirementError(
899 raise error.RequirementError(
899 _("repository requires features unknown to this Mercurial: %s")
900 _("repository requires features unknown to this Mercurial: %s")
900 % " ".join(missings),
901 % " ".join(missings),
901 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
902 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
902 " for more information"))
903 " for more information"))
903 return requirements
904 return requirements
904
905
905 def writerequires(opener, requirements):
906 def writerequires(opener, requirements):
906 with opener('requires', 'w') as fp:
907 with opener('requires', 'w') as fp:
907 for r in sorted(requirements):
908 for r in sorted(requirements):
908 fp.write("%s\n" % r)
909 fp.write("%s\n" % r)
909
910
910 class filecachesubentry(object):
911 class filecachesubentry(object):
911 def __init__(self, path, stat):
912 def __init__(self, path, stat):
912 self.path = path
913 self.path = path
913 self.cachestat = None
914 self.cachestat = None
914 self._cacheable = None
915 self._cacheable = None
915
916
916 if stat:
917 if stat:
917 self.cachestat = filecachesubentry.stat(self.path)
918 self.cachestat = filecachesubentry.stat(self.path)
918
919
919 if self.cachestat:
920 if self.cachestat:
920 self._cacheable = self.cachestat.cacheable()
921 self._cacheable = self.cachestat.cacheable()
921 else:
922 else:
922 # None means we don't know yet
923 # None means we don't know yet
923 self._cacheable = None
924 self._cacheable = None
924
925
925 def refresh(self):
926 def refresh(self):
926 if self.cacheable():
927 if self.cacheable():
927 self.cachestat = filecachesubentry.stat(self.path)
928 self.cachestat = filecachesubentry.stat(self.path)
928
929
929 def cacheable(self):
930 def cacheable(self):
930 if self._cacheable is not None:
931 if self._cacheable is not None:
931 return self._cacheable
932 return self._cacheable
932
933
933 # we don't know yet, assume it is for now
934 # we don't know yet, assume it is for now
934 return True
935 return True
935
936
936 def changed(self):
937 def changed(self):
937 # no point in going further if we can't cache it
938 # no point in going further if we can't cache it
938 if not self.cacheable():
939 if not self.cacheable():
939 return True
940 return True
940
941
941 newstat = filecachesubentry.stat(self.path)
942 newstat = filecachesubentry.stat(self.path)
942
943
943 # we may not know if it's cacheable yet, check again now
944 # we may not know if it's cacheable yet, check again now
944 if newstat and self._cacheable is None:
945 if newstat and self._cacheable is None:
945 self._cacheable = newstat.cacheable()
946 self._cacheable = newstat.cacheable()
946
947
947 # check again
948 # check again
948 if not self._cacheable:
949 if not self._cacheable:
949 return True
950 return True
950
951
951 if self.cachestat != newstat:
952 if self.cachestat != newstat:
952 self.cachestat = newstat
953 self.cachestat = newstat
953 return True
954 return True
954 else:
955 else:
955 return False
956 return False
956
957
957 @staticmethod
958 @staticmethod
958 def stat(path):
959 def stat(path):
959 try:
960 try:
960 return util.cachestat(path)
961 return util.cachestat(path)
961 except OSError as e:
962 except OSError as e:
962 if e.errno != errno.ENOENT:
963 if e.errno != errno.ENOENT:
963 raise
964 raise
964
965
965 class filecacheentry(object):
966 class filecacheentry(object):
966 def __init__(self, paths, stat=True):
967 def __init__(self, paths, stat=True):
967 self._entries = []
968 self._entries = []
968 for path in paths:
969 for path in paths:
969 self._entries.append(filecachesubentry(path, stat))
970 self._entries.append(filecachesubentry(path, stat))
970
971
971 def changed(self):
972 def changed(self):
972 '''true if any entry has changed'''
973 '''true if any entry has changed'''
973 for entry in self._entries:
974 for entry in self._entries:
974 if entry.changed():
975 if entry.changed():
975 return True
976 return True
976 return False
977 return False
977
978
978 def refresh(self):
979 def refresh(self):
979 for entry in self._entries:
980 for entry in self._entries:
980 entry.refresh()
981 entry.refresh()
981
982
982 class filecache(object):
983 class filecache(object):
983 '''A property like decorator that tracks files under .hg/ for updates.
984 '''A property like decorator that tracks files under .hg/ for updates.
984
985
985 Records stat info when called in _filecache.
986 Records stat info when called in _filecache.
986
987
987 On subsequent calls, compares old stat info with new info, and recreates the
988 On subsequent calls, compares old stat info with new info, and recreates the
988 object when any of the files changes, updating the new stat info in
989 object when any of the files changes, updating the new stat info in
989 _filecache.
990 _filecache.
990
991
991 Mercurial either atomic renames or appends for files under .hg,
992 Mercurial either atomic renames or appends for files under .hg,
992 so to ensure the cache is reliable we need the filesystem to be able
993 so to ensure the cache is reliable we need the filesystem to be able
993 to tell us if a file has been replaced. If it can't, we fallback to
994 to tell us if a file has been replaced. If it can't, we fallback to
994 recreating the object on every call (essentially the same behavior as
995 recreating the object on every call (essentially the same behavior as
995 propertycache).
996 propertycache).
996
997
997 '''
998 '''
998 def __init__(self, *paths):
999 def __init__(self, *paths):
999 self.paths = paths
1000 self.paths = paths
1000
1001
1001 def join(self, obj, fname):
1002 def join(self, obj, fname):
1002 """Used to compute the runtime path of a cached file.
1003 """Used to compute the runtime path of a cached file.
1003
1004
1004 Users should subclass filecache and provide their own version of this
1005 Users should subclass filecache and provide their own version of this
1005 function to call the appropriate join function on 'obj' (an instance
1006 function to call the appropriate join function on 'obj' (an instance
1006 of the class that its member function was decorated).
1007 of the class that its member function was decorated).
1007 """
1008 """
1008 raise NotImplementedError
1009 raise NotImplementedError
1009
1010
1010 def __call__(self, func):
1011 def __call__(self, func):
1011 self.func = func
1012 self.func = func
1012 self.name = func.__name__.encode('ascii')
1013 self.name = func.__name__.encode('ascii')
1013 return self
1014 return self
1014
1015
1015 def __get__(self, obj, type=None):
1016 def __get__(self, obj, type=None):
1016 # if accessed on the class, return the descriptor itself.
1017 # if accessed on the class, return the descriptor itself.
1017 if obj is None:
1018 if obj is None:
1018 return self
1019 return self
1019 # do we need to check if the file changed?
1020 # do we need to check if the file changed?
1020 if self.name in obj.__dict__:
1021 if self.name in obj.__dict__:
1021 assert self.name in obj._filecache, self.name
1022 assert self.name in obj._filecache, self.name
1022 return obj.__dict__[self.name]
1023 return obj.__dict__[self.name]
1023
1024
1024 entry = obj._filecache.get(self.name)
1025 entry = obj._filecache.get(self.name)
1025
1026
1026 if entry:
1027 if entry:
1027 if entry.changed():
1028 if entry.changed():
1028 entry.obj = self.func(obj)
1029 entry.obj = self.func(obj)
1029 else:
1030 else:
1030 paths = [self.join(obj, path) for path in self.paths]
1031 paths = [self.join(obj, path) for path in self.paths]
1031
1032
1032 # We stat -before- creating the object so our cache doesn't lie if
1033 # We stat -before- creating the object so our cache doesn't lie if
1033 # a writer modified between the time we read and stat
1034 # a writer modified between the time we read and stat
1034 entry = filecacheentry(paths, True)
1035 entry = filecacheentry(paths, True)
1035 entry.obj = self.func(obj)
1036 entry.obj = self.func(obj)
1036
1037
1037 obj._filecache[self.name] = entry
1038 obj._filecache[self.name] = entry
1038
1039
1039 obj.__dict__[self.name] = entry.obj
1040 obj.__dict__[self.name] = entry.obj
1040 return entry.obj
1041 return entry.obj
1041
1042
1042 def __set__(self, obj, value):
1043 def __set__(self, obj, value):
1043 if self.name not in obj._filecache:
1044 if self.name not in obj._filecache:
1044 # we add an entry for the missing value because X in __dict__
1045 # we add an entry for the missing value because X in __dict__
1045 # implies X in _filecache
1046 # implies X in _filecache
1046 paths = [self.join(obj, path) for path in self.paths]
1047 paths = [self.join(obj, path) for path in self.paths]
1047 ce = filecacheentry(paths, False)
1048 ce = filecacheentry(paths, False)
1048 obj._filecache[self.name] = ce
1049 obj._filecache[self.name] = ce
1049 else:
1050 else:
1050 ce = obj._filecache[self.name]
1051 ce = obj._filecache[self.name]
1051
1052
1052 ce.obj = value # update cached copy
1053 ce.obj = value # update cached copy
1053 obj.__dict__[self.name] = value # update copy returned by obj.x
1054 obj.__dict__[self.name] = value # update copy returned by obj.x
1054
1055
1055 def __delete__(self, obj):
1056 def __delete__(self, obj):
1056 try:
1057 try:
1057 del obj.__dict__[self.name]
1058 del obj.__dict__[self.name]
1058 except KeyError:
1059 except KeyError:
1059 raise AttributeError(self.name)
1060 raise AttributeError(self.name)
1060
1061
1061 def extdatasource(repo, source):
1062 def extdatasource(repo, source):
1062 """Gather a map of rev -> value dict from the specified source
1063 """Gather a map of rev -> value dict from the specified source
1063
1064
1064 A source spec is treated as a URL, with a special case shell: type
1065 A source spec is treated as a URL, with a special case shell: type
1065 for parsing the output from a shell command.
1066 for parsing the output from a shell command.
1066
1067
1067 The data is parsed as a series of newline-separated records where
1068 The data is parsed as a series of newline-separated records where
1068 each record is a revision specifier optionally followed by a space
1069 each record is a revision specifier optionally followed by a space
1069 and a freeform string value. If the revision is known locally, it
1070 and a freeform string value. If the revision is known locally, it
1070 is converted to a rev, otherwise the record is skipped.
1071 is converted to a rev, otherwise the record is skipped.
1071
1072
1072 Note that both key and value are treated as UTF-8 and converted to
1073 Note that both key and value are treated as UTF-8 and converted to
1073 the local encoding. This allows uniformity between local and
1074 the local encoding. This allows uniformity between local and
1074 remote data sources.
1075 remote data sources.
1075 """
1076 """
1076
1077
1077 spec = repo.ui.config("extdata", source)
1078 spec = repo.ui.config("extdata", source)
1078 if not spec:
1079 if not spec:
1079 raise error.Abort(_("unknown extdata source '%s'") % source)
1080 raise error.Abort(_("unknown extdata source '%s'") % source)
1080
1081
1081 data = {}
1082 data = {}
1082 src = proc = None
1083 src = proc = None
1083 try:
1084 try:
1084 if spec.startswith("shell:"):
1085 if spec.startswith("shell:"):
1085 # external commands should be run relative to the repo root
1086 # external commands should be run relative to the repo root
1086 cmd = spec[6:]
1087 cmd = spec[6:]
1087 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1088 proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
1088 close_fds=util.closefds,
1089 close_fds=util.closefds,
1089 stdout=subprocess.PIPE, cwd=repo.root)
1090 stdout=subprocess.PIPE, cwd=repo.root)
1090 src = proc.stdout
1091 src = proc.stdout
1091 else:
1092 else:
1092 # treat as a URL or file
1093 # treat as a URL or file
1093 src = url.open(repo.ui, spec)
1094 src = url.open(repo.ui, spec)
1094 for l in src:
1095 for l in src:
1095 if " " in l:
1096 if " " in l:
1096 k, v = l.strip().split(" ", 1)
1097 k, v = l.strip().split(" ", 1)
1097 else:
1098 else:
1098 k, v = l.strip(), ""
1099 k, v = l.strip(), ""
1099
1100
1100 k = encoding.tolocal(k)
1101 k = encoding.tolocal(k)
1101 try:
1102 try:
1102 data[repo[k].rev()] = encoding.tolocal(v)
1103 data[repo[k].rev()] = encoding.tolocal(v)
1103 except (error.LookupError, error.RepoLookupError):
1104 except (error.LookupError, error.RepoLookupError):
1104 pass # we ignore data for nodes that don't exist locally
1105 pass # we ignore data for nodes that don't exist locally
1105 finally:
1106 finally:
1106 if proc:
1107 if proc:
1107 proc.communicate()
1108 proc.communicate()
1108 if src:
1109 if src:
1109 src.close()
1110 src.close()
1110 if proc and proc.returncode != 0:
1111 if proc and proc.returncode != 0:
1111 raise error.Abort(_("extdata command '%s' failed: %s")
1112 raise error.Abort(_("extdata command '%s' failed: %s")
1112 % (cmd, util.explainexit(proc.returncode)[0]))
1113 % (cmd, util.explainexit(proc.returncode)[0]))
1113
1114
1114 return data
1115 return data
1115
1116
1116 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1117 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1117 if lock is None:
1118 if lock is None:
1118 raise error.LockInheritanceContractViolation(
1119 raise error.LockInheritanceContractViolation(
1119 'lock can only be inherited while held')
1120 'lock can only be inherited while held')
1120 if environ is None:
1121 if environ is None:
1121 environ = {}
1122 environ = {}
1122 with lock.inherit() as locker:
1123 with lock.inherit() as locker:
1123 environ[envvar] = locker
1124 environ[envvar] = locker
1124 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1125 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1125
1126
1126 def wlocksub(repo, cmd, *args, **kwargs):
1127 def wlocksub(repo, cmd, *args, **kwargs):
1127 """run cmd as a subprocess that allows inheriting repo's wlock
1128 """run cmd as a subprocess that allows inheriting repo's wlock
1128
1129
1129 This can only be called while the wlock is held. This takes all the
1130 This can only be called while the wlock is held. This takes all the
1130 arguments that ui.system does, and returns the exit code of the
1131 arguments that ui.system does, and returns the exit code of the
1131 subprocess."""
1132 subprocess."""
1132 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1133 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1133 **kwargs)
1134 **kwargs)
1134
1135
1135 def gdinitconfig(ui):
1136 def gdinitconfig(ui):
1136 """helper function to know if a repo should be created as general delta
1137 """helper function to know if a repo should be created as general delta
1137 """
1138 """
1138 # experimental config: format.generaldelta
1139 # experimental config: format.generaldelta
1139 return (ui.configbool('format', 'generaldelta')
1140 return (ui.configbool('format', 'generaldelta')
1140 or ui.configbool('format', 'usegeneraldelta'))
1141 or ui.configbool('format', 'usegeneraldelta'))
1141
1142
1142 def gddeltaconfig(ui):
1143 def gddeltaconfig(ui):
1143 """helper function to know if incoming delta should be optimised
1144 """helper function to know if incoming delta should be optimised
1144 """
1145 """
1145 # experimental config: format.generaldelta
1146 # experimental config: format.generaldelta
1146 return ui.configbool('format', 'generaldelta')
1147 return ui.configbool('format', 'generaldelta')
1147
1148
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved pseudo-key under which a raw first line is surfaced by read()
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but not used
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        rawlines = self.vfs.readlines(self.path)
        mapping = {}
        if firstlinenonkeyval:
            if not rawlines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # the stored first line keeps its '\n'; strip it before exposing
            mapping[self.firstlinekey] = rawlines[0][:-1]
            rawlines = rawlines[1:]
        try:
            # 'line.strip()' skips lines holding only a '\n', which are
            # truthy and therefore would not be skipped by a bare 'if line'
            parsed = dict(line[:-1].split('=', 1) for line in rawlines
                          if line.strip())
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            mapping.update(parsed)
        except ValueError as e:
            # a malformed line (no '=') surfaces here from the dict() call
            raise error.CorruptedState(str(e))
        return mapping

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        outlines = []
        if firstline is not None:
            outlines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                raise error.ProgrammingError("key name '%s' is reserved"
                                             % self.firstlinekey)
            # k[0:1] (not k[0]) stays safe on an empty key
            if not k[0:1].isalpha():
                raise error.ProgrammingError("keys must start with a letter "
                                             "in a key-value file")
            if not k.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in v:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            outlines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(outlines))
1216
1217
# Transaction-name prefixes for which registersummarycallback() reports the
# number of obsoleted changesets once the transaction closes (matching is
# done with startswith(), see txmatch() below).
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction-name prefixes for which registersummarycallback() reports the
# range of newly added changesets once the transaction closes.
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1237
1238
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    Depending on 'txnname', registers post-close callbacks on transaction
    'otr' that print counts of obsoleted changesets, newly unstable
    changesets, and the range of new changesets.
    """
    def txmatch(sources):
        # transaction names carry the originating command as a prefix
        return any(txnname.startswith(source) for source in sources)

    # category names registered so far; length is used to number new ones
    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            # deliberately shadows the outer 'repo': dereference the weakref
            # and reinstate the original view before invoking the callback
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # zero-padded index — presumably so post-close callbacks fire in
        # registration order; confirm against transaction.addpostclose
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # summarize how many changesets this transaction obsoleted
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # (user-facing label, revset name) for each instability kind
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revs per kind, excluding filtered (hidden) revs
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken before the transaction runs; diffed after close
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                if delta > 0:
                    # only increases are reported; resolved ones stay silent
                    repo.ui.warn(_('%i new %s changesets\n') %
                                 (delta, instability))

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            # empty xrange as fallback when the transaction tracked no revs
            newrevs = tr.changes.get('revs', xrange(0, 0))
            if not newrevs:
                return

            # Compute the bounds of new revisions' range, excluding obsoletes.
            unfi = repo.unfiltered()
            revs = unfi.revs('%ld and not obsolete()', newrevs)
            if not revs:
                # Got only obsoletes.
                return
            minrev, maxrev = repo[revs.min()], repo[revs.max()]

            if minrev == maxrev:
                revrange = minrev
            else:
                revrange = '%s:%s' % (minrev, maxrev)
            repo.ui.status(_('new changesets %s\n') % revrange)
1321
1322
def nodesummaries(repo, nodes, maxnumnodes=4):
    """return a space-separated summary of short hashes for 'nodes'

    All hashes are listed when the set is small or the ui is verbose;
    otherwise only the first 'maxnumnodes' are shown, plus a count of
    the remainder.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1327
1328
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    # history-rewriting transactions are exempt from the check
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1342
1343
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions wrap this function
    return sink
1348
1349
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access is opt-in and only applies to filtered repo views
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect hash-like symbols (hex prefixes / revnums) from every spec
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these cache when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1391
1392
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    # presumably resolves an unambiguous hex prefix to a node; confirm
    # against changelog._partialmatch
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        try:
            n = int(s)
            # NOTE(review): 'n <= tiprev' admits n == len(unficl), one past
            # the last valid rev number — confirm this boundary is intended
            if n <= tiprev:
                if not allowrevnums:
                    continue
                else:
                    # hidden rev: present in the unfiltered changelog but
                    # absent from the filtered view 'cl'
                    if n not in cl:
                        revs.add(n)
                    continue
        except ValueError:
            # not a revnum; fall through and try it as a hash prefix
            pass

        try:
            s = pmatch(s)
        except error.LookupError:
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now