rust: add Vfs trait...
Raphaël Gomès
r52761:db7dbe6f (default branch)
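The lockfile diff below adds `dyn-clone` 1.0.17 and `filetime` 0.2.25 to hg-core's dependency set (with the transitive `libredox`/`redox_syscall`/`windows-sys` adjustments that `filetime` pulls in). As a rough illustration only, and not the actual hg-core code (the Rust sources are not part of this diff), a cloneable, object-safe VFS-style trait is the kind of pattern the `dyn-clone` crate enables; a minimal sketch, assuming `dyn-clone` is declared in Cargo.toml:

    // Hypothetical sketch, not Mercurial's real Vfs trait: an object-safe
    // filesystem abstraction whose boxed trait objects can be cloned.
    use std::io;
    use std::path::Path;

    use dyn_clone::DynClone;

    pub trait Vfs: DynClone + Send + Sync {
        /// Read the whole file at `path`, relative to the VFS root.
        fn read(&self, path: &Path) -> io::Result<Vec<u8>>;
        /// Write `contents` to `path`, creating or truncating the file.
        fn write(&self, path: &Path, contents: &[u8]) -> io::Result<()>;
    }

    // Generated impls let `Box<dyn Vfs>` (and `Box<dyn Vfs + Send>`, etc.)
    // implement `Clone`, which plain trait objects cannot do on their own.
    dyn_clone::clone_trait_object!(Vfs);

With such a bound in place, callers can hold a `Box<dyn Vfs>` and duplicate it freely, which is the usual reason to reach for `dyn-clone` when introducing a trait-object-based abstraction.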
@@ -1,1774 +1,1820 @@
1 # This file is automatically @generated by Cargo.
1 # This file is automatically @generated by Cargo.
2 # It is not intended for manual editing.
2 # It is not intended for manual editing.
3 version = 3
3 version = 3
4
4
5 [[package]]
5 [[package]]
6 name = "adler"
6 name = "adler"
7 version = "1.0.2"
7 version = "1.0.2"
8 source = "registry+https://github.com/rust-lang/crates.io-index"
8 source = "registry+https://github.com/rust-lang/crates.io-index"
9 checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
9 checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
10
10
11 [[package]]
11 [[package]]
12 name = "ahash"
12 name = "ahash"
13 version = "0.8.2"
13 version = "0.8.2"
14 source = "registry+https://github.com/rust-lang/crates.io-index"
14 source = "registry+https://github.com/rust-lang/crates.io-index"
15 checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
15 checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107"
16 dependencies = [
16 dependencies = [
17 "cfg-if",
17 "cfg-if",
18 "once_cell",
18 "once_cell",
19 "version_check",
19 "version_check",
20 ]
20 ]
21
21
22 [[package]]
22 [[package]]
23 name = "aho-corasick"
23 name = "aho-corasick"
24 version = "0.7.19"
24 version = "0.7.19"
25 source = "registry+https://github.com/rust-lang/crates.io-index"
25 source = "registry+https://github.com/rust-lang/crates.io-index"
26 checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
26 checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
27 dependencies = [
27 dependencies = [
28 "memchr",
28 "memchr",
29 ]
29 ]
30
30
31 [[package]]
31 [[package]]
32 name = "android-tzdata"
32 name = "android-tzdata"
33 version = "0.1.1"
33 version = "0.1.1"
34 source = "registry+https://github.com/rust-lang/crates.io-index"
34 source = "registry+https://github.com/rust-lang/crates.io-index"
35 checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
35 checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
36
36
37 [[package]]
37 [[package]]
38 name = "android_system_properties"
38 name = "android_system_properties"
39 version = "0.1.5"
39 version = "0.1.5"
40 source = "registry+https://github.com/rust-lang/crates.io-index"
40 source = "registry+https://github.com/rust-lang/crates.io-index"
41 checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
41 checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
42 dependencies = [
42 dependencies = [
43 "libc",
43 "libc",
44 ]
44 ]
45
45
46 [[package]]
46 [[package]]
47 name = "atty"
47 name = "atty"
48 version = "0.2.14"
48 version = "0.2.14"
49 source = "registry+https://github.com/rust-lang/crates.io-index"
49 source = "registry+https://github.com/rust-lang/crates.io-index"
50 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
50 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
51 dependencies = [
51 dependencies = [
52 "hermit-abi",
52 "hermit-abi",
53 "libc",
53 "libc",
54 "winapi",
54 "winapi",
55 ]
55 ]
56
56
57 [[package]]
57 [[package]]
58 name = "autocfg"
58 name = "autocfg"
59 version = "1.1.0"
59 version = "1.1.0"
60 source = "registry+https://github.com/rust-lang/crates.io-index"
60 source = "registry+https://github.com/rust-lang/crates.io-index"
61 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
61 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
62
62
63 [[package]]
63 [[package]]
64 name = "bitflags"
64 name = "bitflags"
65 version = "1.3.2"
65 version = "1.3.2"
66 source = "registry+https://github.com/rust-lang/crates.io-index"
66 source = "registry+https://github.com/rust-lang/crates.io-index"
67 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
67 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
68
68
69 [[package]]
69 [[package]]
70 name = "bitflags"
70 name = "bitflags"
71 version = "2.6.0"
71 version = "2.6.0"
72 source = "registry+https://github.com/rust-lang/crates.io-index"
72 source = "registry+https://github.com/rust-lang/crates.io-index"
73 checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
73 checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
74
74
75 [[package]]
75 [[package]]
76 name = "bitmaps"
76 name = "bitmaps"
77 version = "2.1.0"
77 version = "2.1.0"
78 source = "registry+https://github.com/rust-lang/crates.io-index"
78 source = "registry+https://github.com/rust-lang/crates.io-index"
79 checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
79 checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
80 dependencies = [
80 dependencies = [
81 "typenum",
81 "typenum",
82 ]
82 ]
83
83
84 [[package]]
84 [[package]]
85 name = "bitvec"
85 name = "bitvec"
86 version = "1.0.1"
86 version = "1.0.1"
87 source = "registry+https://github.com/rust-lang/crates.io-index"
87 source = "registry+https://github.com/rust-lang/crates.io-index"
88 checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
88 checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
89 dependencies = [
89 dependencies = [
90 "funty",
90 "funty",
91 "radium",
91 "radium",
92 "tap",
92 "tap",
93 "wyz",
93 "wyz",
94 ]
94 ]
95
95
96 [[package]]
96 [[package]]
97 name = "block-buffer"
97 name = "block-buffer"
98 version = "0.9.0"
98 version = "0.9.0"
99 source = "registry+https://github.com/rust-lang/crates.io-index"
99 source = "registry+https://github.com/rust-lang/crates.io-index"
100 checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
100 checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
101 dependencies = [
101 dependencies = [
102 "generic-array",
102 "generic-array",
103 ]
103 ]
104
104
105 [[package]]
105 [[package]]
106 name = "block-buffer"
106 name = "block-buffer"
107 version = "0.10.3"
107 version = "0.10.3"
108 source = "registry+https://github.com/rust-lang/crates.io-index"
108 source = "registry+https://github.com/rust-lang/crates.io-index"
109 checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
109 checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
110 dependencies = [
110 dependencies = [
111 "generic-array",
111 "generic-array",
112 ]
112 ]
113
113
114 [[package]]
114 [[package]]
115 name = "bstr"
115 name = "bstr"
116 version = "1.6.0"
116 version = "1.6.0"
117 source = "registry+https://github.com/rust-lang/crates.io-index"
117 source = "registry+https://github.com/rust-lang/crates.io-index"
118 checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
118 checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
119 dependencies = [
119 dependencies = [
120 "memchr",
120 "memchr",
121 "regex-automata",
121 "regex-automata",
122 "serde",
122 "serde",
123 ]
123 ]
124
124
125 [[package]]
125 [[package]]
126 name = "bumpalo"
126 name = "bumpalo"
127 version = "3.11.1"
127 version = "3.11.1"
128 source = "registry+https://github.com/rust-lang/crates.io-index"
128 source = "registry+https://github.com/rust-lang/crates.io-index"
129 checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
129 checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
130
130
131 [[package]]
131 [[package]]
132 name = "byteorder"
132 name = "byteorder"
133 version = "1.4.3"
133 version = "1.4.3"
134 source = "registry+https://github.com/rust-lang/crates.io-index"
134 source = "registry+https://github.com/rust-lang/crates.io-index"
135 checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
135 checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
136
136
137 [[package]]
137 [[package]]
138 name = "bytes-cast"
138 name = "bytes-cast"
139 version = "0.3.0"
139 version = "0.3.0"
140 source = "registry+https://github.com/rust-lang/crates.io-index"
140 source = "registry+https://github.com/rust-lang/crates.io-index"
141 checksum = "a20de93b91d7703ca0e39e12930e310acec5ff4d715f4166e0ab026babb352e8"
141 checksum = "a20de93b91d7703ca0e39e12930e310acec5ff4d715f4166e0ab026babb352e8"
142 dependencies = [
142 dependencies = [
143 "bytes-cast-derive",
143 "bytes-cast-derive",
144 ]
144 ]
145
145
146 [[package]]
146 [[package]]
147 name = "bytes-cast-derive"
147 name = "bytes-cast-derive"
148 version = "0.2.0"
148 version = "0.2.0"
149 source = "registry+https://github.com/rust-lang/crates.io-index"
149 source = "registry+https://github.com/rust-lang/crates.io-index"
150 checksum = "7470a6fcce58cde3d62cce758bf71007978b75247e6becd9255c9b884bcb4f71"
150 checksum = "7470a6fcce58cde3d62cce758bf71007978b75247e6becd9255c9b884bcb4f71"
151 dependencies = [
151 dependencies = [
152 "proc-macro2",
152 "proc-macro2",
153 "quote",
153 "quote",
154 "syn",
154 "syn",
155 ]
155 ]
156
156
157 [[package]]
157 [[package]]
158 name = "cc"
158 name = "cc"
159 version = "1.0.76"
159 version = "1.0.76"
160 source = "registry+https://github.com/rust-lang/crates.io-index"
160 source = "registry+https://github.com/rust-lang/crates.io-index"
161 checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
161 checksum = "76a284da2e6fe2092f2353e51713435363112dfd60030e22add80be333fb928f"
162 dependencies = [
162 dependencies = [
163 "jobserver",
163 "jobserver",
164 ]
164 ]
165
165
166 [[package]]
166 [[package]]
167 name = "cfg-if"
167 name = "cfg-if"
168 version = "1.0.0"
168 version = "1.0.0"
169 source = "registry+https://github.com/rust-lang/crates.io-index"
169 source = "registry+https://github.com/rust-lang/crates.io-index"
170 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
170 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
171
171
172 [[package]]
172 [[package]]
173 name = "chrono"
173 name = "chrono"
174 version = "0.4.34"
174 version = "0.4.34"
175 source = "registry+https://github.com/rust-lang/crates.io-index"
175 source = "registry+https://github.com/rust-lang/crates.io-index"
176 checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
176 checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
177 dependencies = [
177 dependencies = [
178 "android-tzdata",
178 "android-tzdata",
179 "iana-time-zone",
179 "iana-time-zone",
180 "js-sys",
180 "js-sys",
181 "num-traits",
181 "num-traits",
182 "wasm-bindgen",
182 "wasm-bindgen",
183 "windows-targets 0.52.0",
183 "windows-targets 0.52.6",
184 ]
184 ]
185
185
186 [[package]]
186 [[package]]
187 name = "clap"
187 name = "clap"
188 version = "4.0.24"
188 version = "4.0.24"
189 source = "registry+https://github.com/rust-lang/crates.io-index"
189 source = "registry+https://github.com/rust-lang/crates.io-index"
190 checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
190 checksum = "60494cedb60cb47462c0ff7be53de32c0e42a6fc2c772184554fa12bd9489c03"
191 dependencies = [
191 dependencies = [
192 "atty",
192 "atty",
193 "bitflags 1.3.2",
193 "bitflags 1.3.2",
194 "clap_derive",
194 "clap_derive",
195 "clap_lex",
195 "clap_lex",
196 "once_cell",
196 "once_cell",
197 "strsim",
197 "strsim",
198 "termcolor",
198 "termcolor",
199 ]
199 ]
200
200
201 [[package]]
201 [[package]]
202 name = "clap_derive"
202 name = "clap_derive"
203 version = "4.0.21"
203 version = "4.0.21"
204 source = "registry+https://github.com/rust-lang/crates.io-index"
204 source = "registry+https://github.com/rust-lang/crates.io-index"
205 checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
205 checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
206 dependencies = [
206 dependencies = [
207 "heck",
207 "heck",
208 "proc-macro-error",
208 "proc-macro-error",
209 "proc-macro2",
209 "proc-macro2",
210 "quote",
210 "quote",
211 "syn",
211 "syn",
212 ]
212 ]
213
213
214 [[package]]
214 [[package]]
215 name = "clap_lex"
215 name = "clap_lex"
216 version = "0.3.0"
216 version = "0.3.0"
217 source = "registry+https://github.com/rust-lang/crates.io-index"
217 source = "registry+https://github.com/rust-lang/crates.io-index"
218 checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
218 checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8"
219 dependencies = [
219 dependencies = [
220 "os_str_bytes",
220 "os_str_bytes",
221 ]
221 ]
222
222
223 [[package]]
223 [[package]]
224 name = "codespan-reporting"
224 name = "codespan-reporting"
225 version = "0.11.1"
225 version = "0.11.1"
226 source = "registry+https://github.com/rust-lang/crates.io-index"
226 source = "registry+https://github.com/rust-lang/crates.io-index"
227 checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
227 checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
228 dependencies = [
228 dependencies = [
229 "termcolor",
229 "termcolor",
230 "unicode-width",
230 "unicode-width",
231 ]
231 ]
232
232
233 [[package]]
233 [[package]]
234 name = "convert_case"
234 name = "convert_case"
235 version = "0.4.0"
235 version = "0.4.0"
236 source = "registry+https://github.com/rust-lang/crates.io-index"
236 source = "registry+https://github.com/rust-lang/crates.io-index"
237 checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
237 checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
238
238
239 [[package]]
239 [[package]]
240 name = "core-foundation-sys"
240 name = "core-foundation-sys"
241 version = "0.8.3"
241 version = "0.8.3"
242 source = "registry+https://github.com/rust-lang/crates.io-index"
242 source = "registry+https://github.com/rust-lang/crates.io-index"
243 checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
243 checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
244
244
245 [[package]]
245 [[package]]
246 name = "cpufeatures"
246 name = "cpufeatures"
247 version = "0.2.5"
247 version = "0.2.5"
248 source = "registry+https://github.com/rust-lang/crates.io-index"
248 source = "registry+https://github.com/rust-lang/crates.io-index"
249 checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
249 checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
250 dependencies = [
250 dependencies = [
251 "libc",
251 "libc",
252 ]
252 ]
253
253
254 [[package]]
254 [[package]]
255 name = "cpython"
255 name = "cpython"
256 version = "0.7.2"
256 version = "0.7.2"
257 source = "registry+https://github.com/rust-lang/crates.io-index"
257 source = "registry+https://github.com/rust-lang/crates.io-index"
258 checksum = "43b398a2c65baaf5892f10bb69b52508bf7a993380cc4ecd3785aaebb5c79389"
258 checksum = "43b398a2c65baaf5892f10bb69b52508bf7a993380cc4ecd3785aaebb5c79389"
259 dependencies = [
259 dependencies = [
260 "libc",
260 "libc",
261 "num-traits",
261 "num-traits",
262 "paste",
262 "paste",
263 "python3-sys",
263 "python3-sys",
264 ]
264 ]
265
265
266 [[package]]
266 [[package]]
267 name = "crc32fast"
267 name = "crc32fast"
268 version = "1.3.2"
268 version = "1.3.2"
269 source = "registry+https://github.com/rust-lang/crates.io-index"
269 source = "registry+https://github.com/rust-lang/crates.io-index"
270 checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
270 checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
271 dependencies = [
271 dependencies = [
272 "cfg-if",
272 "cfg-if",
273 ]
273 ]
274
274
275 [[package]]
275 [[package]]
276 name = "crossbeam-channel"
276 name = "crossbeam-channel"
277 version = "0.5.6"
277 version = "0.5.6"
278 source = "registry+https://github.com/rust-lang/crates.io-index"
278 source = "registry+https://github.com/rust-lang/crates.io-index"
279 checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
279 checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
280 dependencies = [
280 dependencies = [
281 "cfg-if",
281 "cfg-if",
282 "crossbeam-utils",
282 "crossbeam-utils",
283 ]
283 ]
284
284
285 [[package]]
285 [[package]]
286 name = "crossbeam-deque"
286 name = "crossbeam-deque"
287 version = "0.8.2"
287 version = "0.8.2"
288 source = "registry+https://github.com/rust-lang/crates.io-index"
288 source = "registry+https://github.com/rust-lang/crates.io-index"
289 checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
289 checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
290 dependencies = [
290 dependencies = [
291 "cfg-if",
291 "cfg-if",
292 "crossbeam-epoch",
292 "crossbeam-epoch",
293 "crossbeam-utils",
293 "crossbeam-utils",
294 ]
294 ]
295
295
296 [[package]]
296 [[package]]
297 name = "crossbeam-epoch"
297 name = "crossbeam-epoch"
298 version = "0.9.11"
298 version = "0.9.11"
299 source = "registry+https://github.com/rust-lang/crates.io-index"
299 source = "registry+https://github.com/rust-lang/crates.io-index"
300 checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
300 checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
301 dependencies = [
301 dependencies = [
302 "autocfg",
302 "autocfg",
303 "cfg-if",
303 "cfg-if",
304 "crossbeam-utils",
304 "crossbeam-utils",
305 "memoffset",
305 "memoffset",
306 "scopeguard",
306 "scopeguard",
307 ]
307 ]
308
308
309 [[package]]
309 [[package]]
310 name = "crossbeam-utils"
310 name = "crossbeam-utils"
311 version = "0.8.12"
311 version = "0.8.12"
312 source = "registry+https://github.com/rust-lang/crates.io-index"
312 source = "registry+https://github.com/rust-lang/crates.io-index"
313 checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
313 checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
314 dependencies = [
314 dependencies = [
315 "cfg-if",
315 "cfg-if",
316 ]
316 ]
317
317
318 [[package]]
318 [[package]]
319 name = "crypto-common"
319 name = "crypto-common"
320 version = "0.1.6"
320 version = "0.1.6"
321 source = "registry+https://github.com/rust-lang/crates.io-index"
321 source = "registry+https://github.com/rust-lang/crates.io-index"
322 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
322 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
323 dependencies = [
323 dependencies = [
324 "generic-array",
324 "generic-array",
325 "typenum",
325 "typenum",
326 ]
326 ]
327
327
328 [[package]]
328 [[package]]
329 name = "ctor"
329 name = "ctor"
330 version = "0.1.26"
330 version = "0.1.26"
331 source = "registry+https://github.com/rust-lang/crates.io-index"
331 source = "registry+https://github.com/rust-lang/crates.io-index"
332 checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
332 checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096"
333 dependencies = [
333 dependencies = [
334 "quote",
334 "quote",
335 "syn",
335 "syn",
336 ]
336 ]
337
337
338 [[package]]
338 [[package]]
339 name = "cxx"
339 name = "cxx"
340 version = "1.0.81"
340 version = "1.0.81"
341 source = "registry+https://github.com/rust-lang/crates.io-index"
341 source = "registry+https://github.com/rust-lang/crates.io-index"
342 checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
342 checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
343 dependencies = [
343 dependencies = [
344 "cc",
344 "cc",
345 "cxxbridge-flags",
345 "cxxbridge-flags",
346 "cxxbridge-macro",
346 "cxxbridge-macro",
347 "link-cplusplus",
347 "link-cplusplus",
348 ]
348 ]
349
349
350 [[package]]
350 [[package]]
351 name = "cxx-build"
351 name = "cxx-build"
352 version = "1.0.81"
352 version = "1.0.81"
353 source = "registry+https://github.com/rust-lang/crates.io-index"
353 source = "registry+https://github.com/rust-lang/crates.io-index"
354 checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
354 checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
355 dependencies = [
355 dependencies = [
356 "cc",
356 "cc",
357 "codespan-reporting",
357 "codespan-reporting",
358 "once_cell",
358 "once_cell",
359 "proc-macro2",
359 "proc-macro2",
360 "quote",
360 "quote",
361 "scratch",
361 "scratch",
362 "syn",
362 "syn",
363 ]
363 ]
364
364
365 [[package]]
365 [[package]]
366 name = "cxxbridge-flags"
366 name = "cxxbridge-flags"
367 version = "1.0.81"
367 version = "1.0.81"
368 source = "registry+https://github.com/rust-lang/crates.io-index"
368 source = "registry+https://github.com/rust-lang/crates.io-index"
369 checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
369 checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
370
370
371 [[package]]
371 [[package]]
372 name = "cxxbridge-macro"
372 name = "cxxbridge-macro"
373 version = "1.0.81"
373 version = "1.0.81"
374 source = "registry+https://github.com/rust-lang/crates.io-index"
374 source = "registry+https://github.com/rust-lang/crates.io-index"
375 checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
375 checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
376 dependencies = [
376 dependencies = [
377 "proc-macro2",
377 "proc-macro2",
378 "quote",
378 "quote",
379 "syn",
379 "syn",
380 ]
380 ]
381
381
382 [[package]]
382 [[package]]
383 name = "derive_more"
383 name = "derive_more"
384 version = "0.99.17"
384 version = "0.99.17"
385 source = "registry+https://github.com/rust-lang/crates.io-index"
385 source = "registry+https://github.com/rust-lang/crates.io-index"
386 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
386 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
387 dependencies = [
387 dependencies = [
388 "convert_case",
388 "convert_case",
389 "proc-macro2",
389 "proc-macro2",
390 "quote",
390 "quote",
391 "rustc_version",
391 "rustc_version",
392 "syn",
392 "syn",
393 ]
393 ]
394
394
395 [[package]]
395 [[package]]
396 name = "diff"
396 name = "diff"
397 version = "0.1.13"
397 version = "0.1.13"
398 source = "registry+https://github.com/rust-lang/crates.io-index"
398 source = "registry+https://github.com/rust-lang/crates.io-index"
399 checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
399 checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
400
400
401 [[package]]
401 [[package]]
402 name = "digest"
402 name = "digest"
403 version = "0.9.0"
403 version = "0.9.0"
404 source = "registry+https://github.com/rust-lang/crates.io-index"
404 source = "registry+https://github.com/rust-lang/crates.io-index"
405 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
405 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
406 dependencies = [
406 dependencies = [
407 "generic-array",
407 "generic-array",
408 ]
408 ]
409
409
410 [[package]]
410 [[package]]
411 name = "digest"
411 name = "digest"
412 version = "0.10.5"
412 version = "0.10.5"
413 source = "registry+https://github.com/rust-lang/crates.io-index"
413 source = "registry+https://github.com/rust-lang/crates.io-index"
414 checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
414 checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
415 dependencies = [
415 dependencies = [
416 "block-buffer 0.10.3",
416 "block-buffer 0.10.3",
417 "crypto-common",
417 "crypto-common",
418 ]
418 ]
419
419
420 [[package]]
420 [[package]]
421 name = "dirs"
421 name = "dirs"
422 version = "5.0.1"
422 version = "5.0.1"
423 source = "registry+https://github.com/rust-lang/crates.io-index"
423 source = "registry+https://github.com/rust-lang/crates.io-index"
424 checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
424 checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
425 dependencies = [
425 dependencies = [
426 "dirs-sys",
426 "dirs-sys",
427 ]
427 ]
428
428
429 [[package]]
429 [[package]]
430 name = "dirs-sys"
430 name = "dirs-sys"
431 version = "0.4.1"
431 version = "0.4.1"
432 source = "registry+https://github.com/rust-lang/crates.io-index"
432 source = "registry+https://github.com/rust-lang/crates.io-index"
433 checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
433 checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
434 dependencies = [
434 dependencies = [
435 "libc",
435 "libc",
436 "option-ext",
436 "option-ext",
437 "redox_users",
437 "redox_users",
438 "windows-sys",
438 "windows-sys 0.48.0",
439 ]
439 ]
440
440
441 [[package]]
441 [[package]]
442 name = "dyn-clone"
443 version = "1.0.17"
444 source = "registry+https://github.com/rust-lang/crates.io-index"
445 checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
446
447 [[package]]
442 name = "either"
448 name = "either"
443 version = "1.8.0"
449 version = "1.8.0"
444 source = "registry+https://github.com/rust-lang/crates.io-index"
450 source = "registry+https://github.com/rust-lang/crates.io-index"
445 checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
451 checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
446
452
447 [[package]]
453 [[package]]
448 name = "env_logger"
454 name = "env_logger"
449 version = "0.9.3"
455 version = "0.9.3"
450 source = "registry+https://github.com/rust-lang/crates.io-index"
456 source = "registry+https://github.com/rust-lang/crates.io-index"
451 checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
457 checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7"
452 dependencies = [
458 dependencies = [
453 "atty",
459 "atty",
454 "humantime",
460 "humantime",
455 "log",
461 "log",
456 "regex",
462 "regex",
457 "termcolor",
463 "termcolor",
458 ]
464 ]
459
465
460 [[package]]
466 [[package]]
461 name = "fastrand"
467 name = "fastrand"
462 version = "1.8.0"
468 version = "1.8.0"
463 source = "registry+https://github.com/rust-lang/crates.io-index"
469 source = "registry+https://github.com/rust-lang/crates.io-index"
464 checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
470 checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
465 dependencies = [
471 dependencies = [
466 "instant",
472 "instant",
467 ]
473 ]
468
474
469 [[package]]
475 [[package]]
476 name = "filetime"
477 version = "0.2.25"
478 source = "registry+https://github.com/rust-lang/crates.io-index"
479 checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
480 dependencies = [
481 "cfg-if",
482 "libc",
483 "libredox",
484 "windows-sys 0.59.0",
485 ]
486
487 [[package]]
470 name = "flate2"
488 name = "flate2"
471 version = "1.0.24"
489 version = "1.0.24"
472 source = "registry+https://github.com/rust-lang/crates.io-index"
490 source = "registry+https://github.com/rust-lang/crates.io-index"
473 checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
491 checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
474 dependencies = [
492 dependencies = [
475 "crc32fast",
493 "crc32fast",
476 "libz-sys",
494 "libz-sys",
477 "miniz_oxide",
495 "miniz_oxide",
478 ]
496 ]
479
497
480 [[package]]
498 [[package]]
481 name = "format-bytes"
499 name = "format-bytes"
482 version = "0.3.0"
500 version = "0.3.0"
483 source = "registry+https://github.com/rust-lang/crates.io-index"
501 source = "registry+https://github.com/rust-lang/crates.io-index"
484 checksum = "48942366ef93975da38e175ac9e10068c6fc08ca9e85930d4f098f4d5b14c2fd"
502 checksum = "48942366ef93975da38e175ac9e10068c6fc08ca9e85930d4f098f4d5b14c2fd"
485 dependencies = [
503 dependencies = [
486 "format-bytes-macros",
504 "format-bytes-macros",
487 ]
505 ]
488
506
489 [[package]]
507 [[package]]
490 name = "format-bytes-macros"
508 name = "format-bytes-macros"
491 version = "0.4.0"
509 version = "0.4.0"
492 source = "registry+https://github.com/rust-lang/crates.io-index"
510 source = "registry+https://github.com/rust-lang/crates.io-index"
493 checksum = "203aadebefcc73d12038296c228eabf830f99cba991b0032adf20e9fa6ce7e4f"
511 checksum = "203aadebefcc73d12038296c228eabf830f99cba991b0032adf20e9fa6ce7e4f"
494 dependencies = [
512 dependencies = [
495 "proc-macro2",
513 "proc-macro2",
496 "quote",
514 "quote",
497 "syn",
515 "syn",
498 ]
516 ]
499
517
500 [[package]]
518 [[package]]
501 name = "funty"
519 name = "funty"
502 version = "2.0.0"
520 version = "2.0.0"
503 source = "registry+https://github.com/rust-lang/crates.io-index"
521 source = "registry+https://github.com/rust-lang/crates.io-index"
504 checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
522 checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
505
523
506 [[package]]
524 [[package]]
507 name = "generic-array"
525 name = "generic-array"
508 version = "0.14.6"
526 version = "0.14.6"
509 source = "registry+https://github.com/rust-lang/crates.io-index"
527 source = "registry+https://github.com/rust-lang/crates.io-index"
510 checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
528 checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
511 dependencies = [
529 dependencies = [
512 "typenum",
530 "typenum",
513 "version_check",
531 "version_check",
514 ]
532 ]
515
533
516 [[package]]
534 [[package]]
517 name = "getrandom"
535 name = "getrandom"
518 version = "0.1.16"
536 version = "0.1.16"
519 source = "registry+https://github.com/rust-lang/crates.io-index"
537 source = "registry+https://github.com/rust-lang/crates.io-index"
520 checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
538 checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
521 dependencies = [
539 dependencies = [
522 "cfg-if",
540 "cfg-if",
523 "libc",
541 "libc",
524 "wasi 0.9.0+wasi-snapshot-preview1",
542 "wasi 0.9.0+wasi-snapshot-preview1",
525 ]
543 ]
526
544
527 [[package]]
545 [[package]]
528 name = "getrandom"
546 name = "getrandom"
529 version = "0.2.8"
547 version = "0.2.8"
530 source = "registry+https://github.com/rust-lang/crates.io-index"
548 source = "registry+https://github.com/rust-lang/crates.io-index"
531 checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
549 checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
532 dependencies = [
550 dependencies = [
533 "cfg-if",
551 "cfg-if",
534 "libc",
552 "libc",
535 "wasi 0.11.0+wasi-snapshot-preview1",
553 "wasi 0.11.0+wasi-snapshot-preview1",
536 ]
554 ]
537
555
538 [[package]]
556 [[package]]
539 name = "hashbrown"
557 name = "hashbrown"
540 version = "0.12.3"
558 version = "0.12.3"
541 source = "registry+https://github.com/rust-lang/crates.io-index"
559 source = "registry+https://github.com/rust-lang/crates.io-index"
542 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
560 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
543
561
544 [[package]]
562 [[package]]
545 name = "hashbrown"
563 name = "hashbrown"
546 version = "0.13.1"
564 version = "0.13.1"
547 source = "registry+https://github.com/rust-lang/crates.io-index"
565 source = "registry+https://github.com/rust-lang/crates.io-index"
548 checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
566 checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
549 dependencies = [
567 dependencies = [
550 "ahash",
568 "ahash",
551 "rayon",
569 "rayon",
552 ]
570 ]
553
571
554 [[package]]
572 [[package]]
555 name = "heck"
573 name = "heck"
556 version = "0.4.0"
574 version = "0.4.0"
557 source = "registry+https://github.com/rust-lang/crates.io-index"
575 source = "registry+https://github.com/rust-lang/crates.io-index"
558 checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
576 checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
559
577
560 [[package]]
578 [[package]]
561 name = "hermit-abi"
579 name = "hermit-abi"
562 version = "0.1.19"
580 version = "0.1.19"
563 source = "registry+https://github.com/rust-lang/crates.io-index"
581 source = "registry+https://github.com/rust-lang/crates.io-index"
564 checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
582 checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
565 dependencies = [
583 dependencies = [
566 "libc",
584 "libc",
567 ]
585 ]
568
586
569 [[package]]
587 [[package]]
570 name = "hex"
588 name = "hex"
571 version = "0.4.3"
589 version = "0.4.3"
572 source = "registry+https://github.com/rust-lang/crates.io-index"
590 source = "registry+https://github.com/rust-lang/crates.io-index"
573 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
591 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
574
592
575 [[package]]
593 [[package]]
576 name = "hg-core"
594 name = "hg-core"
577 version = "0.1.0"
595 version = "0.1.0"
578 dependencies = [
596 dependencies = [
579 "bitflags 1.3.2",
597 "bitflags 1.3.2",
580 "bitvec",
598 "bitvec",
581 "byteorder",
599 "byteorder",
582 "bytes-cast",
600 "bytes-cast",
583 "chrono",
601 "chrono",
584 "clap",
602 "clap",
585 "crossbeam-channel",
603 "crossbeam-channel",
586 "derive_more",
604 "derive_more",
605 "dyn-clone",
606 "filetime",
587 "flate2",
607 "flate2",
588 "format-bytes",
608 "format-bytes",
589 "hashbrown 0.13.1",
609 "hashbrown 0.13.1",
590 "home",
610 "home",
591 "im-rc",
611 "im-rc",
592 "itertools",
612 "itertools",
593 "lazy_static",
613 "lazy_static",
594 "libc",
614 "libc",
595 "log",
615 "log",
596 "logging_timer",
616 "logging_timer",
597 "memmap2",
617 "memmap2",
598 "once_cell",
618 "once_cell",
599 "pretty_assertions",
619 "pretty_assertions",
600 "rand 0.8.5",
620 "rand 0.8.5",
601 "rand_distr",
621 "rand_distr",
602 "rand_pcg",
622 "rand_pcg",
603 "rayon",
623 "rayon",
604 "regex",
624 "regex",
605 "same-file",
625 "same-file",
606 "self_cell",
626 "self_cell",
607 "serde",
627 "serde",
608 "sha-1 0.10.0",
628 "sha-1 0.10.0",
609 "tempfile",
629 "tempfile",
610 "thread_local",
630 "thread_local",
611 "toml",
631 "toml",
612 "twox-hash",
632 "twox-hash",
613 "zstd",
633 "zstd",
614 ]
634 ]
615
635
616 [[package]]
636 [[package]]
617 name = "hg-cpython"
637 name = "hg-cpython"
618 version = "0.1.0"
638 version = "0.1.0"
619 dependencies = [
639 dependencies = [
620 "cpython",
640 "cpython",
621 "crossbeam-channel",
641 "crossbeam-channel",
622 "env_logger",
642 "env_logger",
623 "hg-core",
643 "hg-core",
624 "libc",
644 "libc",
625 "log",
645 "log",
626 "stable_deref_trait",
646 "stable_deref_trait",
627 "vcsgraph",
647 "vcsgraph",
628 ]
648 ]
629
649
630 [[package]]
650 [[package]]
631 name = "home"
651 name = "home"
632 version = "0.5.4"
652 version = "0.5.4"
633 source = "registry+https://github.com/rust-lang/crates.io-index"
653 source = "registry+https://github.com/rust-lang/crates.io-index"
634 checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
654 checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
635 dependencies = [
655 dependencies = [
636 "winapi",
656 "winapi",
637 ]
657 ]
638
658
639 [[package]]
659 [[package]]
640 name = "humantime"
660 name = "humantime"
641 version = "2.1.0"
661 version = "2.1.0"
642 source = "registry+https://github.com/rust-lang/crates.io-index"
662 source = "registry+https://github.com/rust-lang/crates.io-index"
643 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
663 checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
644
664
645 [[package]]
665 [[package]]
646 name = "iana-time-zone"
666 name = "iana-time-zone"
647 version = "0.1.53"
667 version = "0.1.53"
648 source = "registry+https://github.com/rust-lang/crates.io-index"
668 source = "registry+https://github.com/rust-lang/crates.io-index"
649 checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
669 checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
650 dependencies = [
670 dependencies = [
651 "android_system_properties",
671 "android_system_properties",
652 "core-foundation-sys",
672 "core-foundation-sys",
653 "iana-time-zone-haiku",
673 "iana-time-zone-haiku",
654 "js-sys",
674 "js-sys",
655 "wasm-bindgen",
675 "wasm-bindgen",
656 "winapi",
676 "winapi",
657 ]
677 ]
658
678
659 [[package]]
679 [[package]]
660 name = "iana-time-zone-haiku"
680 name = "iana-time-zone-haiku"
661 version = "0.1.1"
681 version = "0.1.1"
662 source = "registry+https://github.com/rust-lang/crates.io-index"
682 source = "registry+https://github.com/rust-lang/crates.io-index"
663 checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
683 checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
664 dependencies = [
684 dependencies = [
665 "cxx",
685 "cxx",
666 "cxx-build",
686 "cxx-build",
667 ]
687 ]
668
688
669 [[package]]
689 [[package]]
670 name = "im-rc"
690 name = "im-rc"
671 version = "15.1.0"
691 version = "15.1.0"
672 source = "registry+https://github.com/rust-lang/crates.io-index"
692 source = "registry+https://github.com/rust-lang/crates.io-index"
673 checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
693 checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe"
674 dependencies = [
694 dependencies = [
675 "bitmaps",
695 "bitmaps",
676 "rand_core 0.6.4",
696 "rand_core 0.6.4",
677 "rand_xoshiro",
697 "rand_xoshiro",
678 "sized-chunks",
698 "sized-chunks",
679 "typenum",
699 "typenum",
680 "version_check",
700 "version_check",
681 ]
701 ]
682
702
683 [[package]]
703 [[package]]
684 name = "indexmap"
704 name = "indexmap"
685 version = "1.9.2"
705 version = "1.9.2"
686 source = "registry+https://github.com/rust-lang/crates.io-index"
706 source = "registry+https://github.com/rust-lang/crates.io-index"
687 checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
707 checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
688 dependencies = [
708 dependencies = [
689 "autocfg",
709 "autocfg",
690 "hashbrown 0.12.3",
710 "hashbrown 0.12.3",
691 ]
711 ]
692
712
693 [[package]]
713 [[package]]
694 name = "instant"
714 name = "instant"
695 version = "0.1.12"
715 version = "0.1.12"
696 source = "registry+https://github.com/rust-lang/crates.io-index"
716 source = "registry+https://github.com/rust-lang/crates.io-index"
697 checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
717 checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
698 dependencies = [
718 dependencies = [
699 "cfg-if",
719 "cfg-if",
700 ]
720 ]
701
721
702 [[package]]
722 [[package]]
703 name = "itertools"
723 name = "itertools"
704 version = "0.10.5"
724 version = "0.10.5"
705 source = "registry+https://github.com/rust-lang/crates.io-index"
725 source = "registry+https://github.com/rust-lang/crates.io-index"
706 checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
726 checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
707 dependencies = [
727 dependencies = [
708 "either",
728 "either",
709 ]
729 ]
710
730
711 [[package]]
731 [[package]]
712 name = "jobserver"
732 name = "jobserver"
713 version = "0.1.25"
733 version = "0.1.25"
714 source = "registry+https://github.com/rust-lang/crates.io-index"
734 source = "registry+https://github.com/rust-lang/crates.io-index"
715 checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
735 checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b"
716 dependencies = [
736 dependencies = [
717 "libc",
737 "libc",
718 ]
738 ]
719
739
720 [[package]]
740 [[package]]
721 name = "js-sys"
741 name = "js-sys"
722 version = "0.3.60"
742 version = "0.3.60"
723 source = "registry+https://github.com/rust-lang/crates.io-index"
743 source = "registry+https://github.com/rust-lang/crates.io-index"
724 checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
744 checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
725 dependencies = [
745 dependencies = [
726 "wasm-bindgen",
746 "wasm-bindgen",
727 ]
747 ]
728
748
729 [[package]]
749 [[package]]
730 name = "lazy_static"
750 name = "lazy_static"
731 version = "1.4.0"
751 version = "1.4.0"
732 source = "registry+https://github.com/rust-lang/crates.io-index"
752 source = "registry+https://github.com/rust-lang/crates.io-index"
733 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
753 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
734
754
735 [[package]]
755 [[package]]
736 name = "libc"
756 name = "libc"
737 version = "0.2.155"
757 version = "0.2.155"
738 source = "registry+https://github.com/rust-lang/crates.io-index"
758 source = "registry+https://github.com/rust-lang/crates.io-index"
739 checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
759 checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
740
760
741 [[package]]
761 [[package]]
742 name = "libm"
762 name = "libm"
743 version = "0.2.6"
763 version = "0.2.6"
744 source = "registry+https://github.com/rust-lang/crates.io-index"
764 source = "registry+https://github.com/rust-lang/crates.io-index"
745 checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
765 checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
746
766
747 [[package]]
767 [[package]]
748 name = "libredox"
768 name = "libredox"
749 version = "0.1.3"
769 version = "0.1.3"
750 source = "registry+https://github.com/rust-lang/crates.io-index"
770 source = "registry+https://github.com/rust-lang/crates.io-index"
751 checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
771 checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
752 dependencies = [
772 dependencies = [
753 "bitflags 2.6.0",
773 "bitflags 2.6.0",
754 "libc",
774 "libc",
775 "redox_syscall 0.5.3",
755 ]
776 ]
756
777
757 [[package]]
778 [[package]]
758 name = "libz-sys"
779 name = "libz-sys"
759 version = "1.1.8"
780 version = "1.1.8"
760 source = "registry+https://github.com/rust-lang/crates.io-index"
781 source = "registry+https://github.com/rust-lang/crates.io-index"
761 checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
782 checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
762 dependencies = [
783 dependencies = [
763 "cc",
784 "cc",
764 "pkg-config",
785 "pkg-config",
765 "vcpkg",
786 "vcpkg",
766 ]
787 ]
767
788
768 [[package]]
789 [[package]]
769 name = "link-cplusplus"
790 name = "link-cplusplus"
770 version = "1.0.7"
791 version = "1.0.7"
771 source = "registry+https://github.com/rust-lang/crates.io-index"
792 source = "registry+https://github.com/rust-lang/crates.io-index"
772 checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
793 checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
773 dependencies = [
794 dependencies = [
774 "cc",
795 "cc",
775 ]
796 ]
776
797
777 [[package]]
798 [[package]]
778 name = "log"
799 name = "log"
779 version = "0.4.17"
800 version = "0.4.17"
780 source = "registry+https://github.com/rust-lang/crates.io-index"
801 source = "registry+https://github.com/rust-lang/crates.io-index"
781 checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
802 checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
782 dependencies = [
803 dependencies = [
783 "cfg-if",
804 "cfg-if",
784 ]
805 ]
785
806
786 [[package]]
807 [[package]]
787 name = "logging_timer"
808 name = "logging_timer"
788 version = "1.1.0"
809 version = "1.1.0"
789 source = "registry+https://github.com/rust-lang/crates.io-index"
810 source = "registry+https://github.com/rust-lang/crates.io-index"
790 checksum = "64e96f261d684b7089aa576bb74e823241dccd994b27d30fabf1dcb3af284fe9"
811 checksum = "64e96f261d684b7089aa576bb74e823241dccd994b27d30fabf1dcb3af284fe9"
791 dependencies = [
812 dependencies = [
792 "log",
813 "log",
793 "logging_timer_proc_macros",
814 "logging_timer_proc_macros",
794 ]
815 ]
795
816
796 [[package]]
817 [[package]]
797 name = "logging_timer_proc_macros"
818 name = "logging_timer_proc_macros"
798 version = "1.1.0"
819 version = "1.1.0"
799 source = "registry+https://github.com/rust-lang/crates.io-index"
820 source = "registry+https://github.com/rust-lang/crates.io-index"
800 checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81"
821 checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81"
801 dependencies = [
822 dependencies = [
802 "log",
823 "log",
803 "proc-macro2",
824 "proc-macro2",
804 "quote",
825 "quote",
805 "syn",
826 "syn",
806 ]
827 ]
807
828
808 [[package]]
829 [[package]]
809 name = "memchr"
830 name = "memchr"
810 version = "2.5.0"
831 version = "2.5.0"
811 source = "registry+https://github.com/rust-lang/crates.io-index"
832 source = "registry+https://github.com/rust-lang/crates.io-index"
812 checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
833 checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
813
834
814 [[package]]
835 [[package]]
815 name = "memmap2"
836 name = "memmap2"
816 version = "0.5.8"
837 version = "0.5.8"
817 source = "registry+https://github.com/rust-lang/crates.io-index"
838 source = "registry+https://github.com/rust-lang/crates.io-index"
818 checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
839 checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
819 dependencies = [
840 dependencies = [
820 "libc",
841 "libc",
821 "stable_deref_trait",
842 "stable_deref_trait",
822 ]
843 ]
823
844
824 [[package]]
845 [[package]]
825 name = "memoffset"
846 name = "memoffset"
826 version = "0.6.5"
847 version = "0.6.5"
827 source = "registry+https://github.com/rust-lang/crates.io-index"
848 source = "registry+https://github.com/rust-lang/crates.io-index"
828 checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
849 checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
829 dependencies = [
850 dependencies = [
830 "autocfg",
851 "autocfg",
831 ]
852 ]
832
853
833 [[package]]
854 [[package]]
834 name = "miniz_oxide"
855 name = "miniz_oxide"
835 version = "0.5.4"
856 version = "0.5.4"
836 source = "registry+https://github.com/rust-lang/crates.io-index"
857 source = "registry+https://github.com/rust-lang/crates.io-index"
837 checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
858 checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
838 dependencies = [
859 dependencies = [
839 "adler",
860 "adler",
840 ]
861 ]
841
862
842 [[package]]
863 [[package]]
843 name = "nom8"
864 name = "nom8"
844 version = "0.2.0"
865 version = "0.2.0"
845 source = "registry+https://github.com/rust-lang/crates.io-index"
866 source = "registry+https://github.com/rust-lang/crates.io-index"
846 checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8"
867 checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8"
847 dependencies = [
868 dependencies = [
848 "memchr",
869 "memchr",
849 ]
870 ]
850
871
851 [[package]]
872 [[package]]
852 name = "num-traits"
873 name = "num-traits"
853 version = "0.2.15"
874 version = "0.2.15"
854 source = "registry+https://github.com/rust-lang/crates.io-index"
875 source = "registry+https://github.com/rust-lang/crates.io-index"
855 checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
876 checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
856 dependencies = [
877 dependencies = [
857 "autocfg",
878 "autocfg",
858 "libm",
879 "libm",
859 ]
880 ]
860
881
861 [[package]]
882 [[package]]
862 name = "num_cpus"
883 name = "num_cpus"
863 version = "1.14.0"
884 version = "1.14.0"
864 source = "registry+https://github.com/rust-lang/crates.io-index"
885 source = "registry+https://github.com/rust-lang/crates.io-index"
865 checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
886 checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
866 dependencies = [
887 dependencies = [
867 "hermit-abi",
888 "hermit-abi",
868 "libc",
889 "libc",
869 ]
890 ]
870
891
871 [[package]]
892 [[package]]
872 name = "once_cell"
893 name = "once_cell"
873 version = "1.16.0"
894 version = "1.16.0"
874 source = "registry+https://github.com/rust-lang/crates.io-index"
895 source = "registry+https://github.com/rust-lang/crates.io-index"
875 checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
896 checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
876
897
877 [[package]]
898 [[package]]
878 name = "opaque-debug"
899 name = "opaque-debug"
879 version = "0.3.0"
900 version = "0.3.0"
880 source = "registry+https://github.com/rust-lang/crates.io-index"
901 source = "registry+https://github.com/rust-lang/crates.io-index"
881 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
902 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
882
903
883 [[package]]
904 [[package]]
884 name = "option-ext"
905 name = "option-ext"
885 version = "0.2.0"
906 version = "0.2.0"
886 source = "registry+https://github.com/rust-lang/crates.io-index"
907 source = "registry+https://github.com/rust-lang/crates.io-index"
887 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
908 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
888
909
889 [[package]]
910 [[package]]
890 name = "os_str_bytes"
911 name = "os_str_bytes"
891 version = "6.4.0"
912 version = "6.4.0"
892 source = "registry+https://github.com/rust-lang/crates.io-index"
913 source = "registry+https://github.com/rust-lang/crates.io-index"
893 checksum = "7b5bf27447411e9ee3ff51186bf7a08e16c341efdde93f4d823e8844429bed7e"
914 checksum = "7b5bf27447411e9ee3ff51186bf7a08e16c341efdde93f4d823e8844429bed7e"
894 dependencies = [
915 dependencies = [
895 "memchr",
916 "memchr",
896 ]
917 ]
897
918
898 [[package]]
919 [[package]]
899 name = "output_vt100"
920 name = "output_vt100"
900 version = "0.1.3"
921 version = "0.1.3"
901 source = "registry+https://github.com/rust-lang/crates.io-index"
922 source = "registry+https://github.com/rust-lang/crates.io-index"
902 checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
923 checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
903 dependencies = [
924 dependencies = [
904 "winapi",
925 "winapi",
905 ]
926 ]
906
927
907 [[package]]
928 [[package]]
908 name = "paste"
929 name = "paste"
909 version = "1.0.9"
930 version = "1.0.9"
910 source = "registry+https://github.com/rust-lang/crates.io-index"
931 source = "registry+https://github.com/rust-lang/crates.io-index"
911 checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
932 checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
912
933
913 [[package]]
934 [[package]]
914 name = "pkg-config"
935 name = "pkg-config"
915 version = "0.3.26"
936 version = "0.3.26"
916 source = "registry+https://github.com/rust-lang/crates.io-index"
937 source = "registry+https://github.com/rust-lang/crates.io-index"
917 checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
938 checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
918
939
919 [[package]]
940 [[package]]
920 name = "ppv-lite86"
941 name = "ppv-lite86"
921 version = "0.2.17"
942 version = "0.2.17"
922 source = "registry+https://github.com/rust-lang/crates.io-index"
943 source = "registry+https://github.com/rust-lang/crates.io-index"
923 checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
944 checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
924
945
925 [[package]]
946 [[package]]
926 name = "pretty_assertions"
947 name = "pretty_assertions"
927 version = "1.3.0"
948 version = "1.3.0"
928 source = "registry+https://github.com/rust-lang/crates.io-index"
949 source = "registry+https://github.com/rust-lang/crates.io-index"
929 checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
950 checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
930 dependencies = [
951 dependencies = [
931 "ctor",
952 "ctor",
932 "diff",
953 "diff",
933 "output_vt100",
954 "output_vt100",
934 "yansi",
955 "yansi",
935 ]
956 ]
936
957
937 [[package]]
958 [[package]]
938 name = "proc-macro-error"
959 name = "proc-macro-error"
939 version = "1.0.4"
960 version = "1.0.4"
940 source = "registry+https://github.com/rust-lang/crates.io-index"
961 source = "registry+https://github.com/rust-lang/crates.io-index"
941 checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
962 checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
942 dependencies = [
963 dependencies = [
943 "proc-macro-error-attr",
964 "proc-macro-error-attr",
944 "proc-macro2",
965 "proc-macro2",
945 "quote",
966 "quote",
946 "syn",
967 "syn",
947 "version_check",
968 "version_check",
948 ]
969 ]
949
970
950 [[package]]
971 [[package]]
951 name = "proc-macro-error-attr"
972 name = "proc-macro-error-attr"
952 version = "1.0.4"
973 version = "1.0.4"
953 source = "registry+https://github.com/rust-lang/crates.io-index"
974 source = "registry+https://github.com/rust-lang/crates.io-index"
954 checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
975 checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
955 dependencies = [
976 dependencies = [
956 "proc-macro2",
977 "proc-macro2",
957 "quote",
978 "quote",
958 "version_check",
979 "version_check",
959 ]
980 ]
960
981
961 [[package]]
982 [[package]]
962 name = "proc-macro2"
983 name = "proc-macro2"
963 version = "1.0.47"
984 version = "1.0.47"
964 source = "registry+https://github.com/rust-lang/crates.io-index"
985 source = "registry+https://github.com/rust-lang/crates.io-index"
965 checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
986 checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
966 dependencies = [
987 dependencies = [
967 "unicode-ident",
988 "unicode-ident",
968 ]
989 ]
969
990
970 [[package]]
991 [[package]]
971 name = "python3-sys"
992 name = "python3-sys"
972 version = "0.7.2"
993 version = "0.7.2"
973 source = "registry+https://github.com/rust-lang/crates.io-index"
994 source = "registry+https://github.com/rust-lang/crates.io-index"
974 checksum = "0f53ef6740367a09718d2cd21ba15b0d7972342a38e554736bcee7773e45c9f5"
995 checksum = "0f53ef6740367a09718d2cd21ba15b0d7972342a38e554736bcee7773e45c9f5"
975 dependencies = [
996 dependencies = [
976 "libc",
997 "libc",
977 "regex",
998 "regex",
978 ]
999 ]
979
1000
980 [[package]]
1001 [[package]]
981 name = "quote"
1002 name = "quote"
982 version = "1.0.21"
1003 version = "1.0.21"
983 source = "registry+https://github.com/rust-lang/crates.io-index"
1004 source = "registry+https://github.com/rust-lang/crates.io-index"
984 checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
1005 checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
985 dependencies = [
1006 dependencies = [
986 "proc-macro2",
1007 "proc-macro2",
987 ]
1008 ]
988
1009
989 [[package]]
1010 [[package]]
990 name = "radium"
1011 name = "radium"
991 version = "0.7.0"
1012 version = "0.7.0"
992 source = "registry+https://github.com/rust-lang/crates.io-index"
1013 source = "registry+https://github.com/rust-lang/crates.io-index"
993 checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
1014 checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
994
1015
995 [[package]]
1016 [[package]]
996 name = "rand"
1017 name = "rand"
997 version = "0.7.3"
1018 version = "0.7.3"
998 source = "registry+https://github.com/rust-lang/crates.io-index"
1019 source = "registry+https://github.com/rust-lang/crates.io-index"
999 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
1020 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
1000 dependencies = [
1021 dependencies = [
1001 "getrandom 0.1.16",
1022 "getrandom 0.1.16",
1002 "libc",
1023 "libc",
1003 "rand_chacha 0.2.2",
1024 "rand_chacha 0.2.2",
1004 "rand_core 0.5.1",
1025 "rand_core 0.5.1",
1005 "rand_hc",
1026 "rand_hc",
1006 ]
1027 ]
1007
1028
1008 [[package]]
1029 [[package]]
1009 name = "rand"
1030 name = "rand"
1010 version = "0.8.5"
1031 version = "0.8.5"
1011 source = "registry+https://github.com/rust-lang/crates.io-index"
1032 source = "registry+https://github.com/rust-lang/crates.io-index"
1012 checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
1033 checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
1013 dependencies = [
1034 dependencies = [
1014 "libc",
1035 "libc",
1015 "rand_chacha 0.3.1",
1036 "rand_chacha 0.3.1",
1016 "rand_core 0.6.4",
1037 "rand_core 0.6.4",
1017 ]
1038 ]
1018
1039
1019 [[package]]
1040 [[package]]
1020 name = "rand_chacha"
1041 name = "rand_chacha"
1021 version = "0.2.2"
1042 version = "0.2.2"
1022 source = "registry+https://github.com/rust-lang/crates.io-index"
1043 source = "registry+https://github.com/rust-lang/crates.io-index"
1023 checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
1044 checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
1024 dependencies = [
1045 dependencies = [
1025 "ppv-lite86",
1046 "ppv-lite86",
1026 "rand_core 0.5.1",
1047 "rand_core 0.5.1",
1027 ]
1048 ]
1028
1049
1029 [[package]]
1050 [[package]]
1030 name = "rand_chacha"
1051 name = "rand_chacha"
1031 version = "0.3.1"
1052 version = "0.3.1"
1032 source = "registry+https://github.com/rust-lang/crates.io-index"
1053 source = "registry+https://github.com/rust-lang/crates.io-index"
1033 checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
1054 checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
1034 dependencies = [
1055 dependencies = [
1035 "ppv-lite86",
1056 "ppv-lite86",
1036 "rand_core 0.6.4",
1057 "rand_core 0.6.4",
1037 ]
1058 ]
1038
1059
1039 [[package]]
1060 [[package]]
1040 name = "rand_core"
1061 name = "rand_core"
1041 version = "0.5.1"
1062 version = "0.5.1"
1042 source = "registry+https://github.com/rust-lang/crates.io-index"
1063 source = "registry+https://github.com/rust-lang/crates.io-index"
1043 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
1064 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
1044 dependencies = [
1065 dependencies = [
1045 "getrandom 0.1.16",
1066 "getrandom 0.1.16",
1046 ]
1067 ]
1047
1068
1048 [[package]]
1069 [[package]]
1049 name = "rand_core"
1070 name = "rand_core"
1050 version = "0.6.4"
1071 version = "0.6.4"
1051 source = "registry+https://github.com/rust-lang/crates.io-index"
1072 source = "registry+https://github.com/rust-lang/crates.io-index"
1052 checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
1073 checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
1053 dependencies = [
1074 dependencies = [
1054 "getrandom 0.2.8",
1075 "getrandom 0.2.8",
1055 ]
1076 ]
1056
1077
1057 [[package]]
1078 [[package]]
1058 name = "rand_distr"
1079 name = "rand_distr"
1059 version = "0.4.3"
1080 version = "0.4.3"
1060 source = "registry+https://github.com/rust-lang/crates.io-index"
1081 source = "registry+https://github.com/rust-lang/crates.io-index"
1061 checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31"
1082 checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31"
1062 dependencies = [
1083 dependencies = [
1063 "num-traits",
1084 "num-traits",
1064 "rand 0.8.5",
1085 "rand 0.8.5",
1065 ]
1086 ]
1066
1087
1067 [[package]]
1088 [[package]]
1068 name = "rand_hc"
1089 name = "rand_hc"
1069 version = "0.2.0"
1090 version = "0.2.0"
1070 source = "registry+https://github.com/rust-lang/crates.io-index"
1091 source = "registry+https://github.com/rust-lang/crates.io-index"
1071 checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
1092 checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
1072 dependencies = [
1093 dependencies = [
1073 "rand_core 0.5.1",
1094 "rand_core 0.5.1",
1074 ]
1095 ]
1075
1096
1076 [[package]]
1097 [[package]]
1077 name = "rand_pcg"
1098 name = "rand_pcg"
1078 version = "0.3.1"
1099 version = "0.3.1"
1079 source = "registry+https://github.com/rust-lang/crates.io-index"
1100 source = "registry+https://github.com/rust-lang/crates.io-index"
1080 checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e"
1101 checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e"
1081 dependencies = [
1102 dependencies = [
1082 "rand_core 0.6.4",
1103 "rand_core 0.6.4",
1083 ]
1104 ]
1084
1105
1085 [[package]]
1106 [[package]]
1086 name = "rand_xoshiro"
1107 name = "rand_xoshiro"
1087 version = "0.6.0"
1108 version = "0.6.0"
1088 source = "registry+https://github.com/rust-lang/crates.io-index"
1109 source = "registry+https://github.com/rust-lang/crates.io-index"
1089 checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
1110 checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
1090 dependencies = [
1111 dependencies = [
1091 "rand_core 0.6.4",
1112 "rand_core 0.6.4",
1092 ]
1113 ]
1093
1114
1094 [[package]]
1115 [[package]]
1095 name = "rayon"
1116 name = "rayon"
1096 version = "1.7.0"
1117 version = "1.7.0"
1097 source = "registry+https://github.com/rust-lang/crates.io-index"
1118 source = "registry+https://github.com/rust-lang/crates.io-index"
1098 checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
1119 checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
1099 dependencies = [
1120 dependencies = [
1100 "either",
1121 "either",
1101 "rayon-core",
1122 "rayon-core",
1102 ]
1123 ]
1103
1124
1104 [[package]]
1125 [[package]]
1105 name = "rayon-core"
1126 name = "rayon-core"
1106 version = "1.11.0"
1127 version = "1.11.0"
1107 source = "registry+https://github.com/rust-lang/crates.io-index"
1128 source = "registry+https://github.com/rust-lang/crates.io-index"
1108 checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
1129 checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
1109 dependencies = [
1130 dependencies = [
1110 "crossbeam-channel",
1131 "crossbeam-channel",
1111 "crossbeam-deque",
1132 "crossbeam-deque",
1112 "crossbeam-utils",
1133 "crossbeam-utils",
1113 "num_cpus",
1134 "num_cpus",
1114 ]
1135 ]
1115
1136
1116 [[package]]
1137 [[package]]
1117 name = "redox_syscall"
1138 name = "redox_syscall"
1118 version = "0.2.16"
1139 version = "0.2.16"
1119 source = "registry+https://github.com/rust-lang/crates.io-index"
1140 source = "registry+https://github.com/rust-lang/crates.io-index"
1120 checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
1141 checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
1121 dependencies = [
1142 dependencies = [
1122 "bitflags 1.3.2",
1143 "bitflags 1.3.2",
1123 ]
1144 ]
1124
1145
1125 [[package]]
1146 [[package]]
1147 name = "redox_syscall"
1148 version = "0.5.3"
1149 source = "registry+https://github.com/rust-lang/crates.io-index"
1150 checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
1151 dependencies = [
1152 "bitflags 2.6.0",
1153 ]
1154
1155 [[package]]
1126 name = "redox_users"
1156 name = "redox_users"
1127 version = "0.4.5"
1157 version = "0.4.5"
1128 source = "registry+https://github.com/rust-lang/crates.io-index"
1158 source = "registry+https://github.com/rust-lang/crates.io-index"
1129 checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
1159 checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
1130 dependencies = [
1160 dependencies = [
1131 "getrandom 0.2.8",
1161 "getrandom 0.2.8",
1132 "libredox",
1162 "libredox",
1133 "thiserror",
1163 "thiserror",
1134 ]
1164 ]
1135
1165
1136 [[package]]
1166 [[package]]
1137 name = "regex"
1167 name = "regex"
1138 version = "1.7.0"
1168 version = "1.7.0"
1139 source = "registry+https://github.com/rust-lang/crates.io-index"
1169 source = "registry+https://github.com/rust-lang/crates.io-index"
1140 checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
1170 checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
1141 dependencies = [
1171 dependencies = [
1142 "aho-corasick",
1172 "aho-corasick",
1143 "memchr",
1173 "memchr",
1144 "regex-syntax",
1174 "regex-syntax",
1145 ]
1175 ]
1146
1176
1147 [[package]]
1177 [[package]]
1148 name = "regex-automata"
1178 name = "regex-automata"
1149 version = "0.3.9"
1179 version = "0.3.9"
1150 source = "registry+https://github.com/rust-lang/crates.io-index"
1180 source = "registry+https://github.com/rust-lang/crates.io-index"
1151 checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
1181 checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
1152
1182
1153 [[package]]
1183 [[package]]
1154 name = "regex-syntax"
1184 name = "regex-syntax"
1155 version = "0.6.28"
1185 version = "0.6.28"
1156 source = "registry+https://github.com/rust-lang/crates.io-index"
1186 source = "registry+https://github.com/rust-lang/crates.io-index"
1157 checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
1187 checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
1158
1188
1159 [[package]]
1189 [[package]]
1160 name = "remove_dir_all"
1190 name = "remove_dir_all"
1161 version = "0.5.3"
1191 version = "0.5.3"
1162 source = "registry+https://github.com/rust-lang/crates.io-index"
1192 source = "registry+https://github.com/rust-lang/crates.io-index"
1163 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
1193 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
1164 dependencies = [
1194 dependencies = [
1165 "winapi",
1195 "winapi",
1166 ]
1196 ]
1167
1197
1168 [[package]]
1198 [[package]]
1169 name = "rhg"
1199 name = "rhg"
1170 version = "0.1.0"
1200 version = "0.1.0"
1171 dependencies = [
1201 dependencies = [
1172 "atty",
1202 "atty",
1173 "chrono",
1203 "chrono",
1174 "clap",
1204 "clap",
1175 "derive_more",
1205 "derive_more",
1176 "env_logger",
1206 "env_logger",
1177 "format-bytes",
1207 "format-bytes",
1178 "hg-core",
1208 "hg-core",
1179 "home",
1209 "home",
1180 "lazy_static",
1210 "lazy_static",
1181 "libc",
1211 "libc",
1182 "log",
1212 "log",
1183 "logging_timer",
1213 "logging_timer",
1184 "rayon",
1214 "rayon",
1185 "regex",
1215 "regex",
1186 "shellexpand",
1216 "shellexpand",
1187 "which",
1217 "which",
1188 "whoami",
1218 "whoami",
1189 ]
1219 ]
1190
1220
1191 [[package]]
1221 [[package]]
1192 name = "rustc_version"
1222 name = "rustc_version"
1193 version = "0.4.0"
1223 version = "0.4.0"
1194 source = "registry+https://github.com/rust-lang/crates.io-index"
1224 source = "registry+https://github.com/rust-lang/crates.io-index"
1195 checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
1225 checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
1196 dependencies = [
1226 dependencies = [
1197 "semver",
1227 "semver",
1198 ]
1228 ]
1199
1229
1200 [[package]]
1230 [[package]]
1201 name = "same-file"
1231 name = "same-file"
1202 version = "1.0.6"
1232 version = "1.0.6"
1203 source = "registry+https://github.com/rust-lang/crates.io-index"
1233 source = "registry+https://github.com/rust-lang/crates.io-index"
1204 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
1234 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
1205 dependencies = [
1235 dependencies = [
1206 "winapi-util",
1236 "winapi-util",
1207 ]
1237 ]
1208
1238
1209 [[package]]
1239 [[package]]
1210 name = "scopeguard"
1240 name = "scopeguard"
1211 version = "1.1.0"
1241 version = "1.1.0"
1212 source = "registry+https://github.com/rust-lang/crates.io-index"
1242 source = "registry+https://github.com/rust-lang/crates.io-index"
1213 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
1243 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
1214
1244
1215 [[package]]
1245 [[package]]
1216 name = "scratch"
1246 name = "scratch"
1217 version = "1.0.2"
1247 version = "1.0.2"
1218 source = "registry+https://github.com/rust-lang/crates.io-index"
1248 source = "registry+https://github.com/rust-lang/crates.io-index"
1219 checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
1249 checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
1220
1250
1221 [[package]]
1251 [[package]]
1222 name = "self_cell"
1252 name = "self_cell"
1223 version = "1.0.0"
1253 version = "1.0.0"
1224 source = "registry+https://github.com/rust-lang/crates.io-index"
1254 source = "registry+https://github.com/rust-lang/crates.io-index"
1225 checksum = "4a3926e239738d36060909ffe6f511502f92149a45a1fade7fe031cb2d33e88b"
1255 checksum = "4a3926e239738d36060909ffe6f511502f92149a45a1fade7fe031cb2d33e88b"
1226
1256
1227 [[package]]
1257 [[package]]
1228 name = "semver"
1258 name = "semver"
1229 version = "1.0.14"
1259 version = "1.0.14"
1230 source = "registry+https://github.com/rust-lang/crates.io-index"
1260 source = "registry+https://github.com/rust-lang/crates.io-index"
1231 checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
1261 checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
1232
1262
1233 [[package]]
1263 [[package]]
1234 name = "serde"
1264 name = "serde"
1235 version = "1.0.152"
1265 version = "1.0.152"
1236 source = "registry+https://github.com/rust-lang/crates.io-index"
1266 source = "registry+https://github.com/rust-lang/crates.io-index"
1237 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
1267 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
1238 dependencies = [
1268 dependencies = [
1239 "serde_derive",
1269 "serde_derive",
1240 ]
1270 ]
1241
1271
1242 [[package]]
1272 [[package]]
1243 name = "serde_derive"
1273 name = "serde_derive"
1244 version = "1.0.152"
1274 version = "1.0.152"
1245 source = "registry+https://github.com/rust-lang/crates.io-index"
1275 source = "registry+https://github.com/rust-lang/crates.io-index"
1246 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
1276 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
1247 dependencies = [
1277 dependencies = [
1248 "proc-macro2",
1278 "proc-macro2",
1249 "quote",
1279 "quote",
1250 "syn",
1280 "syn",
1251 ]
1281 ]
1252
1282
1253 [[package]]
1283 [[package]]
1254 name = "serde_spanned"
1284 name = "serde_spanned"
1255 version = "0.6.1"
1285 version = "0.6.1"
1256 source = "registry+https://github.com/rust-lang/crates.io-index"
1286 source = "registry+https://github.com/rust-lang/crates.io-index"
1257 checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
1287 checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
1258 dependencies = [
1288 dependencies = [
1259 "serde",
1289 "serde",
1260 ]
1290 ]
1261
1291
1262 [[package]]
1292 [[package]]
1263 name = "sha-1"
1293 name = "sha-1"
1264 version = "0.9.8"
1294 version = "0.9.8"
1265 source = "registry+https://github.com/rust-lang/crates.io-index"
1295 source = "registry+https://github.com/rust-lang/crates.io-index"
1266 checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
1296 checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
1267 dependencies = [
1297 dependencies = [
1268 "block-buffer 0.9.0",
1298 "block-buffer 0.9.0",
1269 "cfg-if",
1299 "cfg-if",
1270 "cpufeatures",
1300 "cpufeatures",
1271 "digest 0.9.0",
1301 "digest 0.9.0",
1272 "opaque-debug",
1302 "opaque-debug",
1273 ]
1303 ]
1274
1304
1275 [[package]]
1305 [[package]]
1276 name = "sha-1"
1306 name = "sha-1"
1277 version = "0.10.0"
1307 version = "0.10.0"
1278 source = "registry+https://github.com/rust-lang/crates.io-index"
1308 source = "registry+https://github.com/rust-lang/crates.io-index"
1279 checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
1309 checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
1280 dependencies = [
1310 dependencies = [
1281 "cfg-if",
1311 "cfg-if",
1282 "cpufeatures",
1312 "cpufeatures",
1283 "digest 0.10.5",
1313 "digest 0.10.5",
1284 ]
1314 ]
1285
1315
1286 [[package]]
1316 [[package]]
1287 name = "shellexpand"
1317 name = "shellexpand"
1288 version = "3.1.0"
1318 version = "3.1.0"
1289 source = "registry+https://github.com/rust-lang/crates.io-index"
1319 source = "registry+https://github.com/rust-lang/crates.io-index"
1290 checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
1320 checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b"
1291 dependencies = [
1321 dependencies = [
1292 "bstr",
1322 "bstr",
1293 "dirs",
1323 "dirs",
1294 "os_str_bytes",
1324 "os_str_bytes",
1295 ]
1325 ]
1296
1326
1297 [[package]]
1327 [[package]]
1298 name = "sized-chunks"
1328 name = "sized-chunks"
1299 version = "0.6.5"
1329 version = "0.6.5"
1300 source = "registry+https://github.com/rust-lang/crates.io-index"
1330 source = "registry+https://github.com/rust-lang/crates.io-index"
1301 checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
1331 checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
1302 dependencies = [
1332 dependencies = [
1303 "bitmaps",
1333 "bitmaps",
1304 "typenum",
1334 "typenum",
1305 ]
1335 ]
1306
1336
1307 [[package]]
1337 [[package]]
1308 name = "stable_deref_trait"
1338 name = "stable_deref_trait"
1309 version = "1.2.0"
1339 version = "1.2.0"
1310 source = "registry+https://github.com/rust-lang/crates.io-index"
1340 source = "registry+https://github.com/rust-lang/crates.io-index"
1311 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
1341 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
1312
1342
1313 [[package]]
1343 [[package]]
1314 name = "static_assertions"
1344 name = "static_assertions"
1315 version = "1.1.0"
1345 version = "1.1.0"
1316 source = "registry+https://github.com/rust-lang/crates.io-index"
1346 source = "registry+https://github.com/rust-lang/crates.io-index"
1317 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
1347 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
1318
1348
1319 [[package]]
1349 [[package]]
1320 name = "strsim"
1350 name = "strsim"
1321 version = "0.10.0"
1351 version = "0.10.0"
1322 source = "registry+https://github.com/rust-lang/crates.io-index"
1352 source = "registry+https://github.com/rust-lang/crates.io-index"
1323 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
1353 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
1324
1354
1325 [[package]]
1355 [[package]]
1326 name = "syn"
1356 name = "syn"
1327 version = "1.0.109"
1357 version = "1.0.109"
1328 source = "registry+https://github.com/rust-lang/crates.io-index"
1358 source = "registry+https://github.com/rust-lang/crates.io-index"
1329 checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
1359 checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
1330 dependencies = [
1360 dependencies = [
1331 "proc-macro2",
1361 "proc-macro2",
1332 "quote",
1362 "quote",
1333 "unicode-ident",
1363 "unicode-ident",
1334 ]
1364 ]
1335
1365
1336 [[package]]
1366 [[package]]
1337 name = "tap"
1367 name = "tap"
1338 version = "1.0.1"
1368 version = "1.0.1"
1339 source = "registry+https://github.com/rust-lang/crates.io-index"
1369 source = "registry+https://github.com/rust-lang/crates.io-index"
1340 checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
1370 checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
1341
1371
1342 [[package]]
1372 [[package]]
1343 name = "tempfile"
1373 name = "tempfile"
1344 version = "3.3.0"
1374 version = "3.3.0"
1345 source = "registry+https://github.com/rust-lang/crates.io-index"
1375 source = "registry+https://github.com/rust-lang/crates.io-index"
1346 checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
1376 checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
1347 dependencies = [
1377 dependencies = [
1348 "cfg-if",
1378 "cfg-if",
1349 "fastrand",
1379 "fastrand",
1350 "libc",
1380 "libc",
1351 "redox_syscall",
1381 "redox_syscall 0.2.16",
1352 "remove_dir_all",
1382 "remove_dir_all",
1353 "winapi",
1383 "winapi",
1354 ]
1384 ]
1355
1385
1356 [[package]]
1386 [[package]]
1357 name = "termcolor"
1387 name = "termcolor"
1358 version = "1.1.3"
1388 version = "1.1.3"
1359 source = "registry+https://github.com/rust-lang/crates.io-index"
1389 source = "registry+https://github.com/rust-lang/crates.io-index"
1360 checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
1390 checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
1361 dependencies = [
1391 dependencies = [
1362 "winapi-util",
1392 "winapi-util",
1363 ]
1393 ]
1364
1394
1365 [[package]]
1395 [[package]]
1366 name = "thiserror"
1396 name = "thiserror"
1367 version = "1.0.39"
1397 version = "1.0.39"
1368 source = "registry+https://github.com/rust-lang/crates.io-index"
1398 source = "registry+https://github.com/rust-lang/crates.io-index"
1369 checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
1399 checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
1370 dependencies = [
1400 dependencies = [
1371 "thiserror-impl",
1401 "thiserror-impl",
1372 ]
1402 ]
1373
1403
1374 [[package]]
1404 [[package]]
1375 name = "thiserror-impl"
1405 name = "thiserror-impl"
1376 version = "1.0.39"
1406 version = "1.0.39"
1377 source = "registry+https://github.com/rust-lang/crates.io-index"
1407 source = "registry+https://github.com/rust-lang/crates.io-index"
1378 checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
1408 checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
1379 dependencies = [
1409 dependencies = [
1380 "proc-macro2",
1410 "proc-macro2",
1381 "quote",
1411 "quote",
1382 "syn",
1412 "syn",
1383 ]
1413 ]
1384
1414
1385 [[package]]
1415 [[package]]
1386 name = "thread_local"
1416 name = "thread_local"
1387 version = "1.1.4"
1417 version = "1.1.4"
1388 source = "registry+https://github.com/rust-lang/crates.io-index"
1418 source = "registry+https://github.com/rust-lang/crates.io-index"
1389 checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
1419 checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
1390 dependencies = [
1420 dependencies = [
1391 "once_cell",
1421 "once_cell",
1392 ]
1422 ]
1393
1423
1394 [[package]]
1424 [[package]]
1395 name = "toml"
1425 name = "toml"
1396 version = "0.6.0"
1426 version = "0.6.0"
1397 source = "registry+https://github.com/rust-lang/crates.io-index"
1427 source = "registry+https://github.com/rust-lang/crates.io-index"
1398 checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
1428 checksum = "4fb9d890e4dc9298b70f740f615f2e05b9db37dce531f6b24fb77ac993f9f217"
1399 dependencies = [
1429 dependencies = [
1400 "serde",
1430 "serde",
1401 "serde_spanned",
1431 "serde_spanned",
1402 "toml_datetime",
1432 "toml_datetime",
1403 "toml_edit",
1433 "toml_edit",
1404 ]
1434 ]
1405
1435
1406 [[package]]
1436 [[package]]
1407 name = "toml_datetime"
1437 name = "toml_datetime"
1408 version = "0.5.1"
1438 version = "0.5.1"
1409 source = "registry+https://github.com/rust-lang/crates.io-index"
1439 source = "registry+https://github.com/rust-lang/crates.io-index"
1410 checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
1440 checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5"
1411 dependencies = [
1441 dependencies = [
1412 "serde",
1442 "serde",
1413 ]
1443 ]
1414
1444
1415 [[package]]
1445 [[package]]
1416 name = "toml_edit"
1446 name = "toml_edit"
1417 version = "0.18.1"
1447 version = "0.18.1"
1418 source = "registry+https://github.com/rust-lang/crates.io-index"
1448 source = "registry+https://github.com/rust-lang/crates.io-index"
1419 checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
1449 checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b"
1420 dependencies = [
1450 dependencies = [
1421 "indexmap",
1451 "indexmap",
1422 "nom8",
1452 "nom8",
1423 "serde",
1453 "serde",
1424 "serde_spanned",
1454 "serde_spanned",
1425 "toml_datetime",
1455 "toml_datetime",
1426 ]
1456 ]
1427
1457
1428 [[package]]
1458 [[package]]
1429 name = "twox-hash"
1459 name = "twox-hash"
1430 version = "1.6.3"
1460 version = "1.6.3"
1431 source = "registry+https://github.com/rust-lang/crates.io-index"
1461 source = "registry+https://github.com/rust-lang/crates.io-index"
1432 checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
1462 checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
1433 dependencies = [
1463 dependencies = [
1434 "cfg-if",
1464 "cfg-if",
1435 "rand 0.8.5",
1465 "rand 0.8.5",
1436 "static_assertions",
1466 "static_assertions",
1437 ]
1467 ]
1438
1468
1439 [[package]]
1469 [[package]]
1440 name = "typenum"
1470 name = "typenum"
1441 version = "1.15.0"
1471 version = "1.15.0"
1442 source = "registry+https://github.com/rust-lang/crates.io-index"
1472 source = "registry+https://github.com/rust-lang/crates.io-index"
1443 checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
1473 checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
1444
1474
1445 [[package]]
1475 [[package]]
1446 name = "unicode-ident"
1476 name = "unicode-ident"
1447 version = "1.0.5"
1477 version = "1.0.5"
1448 source = "registry+https://github.com/rust-lang/crates.io-index"
1478 source = "registry+https://github.com/rust-lang/crates.io-index"
1449 checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
1479 checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
1450
1480
1451 [[package]]
1481 [[package]]
1452 name = "unicode-width"
1482 name = "unicode-width"
1453 version = "0.1.10"
1483 version = "0.1.10"
1454 source = "registry+https://github.com/rust-lang/crates.io-index"
1484 source = "registry+https://github.com/rust-lang/crates.io-index"
1455 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
1485 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
1456
1486
1457 [[package]]
1487 [[package]]
1458 name = "vcpkg"
1488 name = "vcpkg"
1459 version = "0.2.15"
1489 version = "0.2.15"
1460 source = "registry+https://github.com/rust-lang/crates.io-index"
1490 source = "registry+https://github.com/rust-lang/crates.io-index"
1461 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
1491 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
1462
1492
1463 [[package]]
1493 [[package]]
1464 name = "vcsgraph"
1494 name = "vcsgraph"
1465 version = "0.2.0"
1495 version = "0.2.0"
1466 source = "registry+https://github.com/rust-lang/crates.io-index"
1496 source = "registry+https://github.com/rust-lang/crates.io-index"
1467 checksum = "4cb68c231e2575f7503a7c19213875f9d4ec2e84e963a56ce3de4b6bee351ef7"
1497 checksum = "4cb68c231e2575f7503a7c19213875f9d4ec2e84e963a56ce3de4b6bee351ef7"
1468 dependencies = [
1498 dependencies = [
1469 "hex",
1499 "hex",
1470 "rand 0.7.3",
1500 "rand 0.7.3",
1471 "sha-1 0.9.8",
1501 "sha-1 0.9.8",
1472 ]
1502 ]
1473
1503
1474 [[package]]
1504 [[package]]
1475 name = "version_check"
1505 name = "version_check"
1476 version = "0.9.4"
1506 version = "0.9.4"
1477 source = "registry+https://github.com/rust-lang/crates.io-index"
1507 source = "registry+https://github.com/rust-lang/crates.io-index"
1478 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
1508 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
1479
1509
1480 [[package]]
1510 [[package]]
1481 name = "wasi"
1511 name = "wasi"
1482 version = "0.9.0+wasi-snapshot-preview1"
1512 version = "0.9.0+wasi-snapshot-preview1"
1483 source = "registry+https://github.com/rust-lang/crates.io-index"
1513 source = "registry+https://github.com/rust-lang/crates.io-index"
1484 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1514 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1485
1515
1486 [[package]]
1516 [[package]]
1487 name = "wasi"
1517 name = "wasi"
1488 version = "0.11.0+wasi-snapshot-preview1"
1518 version = "0.11.0+wasi-snapshot-preview1"
1489 source = "registry+https://github.com/rust-lang/crates.io-index"
1519 source = "registry+https://github.com/rust-lang/crates.io-index"
1490 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
1520 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
1491
1521
1492 [[package]]
1522 [[package]]
1493 name = "wasm-bindgen"
1523 name = "wasm-bindgen"
1494 version = "0.2.83"
1524 version = "0.2.83"
1495 source = "registry+https://github.com/rust-lang/crates.io-index"
1525 source = "registry+https://github.com/rust-lang/crates.io-index"
1496 checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
1526 checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
1497 dependencies = [
1527 dependencies = [
1498 "cfg-if",
1528 "cfg-if",
1499 "wasm-bindgen-macro",
1529 "wasm-bindgen-macro",
1500 ]
1530 ]
1501
1531
1502 [[package]]
1532 [[package]]
1503 name = "wasm-bindgen-backend"
1533 name = "wasm-bindgen-backend"
1504 version = "0.2.83"
1534 version = "0.2.83"
1505 source = "registry+https://github.com/rust-lang/crates.io-index"
1535 source = "registry+https://github.com/rust-lang/crates.io-index"
1506 checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
1536 checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
1507 dependencies = [
1537 dependencies = [
1508 "bumpalo",
1538 "bumpalo",
1509 "log",
1539 "log",
1510 "once_cell",
1540 "once_cell",
1511 "proc-macro2",
1541 "proc-macro2",
1512 "quote",
1542 "quote",
1513 "syn",
1543 "syn",
1514 "wasm-bindgen-shared",
1544 "wasm-bindgen-shared",
1515 ]
1545 ]
1516
1546
1517 [[package]]
1547 [[package]]
1518 name = "wasm-bindgen-macro"
1548 name = "wasm-bindgen-macro"
1519 version = "0.2.83"
1549 version = "0.2.83"
1520 source = "registry+https://github.com/rust-lang/crates.io-index"
1550 source = "registry+https://github.com/rust-lang/crates.io-index"
1521 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
1551 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
1522 dependencies = [
1552 dependencies = [
1523 "quote",
1553 "quote",
1524 "wasm-bindgen-macro-support",
1554 "wasm-bindgen-macro-support",
1525 ]
1555 ]
1526
1556
1527 [[package]]
1557 [[package]]
1528 name = "wasm-bindgen-macro-support"
1558 name = "wasm-bindgen-macro-support"
1529 version = "0.2.83"
1559 version = "0.2.83"
1530 source = "registry+https://github.com/rust-lang/crates.io-index"
1560 source = "registry+https://github.com/rust-lang/crates.io-index"
1531 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
1561 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
1532 dependencies = [
1562 dependencies = [
1533 "proc-macro2",
1563 "proc-macro2",
1534 "quote",
1564 "quote",
1535 "syn",
1565 "syn",
1536 "wasm-bindgen-backend",
1566 "wasm-bindgen-backend",
1537 "wasm-bindgen-shared",
1567 "wasm-bindgen-shared",
1538 ]
1568 ]
1539
1569
1540 [[package]]
1570 [[package]]
1541 name = "wasm-bindgen-shared"
1571 name = "wasm-bindgen-shared"
1542 version = "0.2.83"
1572 version = "0.2.83"
1543 source = "registry+https://github.com/rust-lang/crates.io-index"
1573 source = "registry+https://github.com/rust-lang/crates.io-index"
1544 checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
1574 checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
1545
1575
1546 [[package]]
1576 [[package]]
1547 name = "web-sys"
1577 name = "web-sys"
1548 version = "0.3.60"
1578 version = "0.3.60"
1549 source = "registry+https://github.com/rust-lang/crates.io-index"
1579 source = "registry+https://github.com/rust-lang/crates.io-index"
1550 checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
1580 checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f"
1551 dependencies = [
1581 dependencies = [
1552 "js-sys",
1582 "js-sys",
1553 "wasm-bindgen",
1583 "wasm-bindgen",
1554 ]
1584 ]
1555
1585
1556 [[package]]
1586 [[package]]
1557 name = "which"
1587 name = "which"
1558 version = "4.3.0"
1588 version = "4.3.0"
1559 source = "registry+https://github.com/rust-lang/crates.io-index"
1589 source = "registry+https://github.com/rust-lang/crates.io-index"
1560 checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
1590 checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b"
1561 dependencies = [
1591 dependencies = [
1562 "either",
1592 "either",
1563 "libc",
1593 "libc",
1564 "once_cell",
1594 "once_cell",
1565 ]
1595 ]
1566
1596
1567 [[package]]
1597 [[package]]
1568 name = "whoami"
1598 name = "whoami"
1569 version = "1.4.0"
1599 version = "1.4.0"
1570 source = "registry+https://github.com/rust-lang/crates.io-index"
1600 source = "registry+https://github.com/rust-lang/crates.io-index"
1571 checksum = "2c70234412ca409cc04e864e89523cb0fc37f5e1344ebed5a3ebf4192b6b9f68"
1601 checksum = "2c70234412ca409cc04e864e89523cb0fc37f5e1344ebed5a3ebf4192b6b9f68"
1572 dependencies = [
1602 dependencies = [
1573 "wasm-bindgen",
1603 "wasm-bindgen",
1574 "web-sys",
1604 "web-sys",
1575 ]
1605 ]
1576
1606
1577 [[package]]
1607 [[package]]
1578 name = "winapi"
1608 name = "winapi"
1579 version = "0.3.9"
1609 version = "0.3.9"
1580 source = "registry+https://github.com/rust-lang/crates.io-index"
1610 source = "registry+https://github.com/rust-lang/crates.io-index"
1581 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1611 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1582 dependencies = [
1612 dependencies = [
1583 "winapi-i686-pc-windows-gnu",
1613 "winapi-i686-pc-windows-gnu",
1584 "winapi-x86_64-pc-windows-gnu",
1614 "winapi-x86_64-pc-windows-gnu",
1585 ]
1615 ]
1586
1616
1587 [[package]]
1617 [[package]]
1588 name = "winapi-i686-pc-windows-gnu"
1618 name = "winapi-i686-pc-windows-gnu"
1589 version = "0.4.0"
1619 version = "0.4.0"
1590 source = "registry+https://github.com/rust-lang/crates.io-index"
1620 source = "registry+https://github.com/rust-lang/crates.io-index"
1591 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1621 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1592
1622
1593 [[package]]
1623 [[package]]
1594 name = "winapi-util"
1624 name = "winapi-util"
1595 version = "0.1.5"
1625 version = "0.1.5"
1596 source = "registry+https://github.com/rust-lang/crates.io-index"
1626 source = "registry+https://github.com/rust-lang/crates.io-index"
1597 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1627 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1598 dependencies = [
1628 dependencies = [
1599 "winapi",
1629 "winapi",
1600 ]
1630 ]
1601
1631
1602 [[package]]
1632 [[package]]
1603 name = "winapi-x86_64-pc-windows-gnu"
1633 name = "winapi-x86_64-pc-windows-gnu"
1604 version = "0.4.0"
1634 version = "0.4.0"
1605 source = "registry+https://github.com/rust-lang/crates.io-index"
1635 source = "registry+https://github.com/rust-lang/crates.io-index"
1606 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1636 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1607
1637
1608 [[package]]
1638 [[package]]
1609 name = "windows-sys"
1639 name = "windows-sys"
1610 version = "0.48.0"
1640 version = "0.48.0"
1611 source = "registry+https://github.com/rust-lang/crates.io-index"
1641 source = "registry+https://github.com/rust-lang/crates.io-index"
1612 checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
1642 checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
1613 dependencies = [
1643 dependencies = [
1614 "windows-targets 0.48.5",
1644 "windows-targets 0.48.5",
1615 ]
1645 ]
1616
1646
1617 [[package]]
1647 [[package]]
1648 name = "windows-sys"
1649 version = "0.59.0"
1650 source = "registry+https://github.com/rust-lang/crates.io-index"
1651 checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
1652 dependencies = [
1653 "windows-targets 0.52.6",
1654 ]
1655
1656 [[package]]
1618 name = "windows-targets"
1657 name = "windows-targets"
1619 version = "0.48.5"
1658 version = "0.48.5"
1620 source = "registry+https://github.com/rust-lang/crates.io-index"
1659 source = "registry+https://github.com/rust-lang/crates.io-index"
1621 checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
1660 checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
1622 dependencies = [
1661 dependencies = [
1623 "windows_aarch64_gnullvm 0.48.5",
1662 "windows_aarch64_gnullvm 0.48.5",
1624 "windows_aarch64_msvc 0.48.5",
1663 "windows_aarch64_msvc 0.48.5",
1625 "windows_i686_gnu 0.48.5",
1664 "windows_i686_gnu 0.48.5",
1626 "windows_i686_msvc 0.48.5",
1665 "windows_i686_msvc 0.48.5",
1627 "windows_x86_64_gnu 0.48.5",
1666 "windows_x86_64_gnu 0.48.5",
1628 "windows_x86_64_gnullvm 0.48.5",
1667 "windows_x86_64_gnullvm 0.48.5",
1629 "windows_x86_64_msvc 0.48.5",
1668 "windows_x86_64_msvc 0.48.5",
1630 ]
1669 ]
1631
1670
1632 [[package]]
1671 [[package]]
1633 name = "windows-targets"
1672 name = "windows-targets"
1634 version = "0.52.0"
1673 version = "0.52.6"
1635 source = "registry+https://github.com/rust-lang/crates.io-index"
1674 source = "registry+https://github.com/rust-lang/crates.io-index"
1636 checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
1675 checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
1637 dependencies = [
1676 dependencies = [
1638 "windows_aarch64_gnullvm 0.52.0",
1677 "windows_aarch64_gnullvm 0.52.6",
1639 "windows_aarch64_msvc 0.52.0",
1678 "windows_aarch64_msvc 0.52.6",
1640 "windows_i686_gnu 0.52.0",
1679 "windows_i686_gnu 0.52.6",
1641 "windows_i686_msvc 0.52.0",
1680 "windows_i686_gnullvm",
1642 "windows_x86_64_gnu 0.52.0",
1681 "windows_i686_msvc 0.52.6",
1643 "windows_x86_64_gnullvm 0.52.0",
1682 "windows_x86_64_gnu 0.52.6",
1644 "windows_x86_64_msvc 0.52.0",
1683 "windows_x86_64_gnullvm 0.52.6",
1684 "windows_x86_64_msvc 0.52.6",
1645 ]
1685 ]
1646
1686
1647 [[package]]
1687 [[package]]
1648 name = "windows_aarch64_gnullvm"
1688 name = "windows_aarch64_gnullvm"
1649 version = "0.48.5"
1689 version = "0.48.5"
1650 source = "registry+https://github.com/rust-lang/crates.io-index"
1690 source = "registry+https://github.com/rust-lang/crates.io-index"
1651 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
1691 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
1652
1692
1653 [[package]]
1693 [[package]]
1654 name = "windows_aarch64_gnullvm"
1694 name = "windows_aarch64_gnullvm"
1655 version = "0.52.0"
1695 version = "0.52.6"
1656 source = "registry+https://github.com/rust-lang/crates.io-index"
1696 source = "registry+https://github.com/rust-lang/crates.io-index"
1657 checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
1697 checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
1658
1698
1659 [[package]]
1699 [[package]]
1660 name = "windows_aarch64_msvc"
1700 name = "windows_aarch64_msvc"
1661 version = "0.48.5"
1701 version = "0.48.5"
1662 source = "registry+https://github.com/rust-lang/crates.io-index"
1702 source = "registry+https://github.com/rust-lang/crates.io-index"
1663 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
1703 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
1664
1704
1665 [[package]]
1705 [[package]]
1666 name = "windows_aarch64_msvc"
1706 name = "windows_aarch64_msvc"
1667 version = "0.52.0"
1707 version = "0.52.6"
1668 source = "registry+https://github.com/rust-lang/crates.io-index"
1708 source = "registry+https://github.com/rust-lang/crates.io-index"
1669 checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
1709 checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
1670
1710
1671 [[package]]
1711 [[package]]
1672 name = "windows_i686_gnu"
1712 name = "windows_i686_gnu"
1673 version = "0.48.5"
1713 version = "0.48.5"
1674 source = "registry+https://github.com/rust-lang/crates.io-index"
1714 source = "registry+https://github.com/rust-lang/crates.io-index"
1675 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
1715 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
1676
1716
1677 [[package]]
1717 [[package]]
1678 name = "windows_i686_gnu"
1718 name = "windows_i686_gnu"
1679 version = "0.52.0"
1719 version = "0.52.6"
1680 source = "registry+https://github.com/rust-lang/crates.io-index"
1720 source = "registry+https://github.com/rust-lang/crates.io-index"
1681 checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
1721 checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
1722
1723 [[package]]
1724 name = "windows_i686_gnullvm"
1725 version = "0.52.6"
1726 source = "registry+https://github.com/rust-lang/crates.io-index"
1727 checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
1682
1728
1683 [[package]]
1729 [[package]]
1684 name = "windows_i686_msvc"
1730 name = "windows_i686_msvc"
1685 version = "0.48.5"
1731 version = "0.48.5"
1686 source = "registry+https://github.com/rust-lang/crates.io-index"
1732 source = "registry+https://github.com/rust-lang/crates.io-index"
1687 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
1733 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
1688
1734
1689 [[package]]
1735 [[package]]
1690 name = "windows_i686_msvc"
1736 name = "windows_i686_msvc"
1691 version = "0.52.0"
1737 version = "0.52.6"
1692 source = "registry+https://github.com/rust-lang/crates.io-index"
1738 source = "registry+https://github.com/rust-lang/crates.io-index"
1693 checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
1739 checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
1694
1740
1695 [[package]]
1741 [[package]]
1696 name = "windows_x86_64_gnu"
1742 name = "windows_x86_64_gnu"
1697 version = "0.48.5"
1743 version = "0.48.5"
1698 source = "registry+https://github.com/rust-lang/crates.io-index"
1744 source = "registry+https://github.com/rust-lang/crates.io-index"
1699 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
1745 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
1700
1746
1701 [[package]]
1747 [[package]]
1702 name = "windows_x86_64_gnu"
1748 name = "windows_x86_64_gnu"
1703 version = "0.52.0"
1749 version = "0.52.6"
1704 source = "registry+https://github.com/rust-lang/crates.io-index"
1750 source = "registry+https://github.com/rust-lang/crates.io-index"
1705 checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
1751 checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
1706
1752
1707 [[package]]
1753 [[package]]
1708 name = "windows_x86_64_gnullvm"
1754 name = "windows_x86_64_gnullvm"
1709 version = "0.48.5"
1755 version = "0.48.5"
1710 source = "registry+https://github.com/rust-lang/crates.io-index"
1756 source = "registry+https://github.com/rust-lang/crates.io-index"
1711 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
1757 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
1712
1758
1713 [[package]]
1759 [[package]]
1714 name = "windows_x86_64_gnullvm"
1760 name = "windows_x86_64_gnullvm"
1715 version = "0.52.0"
1761 version = "0.52.6"
1716 source = "registry+https://github.com/rust-lang/crates.io-index"
1762 source = "registry+https://github.com/rust-lang/crates.io-index"
1717 checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
1763 checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
1718
1764
1719 [[package]]
1765 [[package]]
1720 name = "windows_x86_64_msvc"
1766 name = "windows_x86_64_msvc"
1721 version = "0.48.5"
1767 version = "0.48.5"
1722 source = "registry+https://github.com/rust-lang/crates.io-index"
1768 source = "registry+https://github.com/rust-lang/crates.io-index"
1723 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
1769 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
1724
1770
1725 [[package]]
1771 [[package]]
1726 name = "windows_x86_64_msvc"
1772 name = "windows_x86_64_msvc"
1727 version = "0.52.0"
1773 version = "0.52.6"
1728 source = "registry+https://github.com/rust-lang/crates.io-index"
1774 source = "registry+https://github.com/rust-lang/crates.io-index"
1729 checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
1775 checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
1730
1776
1731 [[package]]
1777 [[package]]
1732 name = "wyz"
1778 name = "wyz"
1733 version = "0.5.1"
1779 version = "0.5.1"
1734 source = "registry+https://github.com/rust-lang/crates.io-index"
1780 source = "registry+https://github.com/rust-lang/crates.io-index"
1735 checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
1781 checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
1736 dependencies = [
1782 dependencies = [
1737 "tap",
1783 "tap",
1738 ]
1784 ]
1739
1785
1740 [[package]]
1786 [[package]]
1741 name = "yansi"
1787 name = "yansi"
1742 version = "0.5.1"
1788 version = "0.5.1"
1743 source = "registry+https://github.com/rust-lang/crates.io-index"
1789 source = "registry+https://github.com/rust-lang/crates.io-index"
1744 checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
1790 checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
1745
1791
1746 [[package]]
1792 [[package]]
1747 name = "zstd"
1793 name = "zstd"
1748 version = "0.12.3+zstd.1.5.2"
1794 version = "0.12.3+zstd.1.5.2"
1749 source = "registry+https://github.com/rust-lang/crates.io-index"
1795 source = "registry+https://github.com/rust-lang/crates.io-index"
1750 checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
1796 checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
1751 dependencies = [
1797 dependencies = [
1752 "zstd-safe",
1798 "zstd-safe",
1753 ]
1799 ]
1754
1800
1755 [[package]]
1801 [[package]]
1756 name = "zstd-safe"
1802 name = "zstd-safe"
1757 version = "6.0.4+zstd.1.5.4"
1803 version = "6.0.4+zstd.1.5.4"
1758 source = "registry+https://github.com/rust-lang/crates.io-index"
1804 source = "registry+https://github.com/rust-lang/crates.io-index"
1759 checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
1805 checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
1760 dependencies = [
1806 dependencies = [
1761 "libc",
1807 "libc",
1762 "zstd-sys",
1808 "zstd-sys",
1763 ]
1809 ]
1764
1810
1765 [[package]]
1811 [[package]]
1766 name = "zstd-sys"
1812 name = "zstd-sys"
1767 version = "2.0.7+zstd.1.5.4"
1813 version = "2.0.7+zstd.1.5.4"
1768 source = "registry+https://github.com/rust-lang/crates.io-index"
1814 source = "registry+https://github.com/rust-lang/crates.io-index"
1769 checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
1815 checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
1770 dependencies = [
1816 dependencies = [
1771 "cc",
1817 "cc",
1772 "libc",
1818 "libc",
1773 "pkg-config",
1819 "pkg-config",
1774 ]
1820 ]
@@ -1,54 +1,56
1 [package]
1 [package]
2 name = "hg-core"
2 name = "hg-core"
3 version = "0.1.0"
3 version = "0.1.0"
4 authors = ["Georges Racinet <gracinet@anybox.fr>"]
4 authors = ["Georges Racinet <gracinet@anybox.fr>"]
5 description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
5 description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
6 edition = "2021"
6 edition = "2021"
7
7
8 [lib]
8 [lib]
9 name = "hg"
9 name = "hg"
10
10
11 [dependencies]
11 [dependencies]
12 bitflags = "1.3.2"
12 bitflags = "1.3.2"
13 bytes-cast = "0.3.0"
13 bytes-cast = "0.3.0"
14 byteorder = "1.4.3"
14 byteorder = "1.4.3"
15 derive_more = "0.99.17"
15 derive_more = "0.99.17"
16 hashbrown = { version = "0.13.1", features = ["rayon"] }
16 hashbrown = { version = "0.13.1", features = ["rayon"] }
17 home = "0.5.4"
17 home = "0.5.4"
18 im-rc = "15.1.0"
18 im-rc = "15.1.0"
19 itertools = "0.10.5"
19 itertools = "0.10.5"
20 lazy_static = "1.4.0"
20 lazy_static = "1.4.0"
21 libc = "0.2.137"
21 libc = "0.2.137"
22 logging_timer = "1.1.0"
22 logging_timer = "1.1.0"
23 rand = "0.8.5"
23 rand = "0.8.5"
24 rand_pcg = "0.3.1"
24 rand_pcg = "0.3.1"
25 rand_distr = "0.4.3"
25 rand_distr = "0.4.3"
26 rayon = "1.7.0"
26 rayon = "1.7.0"
27 regex = "1.7.0"
27 regex = "1.7.0"
28 self_cell = "1.0"
28 self_cell = "1.0"
29 serde = { version = "1.0", features = ["derive"] }
29 serde = { version = "1.0", features = ["derive"] }
30 sha-1 = "0.10.0"
30 sha-1 = "0.10.0"
31 twox-hash = "1.6.3"
31 twox-hash = "1.6.3"
32 same-file = "1.0.6"
32 same-file = "1.0.6"
33 tempfile = "3.3.0"
33 tempfile = "3.3.0"
34 toml = "0.6"
34 toml = "0.6"
35 thread_local = "1.1.4"
35 thread_local = "1.1.4"
36 crossbeam-channel = "0.5.6"
36 crossbeam-channel = "0.5.6"
37 log = "0.4.17"
37 log = "0.4.17"
38 memmap2 = { version = "0.5.8", features = ["stable_deref_trait"] }
38 memmap2 = { version = "0.5.8", features = ["stable_deref_trait"] }
39 zstd = "0.12"
39 zstd = "0.12"
40 format-bytes = "0.3.0"
40 format-bytes = "0.3.0"
41 once_cell = "1.16.0"
41 once_cell = "1.16.0"
42 bitvec = "1.0.1"
42 bitvec = "1.0.1"
43 chrono = "0.4.34"
43 chrono = "0.4.34"
44 dyn-clone = "1.0.16"
45 filetime = "0.2.23"
44
46
45 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
47 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
46 # we have a clearer view of which backend is the fastest.
48 # we have a clearer view of which backend is the fastest.
47 [dependencies.flate2]
49 [dependencies.flate2]
48 version = "1.0.24"
50 version = "1.0.24"
49 features = ["zlib"]
51 features = ["zlib"]
50 default-features = false
52 default-features = false
51
53
52 [dev-dependencies]
54 [dev-dependencies]
53 clap = { version = "~4.0", features = ["derive"] }
55 clap = { version = "~4.0", features = ["derive"] }
54 pretty_assertions = "1.1.0"
56 pretty_assertions = "1.1.0"
@@ -1,188 +1,188
1 //! Filesystem-based locks for local repositories
1 //! Filesystem-based locks for local repositories
2
2
3 use crate::errors::HgError;
3 use crate::errors::HgError;
4 use crate::errors::HgResultExt;
4 use crate::errors::HgResultExt;
5 use crate::vfs::Vfs;
5 use crate::vfs::VfsImpl;
6 use std::io;
6 use std::io;
7 use std::io::ErrorKind;
7 use std::io::ErrorKind;
8
8
9 #[derive(derive_more::From)]
9 #[derive(derive_more::From)]
10 pub enum LockError {
10 pub enum LockError {
11 AlreadyHeld,
11 AlreadyHeld,
12 #[from]
12 #[from]
13 Other(HgError),
13 Other(HgError),
14 }
14 }
15
15
16 /// Try to call `f` with the lock acquired, without waiting.
16 /// Try to call `f` with the lock acquired, without waiting.
17 ///
17 ///
18 /// If the lock is already held, `f` is not called and `LockError::AlreadyHeld`
18 /// If the lock is already held, `f` is not called and `LockError::AlreadyHeld`
19 /// is returned. `LockError::Io` is returned for any unexpected I/O error
19 /// is returned. `LockError::Io` is returned for any unexpected I/O error
20 /// accessing the lock file, including for removing it after `f` was called.
20 /// accessing the lock file, including for removing it after `f` was called.
21 /// The return value of `f` is dropped in that case. If all is successful, the
21 /// The return value of `f` is dropped in that case. If all is successful, the
22 /// return value of `f` is forwarded.
22 /// return value of `f` is forwarded.
23 pub fn try_with_lock_no_wait<R>(
23 pub fn try_with_lock_no_wait<R>(
24 hg_vfs: Vfs,
24 hg_vfs: &VfsImpl,
25 lock_filename: &str,
25 lock_filename: &str,
26 f: impl FnOnce() -> R,
26 f: impl FnOnce() -> R,
27 ) -> Result<R, LockError> {
27 ) -> Result<R, LockError> {
28 let our_lock_data = &*OUR_LOCK_DATA;
28 let our_lock_data = &*OUR_LOCK_DATA;
29 for _retry in 0..5 {
29 for _retry in 0..5 {
30 match make_lock(hg_vfs, lock_filename, our_lock_data) {
30 match make_lock(hg_vfs, lock_filename, our_lock_data) {
31 Ok(()) => {
31 Ok(()) => {
32 let result = f();
32 let result = f();
33 unlock(hg_vfs, lock_filename)?;
33 unlock(hg_vfs, lock_filename)?;
34 return Ok(result);
34 return Ok(result);
35 }
35 }
36 Err(HgError::IoError { error, .. })
36 Err(HgError::IoError { error, .. })
37 if error.kind() == ErrorKind::AlreadyExists =>
37 if error.kind() == ErrorKind::AlreadyExists =>
38 {
38 {
39 let lock_data = read_lock(hg_vfs, lock_filename)?;
39 let lock_data = read_lock(hg_vfs, lock_filename)?;
40 if lock_data.is_none() {
40 if lock_data.is_none() {
41 // Lock was apparently just released, retry acquiring it
41 // Lock was apparently just released, retry acquiring it
42 continue;
42 continue;
43 }
43 }
44 if !lock_should_be_broken(&lock_data) {
44 if !lock_should_be_broken(&lock_data) {
45 return Err(LockError::AlreadyHeld);
45 return Err(LockError::AlreadyHeld);
46 }
46 }
47 // The lock file is left over from a process not running
47 // The lock file is left over from a process not running
48 // anymore. Break it, but with another lock to
48 // anymore. Break it, but with another lock to
49 // avoid a race.
49 // avoid a race.
50 break_lock(hg_vfs, lock_filename)?;
50 break_lock(hg_vfs, lock_filename)?;
51
51
52 // Retry acquiring
52 // Retry acquiring
53 }
53 }
54 Err(error) => Err(error)?,
54 Err(error) => Err(error)?,
55 }
55 }
56 }
56 }
57 Err(LockError::AlreadyHeld)
57 Err(LockError::AlreadyHeld)
58 }
58 }
59
59
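// --- Editor's usage sketch, not part of this changeset ---
// Shows how a caller might wrap a critical section with the function above,
// now that it takes `&VfsImpl`. The `hg_vfs` parameter and the "lock" file
// name are illustrative assumptions; only `try_with_lock_no_wait` itself is
// taken from this file.
fn locked_work_sketch(hg_vfs: &VfsImpl) -> Result<(), LockError> {
    let value = try_with_lock_no_wait(hg_vfs, "lock", || {
        // Exclusive work goes here; on success the lock file is removed and
        // the closure's return value is forwarded.
        42
    })?;
    assert_eq!(value, 42);
    Ok(())
}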
60 fn break_lock(hg_vfs: Vfs, lock_filename: &str) -> Result<(), LockError> {
60 fn break_lock(hg_vfs: &VfsImpl, lock_filename: &str) -> Result<(), LockError> {
61 try_with_lock_no_wait(hg_vfs, &format!("{}.break", lock_filename), || {
61 try_with_lock_no_wait(hg_vfs, &format!("{}.break", lock_filename), || {
62 // Check again in case some other process broke and
62 // Check again in case some other process broke and
63 // acquired the lock in the meantime
63 // acquired the lock in the meantime
64 let lock_data = read_lock(hg_vfs, lock_filename)?;
64 let lock_data = read_lock(hg_vfs, lock_filename)?;
65 if !lock_should_be_broken(&lock_data) {
65 if !lock_should_be_broken(&lock_data) {
66 return Err(LockError::AlreadyHeld);
66 return Err(LockError::AlreadyHeld);
67 }
67 }
68 Ok(hg_vfs.remove_file(lock_filename)?)
68 Ok(hg_vfs.remove_file(lock_filename)?)
69 })?
69 })?
70 }
70 }
71
71
72 #[cfg(unix)]
72 #[cfg(unix)]
73 fn make_lock(
73 fn make_lock(
74 hg_vfs: Vfs,
74 hg_vfs: &VfsImpl,
75 lock_filename: &str,
75 lock_filename: &str,
76 data: &str,
76 data: &str,
77 ) -> Result<(), HgError> {
77 ) -> Result<(), HgError> {
78 // Use a symbolic link because creating it is atomic.
78 // Use a symbolic link because creating it is atomic.
79 // The link’s "target" holds the lock data rather than an actual path.
79 // The link’s "target" holds the lock data rather than an actual path.
80 let fake_symlink_target = data;
80 let fake_symlink_target = data;
81 hg_vfs.create_symlink(lock_filename, fake_symlink_target)
81 hg_vfs.create_symlink(lock_filename, fake_symlink_target)
82 }
82 }
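// A hedged, standalone illustration of why the symlink trick above is
// race-free: `symlink` is a single syscall that fails with `AlreadyExists`
// when the link (i.e. the lock) already exists, so there is no separate
// "check then create" step. The function name below is illustrative only.
#[cfg(unix)]
fn try_acquire_raw(
    lock_path: &std::path::Path,
    data: &str,
) -> std::io::Result<bool> {
    match std::os::unix::fs::symlink(data, lock_path) {
        Ok(()) => Ok(true),
        Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(false),
        Err(e) => Err(e),
    }
}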
83
83
84 fn read_lock(
84 fn read_lock(
85 hg_vfs: Vfs,
85 hg_vfs: &VfsImpl,
86 lock_filename: &str,
86 lock_filename: &str,
87 ) -> Result<Option<String>, HgError> {
87 ) -> Result<Option<String>, HgError> {
88 let link_target =
88 let link_target =
89 hg_vfs.read_link(lock_filename).io_not_found_as_none()?;
89 hg_vfs.read_link(lock_filename).io_not_found_as_none()?;
90 if let Some(target) = link_target {
90 if let Some(target) = link_target {
91 let data = target
91 let data = target
92 .into_os_string()
92 .into_os_string()
93 .into_string()
93 .into_string()
94 .map_err(|_| HgError::corrupted("non-UTF-8 lock data"))?;
94 .map_err(|_| HgError::corrupted("non-UTF-8 lock data"))?;
95 Ok(Some(data))
95 Ok(Some(data))
96 } else {
96 } else {
97 Ok(None)
97 Ok(None)
98 }
98 }
99 }
99 }
100
100
101 fn unlock(hg_vfs: Vfs, lock_filename: &str) -> Result<(), HgError> {
101 fn unlock(hg_vfs: &VfsImpl, lock_filename: &str) -> Result<(), HgError> {
102 hg_vfs.remove_file(lock_filename)
102 hg_vfs.remove_file(lock_filename)
103 }
103 }
104
104
105 /// Return whether the process that is/was holding the lock is known not to be
105 /// Return whether the process that is/was holding the lock is known not to be
106 /// running anymore.
106 /// running anymore.
107 fn lock_should_be_broken(data: &Option<String>) -> bool {
107 fn lock_should_be_broken(data: &Option<String>) -> bool {
108 (|| -> Option<bool> {
108 (|| -> Option<bool> {
109 let (prefix, pid) = data.as_ref()?.split_once(':')?;
109 let (prefix, pid) = data.as_ref()?.split_once(':')?;
110 if prefix != *LOCK_PREFIX {
110 if prefix != *LOCK_PREFIX {
111 return Some(false);
111 return Some(false);
112 }
112 }
113 let process_is_running;
113 let process_is_running;
114
114
115 #[cfg(unix)]
115 #[cfg(unix)]
116 {
116 {
117 let pid: libc::pid_t = pid.parse().ok()?;
117 let pid: libc::pid_t = pid.parse().ok()?;
118 unsafe {
118 unsafe {
119 let signal = 0; // Test if we could send a signal, without sending
119 let signal = 0; // Test if we could send a signal, without sending
120 let result = libc::kill(pid, signal);
120 let result = libc::kill(pid, signal);
121 if result == 0 {
121 if result == 0 {
122 process_is_running = true
122 process_is_running = true
123 } else {
123 } else {
124 let errno =
124 let errno =
125 io::Error::last_os_error().raw_os_error().unwrap();
125 io::Error::last_os_error().raw_os_error().unwrap();
126 process_is_running = errno != libc::ESRCH
126 process_is_running = errno != libc::ESRCH
127 }
127 }
128 }
128 }
129 }
129 }
130
130
131 Some(!process_is_running)
131 Some(!process_is_running)
132 })()
132 })()
133 .unwrap_or(false)
133 .unwrap_or(false)
134 }
134 }
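// For reference, the `kill(pid, 0)` probe used above in isolation: signal 0
// performs existence and permission checks without delivering any signal.
// A minimal sketch assuming the `libc` crate (Unix only):
#[cfg(unix)]
fn process_is_running(pid: libc::pid_t) -> bool {
    let result = unsafe { libc::kill(pid, 0) };
    if result == 0 {
        return true; // the process exists and we may signal it
    }
    // ESRCH means "no such process"; other errors (e.g. EPERM) still mean
    // the process exists, just that we may not signal it.
    std::io::Error::last_os_error().raw_os_error() != Some(libc::ESRCH)
}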
135
135
136 lazy_static::lazy_static! {
136 lazy_static::lazy_static! {
137 /// A string which is used to differentiate pid namespaces
137 /// A string which is used to differentiate pid namespaces
138 ///
138 ///
139 /// It's useful to detect "dead" processes and remove stale locks with
139 /// It's useful to detect "dead" processes and remove stale locks with
140 /// confidence. Typically it's just the hostname. On modern Linux, we include an
140 /// confidence. Typically it's just the hostname. On modern Linux, we include an
141 /// extra Linux-specific pid namespace identifier.
141 /// extra Linux-specific pid namespace identifier.
142 static ref LOCK_PREFIX: String = {
142 static ref LOCK_PREFIX: String = {
143 // Note: this must match the behavior of `_getlockprefix` in `mercurial/lock.py`
143 // Note: this must match the behavior of `_getlockprefix` in `mercurial/lock.py`
144
144
145 /// Same as https://github.com/python/cpython/blob/v3.10.0/Modules/socketmodule.c#L5414
145 /// Same as https://github.com/python/cpython/blob/v3.10.0/Modules/socketmodule.c#L5414
146 const BUFFER_SIZE: usize = 1024;
146 const BUFFER_SIZE: usize = 1024;
147 // This cast is *needed* for platforms with signed chars
147 // This cast is *needed* for platforms with signed chars
148 #[allow(clippy::unnecessary_cast)]
148 #[allow(clippy::unnecessary_cast)]
149 let mut buffer = [0 as libc::c_char; BUFFER_SIZE];
149 let mut buffer = [0 as libc::c_char; BUFFER_SIZE];
150 let hostname_bytes = unsafe {
150 let hostname_bytes = unsafe {
151 let result = libc::gethostname(buffer.as_mut_ptr(), BUFFER_SIZE);
151 let result = libc::gethostname(buffer.as_mut_ptr(), BUFFER_SIZE);
152 if result != 0 {
152 if result != 0 {
153 panic!("gethostname: {}", io::Error::last_os_error())
153 panic!("gethostname: {}", io::Error::last_os_error())
154 }
154 }
155 std::ffi::CStr::from_ptr(buffer.as_mut_ptr()).to_bytes()
155 std::ffi::CStr::from_ptr(buffer.as_mut_ptr()).to_bytes()
156 };
156 };
157 let hostname =
157 let hostname =
158 std::str::from_utf8(hostname_bytes).expect("non-UTF-8 hostname");
158 std::str::from_utf8(hostname_bytes).expect("non-UTF-8 hostname");
159
159
160 #[cfg(target_os = "linux")]
160 #[cfg(target_os = "linux")]
161 {
161 {
162 use std::os::linux::fs::MetadataExt;
162 use std::os::linux::fs::MetadataExt;
163 match std::fs::metadata("/proc/self/ns/pid") {
163 match std::fs::metadata("/proc/self/ns/pid") {
164 Ok(meta) => {
164 Ok(meta) => {
165 return format!("{}/{:x}", hostname, meta.st_ino())
165 return format!("{}/{:x}", hostname, meta.st_ino())
166 }
166 }
167 Err(error) => {
167 Err(error) => {
168 // TODO: match on `error.kind()` when `NotADirectory`
168 // TODO: match on `error.kind()` when `NotADirectory`
169 // is available on all supported Rust versions:
169 // is available on all supported Rust versions:
170 // https://github.com/rust-lang/rust/issues/86442
170 // https://github.com/rust-lang/rust/issues/86442
171 use libc::{
171 use libc::{
172 ENOENT, // ErrorKind::NotFound
172 ENOENT, // ErrorKind::NotFound
173 ENOTDIR, // ErrorKind::NotADirectory
173 ENOTDIR, // ErrorKind::NotADirectory
174 EACCES, // ErrorKind::PermissionDenied
174 EACCES, // ErrorKind::PermissionDenied
175 };
175 };
176 match error.raw_os_error() {
176 match error.raw_os_error() {
177 Some(ENOENT) | Some(ENOTDIR) | Some(EACCES) => {}
177 Some(ENOENT) | Some(ENOTDIR) | Some(EACCES) => {}
178 _ => panic!("stat /proc/self/ns/pid: {}", error),
178 _ => panic!("stat /proc/self/ns/pid: {}", error),
179 }
179 }
180 }
180 }
181 }
181 }
182 }
182 }
183
183
184 hostname.to_owned()
184 hostname.to_owned()
185 };
185 };
186
186
187 static ref OUR_LOCK_DATA: String = format!("{}:{}", &*LOCK_PREFIX, std::process::id());
187 static ref OUR_LOCK_DATA: String = format!("{}:{}", &*LOCK_PREFIX, std::process::id());
188 }
188 }
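// Putting the pieces together, the data stored in the lock symlink has the
// shape "{LOCK_PREFIX}:{pid}", e.g. "myhost/1a2b3c:12345" on Linux (the
// values are illustrative). A minimal usage sketch of the updated API,
// with `VfsImpl` taken by reference as introduced in this change:
fn update_under_lock(hg_vfs: &VfsImpl) -> Result<usize, LockError> {
    // The closure only runs while the lock file exists and belongs to us;
    // a live competing process surfaces as `LockError::AlreadyHeld`.
    try_with_lock_no_wait(hg_vfs, "wlock", || {
        // ... critical section ...
        42
    })
}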
@@ -1,101 +1,105
1 use crate::errors::{HgError, HgResultExt, IoErrorContext, IoResultExt};
1 use crate::errors::{HgError, HgResultExt, IoErrorContext, IoResultExt};
2 use crate::vfs::Vfs;
2 use crate::vfs::VfsImpl;
3 use std::io::Write;
3 use std::io::Write;
4
4
5 /// A utility to append to a log file with the given name, and optionally
5 /// A utility to append to a log file with the given name, and optionally
6 /// rotate it after it reaches a certain maximum size.
6 /// rotate it after it reaches a certain maximum size.
7 ///
7 ///
8 /// Rotation works by renaming "example.log" to "example.log.1", after renaming
8 /// Rotation works by renaming "example.log" to "example.log.1", after renaming
9 /// "example.log.1" to "example.log.2", etc., up to the given maximum number of
9 /// "example.log.1" to "example.log.2", etc., up to the given maximum number of
10 /// files.
10 /// files.
11 pub struct LogFile<'a> {
11 pub struct LogFile<'a> {
12 vfs: Vfs<'a>,
12 vfs: VfsImpl,
13 name: &'a str,
13 name: &'a str,
14 max_size: Option<u64>,
14 max_size: Option<u64>,
15 max_files: u32,
15 max_files: u32,
16 }
16 }
17
17
18 impl<'a> LogFile<'a> {
18 impl<'a> LogFile<'a> {
19 pub fn new(vfs: Vfs<'a>, name: &'a str) -> Self {
19 pub fn new(vfs: VfsImpl, name: &'a str) -> Self {
20 Self {
20 Self {
21 vfs,
21 vfs,
22 name,
22 name,
23 max_size: None,
23 max_size: None,
24 max_files: 0,
24 max_files: 0,
25 }
25 }
26 }
26 }
27
27
28 /// Rotate before writing to a log file that was already larger than the
28 /// Rotate before writing to a log file that was already larger than the
29 /// given size, in bytes. `None` disables rotation.
29 /// given size, in bytes. `None` disables rotation.
30 pub fn max_size(mut self, value: Option<u64>) -> Self {
30 pub fn max_size(mut self, value: Option<u64>) -> Self {
31 self.max_size = value;
31 self.max_size = value;
32 self
32 self
33 }
33 }
34
34
35 /// Keep this many rotated files `{name}.1` up to `{name}.{max}`, in
35 /// Keep this many rotated files `{name}.1` up to `{name}.{max}`, in
36 /// addition to the original `{name}` file.
36 /// addition to the original `{name}` file.
37 pub fn max_files(mut self, value: u32) -> Self {
37 pub fn max_files(mut self, value: u32) -> Self {
38 self.max_files = value;
38 self.max_files = value;
39 self
39 self
40 }
40 }
41
41
42 /// Append the given `bytes` as-is to the log file, after rotating if
42 /// Append the given `bytes` as-is to the log file, after rotating if
43 /// needed.
43 /// needed.
44 ///
44 ///
45 /// No trailing newline is added. Make sure to include one in `bytes` if
45 /// No trailing newline is added. Make sure to include one in `bytes` if
46 /// desired.
46 /// desired.
47 pub fn write(&self, bytes: &[u8]) -> Result<(), HgError> {
47 pub fn write(&self, bytes: &[u8]) -> Result<(), HgError> {
48 let path = self.vfs.join(self.name);
48 let path = self.vfs.join(self.name);
49 let context = || IoErrorContext::WritingFile(path.clone());
49 let context = || IoErrorContext::WritingFile(path.clone());
50 let open = || {
50 let open = || {
51 std::fs::OpenOptions::new()
51 std::fs::OpenOptions::new()
52 .create(true)
52 .create(true)
53 .append(true)
53 .append(true)
54 .open(&path)
54 .open(&path)
55 .with_context(context)
55 .with_context(context)
56 };
56 };
57 let mut file = open()?;
57 let mut file = open()?;
58 if let Some(max_size) = self.max_size {
58 if let Some(max_size) = self.max_size {
59 if file.metadata().with_context(context)?.len() >= max_size {
59 if file.metadata().with_context(context)?.len() >= max_size {
60 // For example with `max_files == 5`, the first iteration of
60 // For example with `max_files == 5`, the first iteration of
61 // this loop has `i == 4` and renames `{name}.4` to `{name}.5`.
61 // this loop has `i == 4` and renames `{name}.4` to `{name}.5`.
62 // The last iteration renames `{name}.1` to
62 // The last iteration renames `{name}.1` to
63 // `{name}.2`
63 // `{name}.2`
64 for i in (1..self.max_files).rev() {
64 for i in (1..self.max_files).rev() {
65 self.vfs
65 self.vfs
66 .rename(
66 .rename(
67 format!("{}.{}", self.name, i),
67 format!("{}.{}", self.name, i),
68 format!("{}.{}", self.name, i + 1),
68 format!("{}.{}", self.name, i + 1),
69 )
69 )
70 .io_not_found_as_none()?;
70 .io_not_found_as_none()?;
71 }
71 }
72 // Then rename `{name}` to `{name}.1`. This is the
72 // Then rename `{name}` to `{name}.1`. This is the
73 // previously-opened `file`.
73 // previously-opened `file`.
74 self.vfs
74 self.vfs
75 .rename(self.name, format!("{}.1", self.name))
75 .rename(self.name, format!("{}.1", self.name))
76 .io_not_found_as_none()?;
76 .io_not_found_as_none()?;
77 // Finally, create a new `{name}` file and replace our `file`
77 // Finally, create a new `{name}` file and replace our `file`
78 // handle.
78 // handle.
79 file = open()?;
79 file = open()?;
80 }
80 }
81 }
81 }
82 file.write_all(bytes).with_context(context)?;
82 file.write_all(bytes).with_context(context)?;
83 file.sync_all().with_context(context)
83 file.sync_all().with_context(context)
84 }
84 }
85 }
85 }
86
86
87 #[test]
87 #[test]
88 fn test_rotation() {
88 fn test_rotation() {
89 let temp = tempfile::tempdir().unwrap();
89 let temp = tempfile::tempdir().unwrap();
90 let vfs = Vfs { base: temp.path() };
90 let vfs = VfsImpl {
91 let logger = LogFile::new(vfs, "log").max_size(Some(3)).max_files(2);
91 base: temp.path().to_owned(),
92 };
93 let logger = LogFile::new(vfs.clone(), "log")
94 .max_size(Some(3))
95 .max_files(2);
92 logger.write(b"one\n").unwrap();
96 logger.write(b"one\n").unwrap();
93 logger.write(b"two\n").unwrap();
97 logger.write(b"two\n").unwrap();
94 logger.write(b"3\n").unwrap();
98 logger.write(b"3\n").unwrap();
95 logger.write(b"four\n").unwrap();
99 logger.write(b"four\n").unwrap();
96 logger.write(b"five\n").unwrap();
100 logger.write(b"five\n").unwrap();
97 assert_eq!(vfs.read("log").unwrap(), b"five\n");
101 assert_eq!(vfs.read("log").unwrap(), b"five\n");
98 assert_eq!(vfs.read("log.1").unwrap(), b"3\nfour\n");
102 assert_eq!(vfs.read("log.1").unwrap(), b"3\nfour\n");
99 assert_eq!(vfs.read("log.2").unwrap(), b"two\n");
103 assert_eq!(vfs.read("log.2").unwrap(), b"two\n");
100 assert!(vfs.read("log.3").io_not_found_as_none().unwrap().is_none());
104 assert!(vfs.read("log.3").io_not_found_as_none().unwrap().is_none());
101 }
105 }
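// Since `LogFile` now owns a `VfsImpl` rather than borrowing a `Vfs<'a>`,
// callers that keep using the same VFS clone it, as the updated test above
// does. A hedged sketch with an illustrative log name and limits:
fn append_line(vfs: &VfsImpl, line: &[u8]) -> Result<(), HgError> {
    let logger = LogFile::new(vfs.clone(), "blackbox.log")
        .max_size(Some(128 * 1024))
        .max_files(7);
    // No trailing newline is added; include one in `line` if desired.
    logger.write(line)
}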
@@ -1,842 +1,851
1 use crate::changelog::Changelog;
1 use crate::changelog::Changelog;
2 use crate::config::{Config, ConfigError, ConfigParseError};
2 use crate::config::{Config, ConfigError, ConfigParseError};
3 use crate::dirstate::DirstateParents;
3 use crate::dirstate::DirstateParents;
4 use crate::dirstate_tree::dirstate_map::DirstateMapWriteMode;
4 use crate::dirstate_tree::dirstate_map::DirstateMapWriteMode;
5 use crate::dirstate_tree::on_disk::Docket as DirstateDocket;
5 use crate::dirstate_tree::on_disk::Docket as DirstateDocket;
6 use crate::dirstate_tree::owning::OwningDirstateMap;
6 use crate::dirstate_tree::owning::OwningDirstateMap;
7 use crate::errors::HgResultExt;
7 use crate::errors::HgResultExt;
8 use crate::errors::{HgError, IoResultExt};
8 use crate::errors::{HgError, IoResultExt};
9 use crate::lock::{try_with_lock_no_wait, LockError};
9 use crate::lock::{try_with_lock_no_wait, LockError};
10 use crate::manifest::{Manifest, Manifestlog};
10 use crate::manifest::{Manifest, Manifestlog};
11 use crate::requirements::{
11 use crate::requirements::{
12 CHANGELOGV2_REQUIREMENT, GENERALDELTA_REQUIREMENT, NODEMAP_REQUIREMENT,
12 CHANGELOGV2_REQUIREMENT, GENERALDELTA_REQUIREMENT, NODEMAP_REQUIREMENT,
13 REVLOGV1_REQUIREMENT, REVLOGV2_REQUIREMENT,
13 REVLOGV1_REQUIREMENT, REVLOGV2_REQUIREMENT,
14 };
14 };
15 use crate::revlog::filelog::Filelog;
15 use crate::revlog::filelog::Filelog;
16 use crate::revlog::RevlogError;
16 use crate::revlog::RevlogError;
17 use crate::utils::debug::debug_wait_for_file_or_print;
17 use crate::utils::debug::debug_wait_for_file_or_print;
18 use crate::utils::files::get_path_from_bytes;
18 use crate::utils::files::get_path_from_bytes;
19 use crate::utils::hg_path::HgPath;
19 use crate::utils::hg_path::HgPath;
20 use crate::utils::SliceExt;
20 use crate::utils::SliceExt;
21 use crate::vfs::{is_dir, is_file, Vfs};
21 use crate::vfs::{is_dir, is_file, VfsImpl};
22 use crate::{
22 use crate::{
23 requirements, NodePrefix, RevlogDataConfig, RevlogDeltaConfig,
23 requirements, NodePrefix, RevlogDataConfig, RevlogDeltaConfig,
24 RevlogFeatureConfig, RevlogType, RevlogVersionOptions, UncheckedRevision,
24 RevlogFeatureConfig, RevlogType, RevlogVersionOptions, UncheckedRevision,
25 };
25 };
26 use crate::{DirstateError, RevlogOpenOptions};
26 use crate::{DirstateError, RevlogOpenOptions};
27 use std::cell::{Ref, RefCell, RefMut};
27 use std::cell::{Ref, RefCell, RefMut};
28 use std::collections::HashSet;
28 use std::collections::HashSet;
29 use std::io::Seek;
29 use std::io::Seek;
30 use std::io::SeekFrom;
30 use std::io::SeekFrom;
31 use std::io::Write as IoWrite;
31 use std::io::Write as IoWrite;
32 use std::path::{Path, PathBuf};
32 use std::path::{Path, PathBuf};
33
33
34 const V2_MAX_READ_ATTEMPTS: usize = 5;
34 const V2_MAX_READ_ATTEMPTS: usize = 5;
35
35
36 type DirstateMapIdentity = (Option<u64>, Option<Vec<u8>>, usize);
36 type DirstateMapIdentity = (Option<u64>, Option<Vec<u8>>, usize);
37
37
38 /// A repository on disk
38 /// A repository on disk
39 pub struct Repo {
39 pub struct Repo {
40 working_directory: PathBuf,
40 working_directory: PathBuf,
41 dot_hg: PathBuf,
41 dot_hg: PathBuf,
42 store: PathBuf,
42 store: PathBuf,
43 requirements: HashSet<String>,
43 requirements: HashSet<String>,
44 config: Config,
44 config: Config,
45 dirstate_parents: LazyCell<DirstateParents>,
45 dirstate_parents: LazyCell<DirstateParents>,
46 dirstate_map: LazyCell<OwningDirstateMap>,
46 dirstate_map: LazyCell<OwningDirstateMap>,
47 changelog: LazyCell<Changelog>,
47 changelog: LazyCell<Changelog>,
48 manifestlog: LazyCell<Manifestlog>,
48 manifestlog: LazyCell<Manifestlog>,
49 }
49 }
50
50
51 #[derive(Debug, derive_more::From)]
51 #[derive(Debug, derive_more::From)]
52 pub enum RepoError {
52 pub enum RepoError {
53 NotFound {
53 NotFound {
54 at: PathBuf,
54 at: PathBuf,
55 },
55 },
56 #[from]
56 #[from]
57 ConfigParseError(ConfigParseError),
57 ConfigParseError(ConfigParseError),
58 #[from]
58 #[from]
59 Other(HgError),
59 Other(HgError),
60 }
60 }
61
61
62 impl From<ConfigError> for RepoError {
62 impl From<ConfigError> for RepoError {
63 fn from(error: ConfigError) -> Self {
63 fn from(error: ConfigError) -> Self {
64 match error {
64 match error {
65 ConfigError::Parse(error) => error.into(),
65 ConfigError::Parse(error) => error.into(),
66 ConfigError::Other(error) => error.into(),
66 ConfigError::Other(error) => error.into(),
67 }
67 }
68 }
68 }
69 }
69 }
70
70
71 impl Repo {
71 impl Repo {
72 /// Tries to find the nearest repository root in the current working directory
72 /// Tries to find the nearest repository root in the current working directory
73 /// or its ancestors.
73 /// or its ancestors.
74 pub fn find_repo_root() -> Result<PathBuf, RepoError> {
74 pub fn find_repo_root() -> Result<PathBuf, RepoError> {
75 let current_directory = crate::utils::current_dir()?;
75 let current_directory = crate::utils::current_dir()?;
76 // ancestors() is inclusive: it first yields `current_directory`
76 // ancestors() is inclusive: it first yields `current_directory`
77 // as-is.
77 // as-is.
78 for ancestor in current_directory.ancestors() {
78 for ancestor in current_directory.ancestors() {
79 if is_dir(ancestor.join(".hg"))? {
79 if is_dir(ancestor.join(".hg"))? {
80 return Ok(ancestor.to_path_buf());
80 return Ok(ancestor.to_path_buf());
81 }
81 }
82 }
82 }
83 Err(RepoError::NotFound {
83 Err(RepoError::NotFound {
84 at: current_directory,
84 at: current_directory,
85 })
85 })
86 }
86 }
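// The inclusivity noted above comes straight from the standard library:
// `Path::ancestors` yields the path itself first, then each parent.
// A small illustrative test (not part of this change):
#[test]
fn path_ancestors_is_inclusive() {
    use std::path::Path;
    let dir = Path::new("/repo/sub/dir");
    let ancestors: Vec<&Path> = dir.ancestors().collect();
    assert_eq!(
        ancestors,
        [
            Path::new("/repo/sub/dir"),
            Path::new("/repo/sub"),
            Path::new("/repo"),
            Path::new("/"),
        ]
    );
}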
87
87
88 /// Find a repository, either at the given path (which must contain a `.hg`
88 /// Find a repository, either at the given path (which must contain a `.hg`
89 /// sub-directory) or by searching the current directory and its
89 /// sub-directory) or by searching the current directory and its
90 /// ancestors.
90 /// ancestors.
91 ///
91 ///
92 /// A method with two very different "modes" like this is usually a code smell
92 /// A method with two very different "modes" like this is usually a code smell
93 /// better split into two methods, but in this case an `Option` is what rhg
93 /// better split into two methods, but in this case an `Option` is what rhg
94 /// sub-commands get from Clap for the `-R` / `--repository` CLI argument.
94 /// sub-commands get from Clap for the `-R` / `--repository` CLI argument.
95 /// Having two methods would just move that `if` to almost all callers.
95 /// Having two methods would just move that `if` to almost all callers.
96 pub fn find(
96 pub fn find(
97 config: &Config,
97 config: &Config,
98 explicit_path: Option<PathBuf>,
98 explicit_path: Option<PathBuf>,
99 ) -> Result<Self, RepoError> {
99 ) -> Result<Self, RepoError> {
100 if let Some(root) = explicit_path {
100 if let Some(root) = explicit_path {
101 if is_dir(root.join(".hg"))? {
101 if is_dir(root.join(".hg"))? {
102 Self::new_at_path(root, config)
102 Self::new_at_path(root, config)
103 } else if is_file(&root)? {
103 } else if is_file(&root)? {
104 Err(HgError::unsupported("bundle repository").into())
104 Err(HgError::unsupported("bundle repository").into())
105 } else {
105 } else {
106 Err(RepoError::NotFound { at: root })
106 Err(RepoError::NotFound { at: root })
107 }
107 }
108 } else {
108 } else {
109 let root = Self::find_repo_root()?;
109 let root = Self::find_repo_root()?;
110 Self::new_at_path(root, config)
110 Self::new_at_path(root, config)
111 }
111 }
112 }
112 }
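// A hedged sketch of the calling convention described above; the function
// name and argument are illustrative only.
fn open_repo(
    config: &Config,
    cli_repo_arg: Option<PathBuf>,
) -> Result<Repo, RepoError> {
    // `Some(path)` corresponds to `-R`/`--repository` (the path must
    // contain `.hg`); `None` searches the current directory and its
    // ancestors via `find_repo_root`.
    Repo::find(config, cli_repo_arg)
}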
113
113
114 /// To be called after checking that `.hg` is a sub-directory
114 /// To be called after checking that `.hg` is a sub-directory
115 fn new_at_path(
115 fn new_at_path(
116 working_directory: PathBuf,
116 working_directory: PathBuf,
117 config: &Config,
117 config: &Config,
118 ) -> Result<Self, RepoError> {
118 ) -> Result<Self, RepoError> {
119 let dot_hg = working_directory.join(".hg");
119 let dot_hg = working_directory.join(".hg");
120
120
121 let mut repo_config_files =
121 let mut repo_config_files =
122 vec![dot_hg.join("hgrc"), dot_hg.join("hgrc-not-shared")];
122 vec![dot_hg.join("hgrc"), dot_hg.join("hgrc-not-shared")];
123
123
124 let hg_vfs = Vfs { base: &dot_hg };
124 let hg_vfs = VfsImpl {
125 let mut reqs = requirements::load_if_exists(hg_vfs)?;
125 base: dot_hg.to_owned(),
126 };
127 let mut reqs = requirements::load_if_exists(&hg_vfs)?;
126 let relative =
128 let relative =
127 reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
129 reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
128 let shared =
130 let shared =
129 reqs.contains(requirements::SHARED_REQUIREMENT) || relative;
131 reqs.contains(requirements::SHARED_REQUIREMENT) || relative;
130
132
131 // From `mercurial/localrepo.py`:
133 // From `mercurial/localrepo.py`:
132 //
134 //
133 // if .hg/requires contains the sharesafe requirement, it means
135 // if .hg/requires contains the sharesafe requirement, it means
134 // there exists a `.hg/store/requires` too and we should read it
136 // there exists a `.hg/store/requires` too and we should read it
135 // NOTE: presence of SHARESAFE_REQUIREMENT implies that store requirement
137 // NOTE: presence of SHARESAFE_REQUIREMENT implies that store requirement
136 // is present. We never write SHARESAFE_REQUIREMENT for a repo if store
138 // is present. We never write SHARESAFE_REQUIREMENT for a repo if store
137 // is not present; refer to checkrequirementscompat() for that
139 // is not present; refer to checkrequirementscompat() for that
138 //
140 //
139 // However, if SHARESAFE_REQUIREMENT is not present, it means that the
141 // However, if SHARESAFE_REQUIREMENT is not present, it means that the
140 // repository was shared the old way. We check the share source
142 // repository was shared the old way. We check the share source
141 // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the
143 // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the
142 // current repository needs to be reshared
144 // current repository needs to be reshared
143 let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT);
145 let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT);
144
146
145 let store_path;
147 let store_path;
146 if !shared {
148 if !shared {
147 store_path = dot_hg.join("store");
149 store_path = dot_hg.join("store");
148 } else {
150 } else {
149 let bytes = hg_vfs.read("sharedpath")?;
151 let bytes = hg_vfs.read("sharedpath")?;
150 let mut shared_path =
152 let mut shared_path =
151 get_path_from_bytes(bytes.trim_end_matches(|b| b == b'\n'))
153 get_path_from_bytes(bytes.trim_end_matches(|b| b == b'\n'))
152 .to_owned();
154 .to_owned();
153 if relative {
155 if relative {
154 shared_path = dot_hg.join(shared_path)
156 shared_path = dot_hg.join(shared_path)
155 }
157 }
156 if !is_dir(&shared_path)? {
158 if !is_dir(&shared_path)? {
157 return Err(HgError::corrupted(format!(
159 return Err(HgError::corrupted(format!(
158 ".hg/sharedpath points to nonexistent directory {}",
160 ".hg/sharedpath points to nonexistent directory {}",
159 shared_path.display()
161 shared_path.display()
160 ))
162 ))
161 .into());
163 .into());
162 }
164 }
163
165
164 store_path = shared_path.join("store");
166 store_path = shared_path.join("store");
165
167
166 let source_is_share_safe =
168 let source_is_share_safe = requirements::load(VfsImpl {
167 requirements::load(Vfs { base: &shared_path })?
169 base: shared_path.to_owned(),
168 .contains(requirements::SHARESAFE_REQUIREMENT);
170 })?
171 .contains(requirements::SHARESAFE_REQUIREMENT);
169
172
170 if share_safe != source_is_share_safe {
173 if share_safe != source_is_share_safe {
171 return Err(HgError::unsupported("share-safe mismatch").into());
174 return Err(HgError::unsupported("share-safe mismatch").into());
172 }
175 }
173
176
174 if share_safe {
177 if share_safe {
175 repo_config_files.insert(0, shared_path.join("hgrc"))
178 repo_config_files.insert(0, shared_path.join("hgrc"))
176 }
179 }
177 }
180 }
178 if share_safe {
181 if share_safe {
179 reqs.extend(requirements::load(Vfs { base: &store_path })?);
182 reqs.extend(requirements::load(VfsImpl {
183 base: store_path.to_owned(),
184 })?);
180 }
185 }
181
186
182 let repo_config = if std::env::var_os("HGRCSKIPREPO").is_none() {
187 let repo_config = if std::env::var_os("HGRCSKIPREPO").is_none() {
183 config.combine_with_repo(&repo_config_files)?
188 config.combine_with_repo(&repo_config_files)?
184 } else {
189 } else {
185 config.clone()
190 config.clone()
186 };
191 };
187
192
188 let repo = Self {
193 let repo = Self {
189 requirements: reqs,
194 requirements: reqs,
190 working_directory,
195 working_directory,
191 store: store_path,
196 store: store_path,
192 dot_hg,
197 dot_hg,
193 config: repo_config,
198 config: repo_config,
194 dirstate_parents: LazyCell::new(),
199 dirstate_parents: LazyCell::new(),
195 dirstate_map: LazyCell::new(),
200 dirstate_map: LazyCell::new(),
196 changelog: LazyCell::new(),
201 changelog: LazyCell::new(),
197 manifestlog: LazyCell::new(),
202 manifestlog: LazyCell::new(),
198 };
203 };
199
204
200 requirements::check(&repo)?;
205 requirements::check(&repo)?;
201
206
202 Ok(repo)
207 Ok(repo)
203 }
208 }
204
209
205 pub fn working_directory_path(&self) -> &Path {
210 pub fn working_directory_path(&self) -> &Path {
206 &self.working_directory
211 &self.working_directory
207 }
212 }
208
213
209 pub fn requirements(&self) -> &HashSet<String> {
214 pub fn requirements(&self) -> &HashSet<String> {
210 &self.requirements
215 &self.requirements
211 }
216 }
212
217
213 pub fn config(&self) -> &Config {
218 pub fn config(&self) -> &Config {
214 &self.config
219 &self.config
215 }
220 }
216
221
217 /// For accessing repository files (in `.hg`), except for the store
222 /// For accessing repository files (in `.hg`), except for the store
218 /// (`.hg/store`).
223 /// (`.hg/store`).
219 pub fn hg_vfs(&self) -> Vfs<'_> {
224 pub fn hg_vfs(&self) -> VfsImpl {
220 Vfs { base: &self.dot_hg }
225 VfsImpl {
226 base: self.dot_hg.to_owned(),
227 }
221 }
228 }
222
229
223 /// For accessing repository store files (in `.hg/store`)
230 /// For accessing repository store files (in `.hg/store`)
224 pub fn store_vfs(&self) -> Vfs<'_> {
231 pub fn store_vfs(&self) -> VfsImpl {
225 Vfs { base: &self.store }
232 VfsImpl {
233 base: self.store.to_owned(),
234 }
226 }
235 }
227
236
228 /// For accessing the working copy
237 /// For accessing the working copy
229 pub fn working_directory_vfs(&self) -> Vfs<'_> {
238 pub fn working_directory_vfs(&self) -> VfsImpl {
230 Vfs {
239 VfsImpl {
231 base: &self.working_directory,
240 base: self.working_directory.to_owned(),
232 }
241 }
233 }
242 }
234
243
235 pub fn try_with_wlock_no_wait<R>(
244 pub fn try_with_wlock_no_wait<R>(
236 &self,
245 &self,
237 f: impl FnOnce() -> R,
246 f: impl FnOnce() -> R,
238 ) -> Result<R, LockError> {
247 ) -> Result<R, LockError> {
239 try_with_lock_no_wait(self.hg_vfs(), "wlock", f)
248 try_with_lock_no_wait(&self.hg_vfs(), "wlock", f)
240 }
249 }
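// A usage sketch (see `write_dirstate` further down, which requires the
// wlock to be held); error handling here is illustrative only.
fn checked_dirstate_write(repo: &Repo) -> Result<(), LockError> {
    // If another process holds `.hg/wlock`, this returns
    // `Err(LockError::AlreadyHeld)` instead of blocking.
    let write_result = repo.try_with_wlock_no_wait(|| repo.write_dirstate())?;
    // The closure's own result is forwarded separately from the lock result.
    if write_result.is_err() {
        eprintln!("dirstate write failed");
    }
    Ok(())
}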
241
250
242 /// Whether this repo should use dirstate-v2.
251 /// Whether this repo should use dirstate-v2.
243 /// The presence of `dirstate-v2` in the requirements does not mean that
252 /// The presence of `dirstate-v2` in the requirements does not mean that
244 /// the on-disk dirstate is necessarily in version 2. In most cases,
253 /// the on-disk dirstate is necessarily in version 2. In most cases,
245 /// a dirstate-v2 file will indeed be found, but in rare cases (like the
254 /// a dirstate-v2 file will indeed be found, but in rare cases (like the
246 /// upgrade mechanism being cut short), the on-disk version will be a
255 /// upgrade mechanism being cut short), the on-disk version will be a
247 /// v1 file.
256 /// v1 file.
248 /// Semantically, having a requirement only means that a client cannot
257 /// Semantically, having a requirement only means that a client cannot
249 /// properly understand or properly update the repo if it lacks the support
258 /// properly understand or properly update the repo if it lacks the support
250 /// for the required feature, but not that that feature is actually used
259 /// for the required feature, but not that that feature is actually used
251 /// in all occasions.
260 /// in all occasions.
252 pub fn use_dirstate_v2(&self) -> bool {
261 pub fn use_dirstate_v2(&self) -> bool {
253 self.requirements
262 self.requirements
254 .contains(requirements::DIRSTATE_V2_REQUIREMENT)
263 .contains(requirements::DIRSTATE_V2_REQUIREMENT)
255 }
264 }
256
265
257 pub fn has_sparse(&self) -> bool {
266 pub fn has_sparse(&self) -> bool {
258 self.requirements.contains(requirements::SPARSE_REQUIREMENT)
267 self.requirements.contains(requirements::SPARSE_REQUIREMENT)
259 }
268 }
260
269
261 pub fn has_narrow(&self) -> bool {
270 pub fn has_narrow(&self) -> bool {
262 self.requirements.contains(requirements::NARROW_REQUIREMENT)
271 self.requirements.contains(requirements::NARROW_REQUIREMENT)
263 }
272 }
264
273
265 pub fn has_nodemap(&self) -> bool {
274 pub fn has_nodemap(&self) -> bool {
266 self.requirements
275 self.requirements
267 .contains(requirements::NODEMAP_REQUIREMENT)
276 .contains(requirements::NODEMAP_REQUIREMENT)
268 }
277 }
269
278
270 fn dirstate_file_contents(&self) -> Result<Vec<u8>, HgError> {
279 fn dirstate_file_contents(&self) -> Result<Vec<u8>, HgError> {
271 Ok(self
280 Ok(self
272 .hg_vfs()
281 .hg_vfs()
273 .read("dirstate")
282 .read("dirstate")
274 .io_not_found_as_none()?
283 .io_not_found_as_none()?
275 .unwrap_or_default())
284 .unwrap_or_default())
276 }
285 }
277
286
278 fn dirstate_identity(&self) -> Result<Option<u64>, HgError> {
287 fn dirstate_identity(&self) -> Result<Option<u64>, HgError> {
279 use std::os::unix::fs::MetadataExt;
288 use std::os::unix::fs::MetadataExt;
280 Ok(self
289 Ok(self
281 .hg_vfs()
290 .hg_vfs()
282 .symlink_metadata("dirstate")
291 .symlink_metadata("dirstate")
283 .io_not_found_as_none()?
292 .io_not_found_as_none()?
284 .map(|meta| meta.ino()))
293 .map(|meta| meta.ino()))
285 }
294 }
286
295
287 pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
296 pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
288 Ok(*self
297 Ok(*self
289 .dirstate_parents
298 .dirstate_parents
290 .get_or_init(|| self.read_dirstate_parents())?)
299 .get_or_init(|| self.read_dirstate_parents())?)
291 }
300 }
292
301
293 fn read_dirstate_parents(&self) -> Result<DirstateParents, HgError> {
302 fn read_dirstate_parents(&self) -> Result<DirstateParents, HgError> {
294 let dirstate = self.dirstate_file_contents()?;
303 let dirstate = self.dirstate_file_contents()?;
295 let parents = if dirstate.is_empty() {
304 let parents = if dirstate.is_empty() {
296 DirstateParents::NULL
305 DirstateParents::NULL
297 } else if self.use_dirstate_v2() {
306 } else if self.use_dirstate_v2() {
298 let docket_res =
307 let docket_res =
299 crate::dirstate_tree::on_disk::read_docket(&dirstate);
308 crate::dirstate_tree::on_disk::read_docket(&dirstate);
300 match docket_res {
309 match docket_res {
301 Ok(docket) => docket.parents(),
310 Ok(docket) => docket.parents(),
302 Err(_) => {
311 Err(_) => {
303 log::info!(
312 log::info!(
304 "Parsing dirstate docket failed, \
313 "Parsing dirstate docket failed, \
305 falling back to dirstate-v1"
314 falling back to dirstate-v1"
306 );
315 );
307 *crate::dirstate::parsers::parse_dirstate_parents(
316 *crate::dirstate::parsers::parse_dirstate_parents(
308 &dirstate,
317 &dirstate,
309 )?
318 )?
310 }
319 }
311 }
320 }
312 } else {
321 } else {
313 *crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
322 *crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
314 };
323 };
315 self.dirstate_parents.set(parents);
324 self.dirstate_parents.set(parents);
316 Ok(parents)
325 Ok(parents)
317 }
326 }
318
327
319 /// Returns the information read from the dirstate docket necessary to
328 /// Returns the information read from the dirstate docket necessary to
320 /// check if the data file has been updated/deleted by another process
329 /// check if the data file has been updated/deleted by another process
321 /// since we last read the dirstate.
330 /// since we last read the dirstate.
322 /// Namely, the inode, data file uuid and the data size.
331 /// Namely, the inode, data file uuid and the data size.
323 fn get_dirstate_data_file_integrity(
332 fn get_dirstate_data_file_integrity(
324 &self,
333 &self,
325 ) -> Result<DirstateMapIdentity, HgError> {
334 ) -> Result<DirstateMapIdentity, HgError> {
326 assert!(
335 assert!(
327 self.use_dirstate_v2(),
336 self.use_dirstate_v2(),
328 "accessing dirstate data file ID without dirstate-v2"
337 "accessing dirstate data file ID without dirstate-v2"
329 );
338 );
330 // Get the identity before the contents since we could have a race
339 // Get the identity before the contents since we could have a race
331 // between the two. Having an identity that is too old is fine, but
340 // between the two. Having an identity that is too old is fine, but
332 // one that is younger than the content change is bad.
341 // one that is younger than the content change is bad.
333 let identity = self.dirstate_identity()?;
342 let identity = self.dirstate_identity()?;
334 let dirstate = self.dirstate_file_contents()?;
343 let dirstate = self.dirstate_file_contents()?;
335 if dirstate.is_empty() {
344 if dirstate.is_empty() {
336 self.dirstate_parents.set(DirstateParents::NULL);
345 self.dirstate_parents.set(DirstateParents::NULL);
337 Ok((identity, None, 0))
346 Ok((identity, None, 0))
338 } else {
347 } else {
339 let docket_res =
348 let docket_res =
340 crate::dirstate_tree::on_disk::read_docket(&dirstate);
349 crate::dirstate_tree::on_disk::read_docket(&dirstate);
341 match docket_res {
350 match docket_res {
342 Ok(docket) => {
351 Ok(docket) => {
343 self.dirstate_parents.set(docket.parents());
352 self.dirstate_parents.set(docket.parents());
344 Ok((
353 Ok((
345 identity,
354 identity,
346 Some(docket.uuid.to_owned()),
355 Some(docket.uuid.to_owned()),
347 docket.data_size(),
356 docket.data_size(),
348 ))
357 ))
349 }
358 }
350 Err(_) => {
359 Err(_) => {
351 log::info!(
360 log::info!(
352 "Parsing dirstate docket failed, \
361 "Parsing dirstate docket failed, \
353 falling back to dirstate-v1"
362 falling back to dirstate-v1"
354 );
363 );
355 let parents =
364 let parents =
356 *crate::dirstate::parsers::parse_dirstate_parents(
365 *crate::dirstate::parsers::parse_dirstate_parents(
357 &dirstate,
366 &dirstate,
358 )?;
367 )?;
359 self.dirstate_parents.set(parents);
368 self.dirstate_parents.set(parents);
360 Ok((identity, None, 0))
369 Ok((identity, None, 0))
361 }
370 }
362 }
371 }
363 }
372 }
364 }
373 }
365
374
366 fn new_dirstate_map(&self) -> Result<OwningDirstateMap, DirstateError> {
375 fn new_dirstate_map(&self) -> Result<OwningDirstateMap, DirstateError> {
367 if self.use_dirstate_v2() {
376 if self.use_dirstate_v2() {
368 // The v2 dirstate is split into a docket and a data file.
377 // The v2 dirstate is split into a docket and a data file.
369 // Since we don't always take the `wlock` to read it
378 // Since we don't always take the `wlock` to read it
370 // (like in `hg status`), it is susceptible to races.
379 // (like in `hg status`), it is susceptible to races.
371 // A simple retry method should be enough since full rewrites
380 // A simple retry method should be enough since full rewrites
372 // only happen when too much garbage data is present and
381 // only happen when too much garbage data is present and
373 // this race is unlikely.
382 // this race is unlikely.
374 let mut tries = 0;
383 let mut tries = 0;
375
384
376 while tries < V2_MAX_READ_ATTEMPTS {
385 while tries < V2_MAX_READ_ATTEMPTS {
377 tries += 1;
386 tries += 1;
378 match self.read_docket_and_data_file() {
387 match self.read_docket_and_data_file() {
379 Ok(m) => {
388 Ok(m) => {
380 return Ok(m);
389 return Ok(m);
381 }
390 }
382 Err(e) => match e {
391 Err(e) => match e {
383 DirstateError::Common(HgError::RaceDetected(
392 DirstateError::Common(HgError::RaceDetected(
384 context,
393 context,
385 )) => {
394 )) => {
386 log::info!(
395 log::info!(
387 "dirstate read race detected {} (retry {}/{})",
396 "dirstate read race detected {} (retry {}/{})",
388 context,
397 context,
389 tries,
398 tries,
390 V2_MAX_READ_ATTEMPTS,
399 V2_MAX_READ_ATTEMPTS,
391 );
400 );
392 continue;
401 continue;
393 }
402 }
394 _ => {
403 _ => {
395 log::info!(
404 log::info!(
396 "Reading dirstate v2 failed, \
405 "Reading dirstate v2 failed, \
397 falling back to v1"
406 falling back to v1"
398 );
407 );
399 return self.new_dirstate_map_v1();
408 return self.new_dirstate_map_v1();
400 }
409 }
401 },
410 },
402 }
411 }
403 }
412 }
404 let error = HgError::abort(
413 let error = HgError::abort(
405 format!("dirstate read race happened {tries} times in a row"),
414 format!("dirstate read race happened {tries} times in a row"),
406 255,
415 255,
407 None,
416 None,
408 );
417 );
409 Err(DirstateError::Common(error))
418 Err(DirstateError::Common(error))
410 } else {
419 } else {
411 self.new_dirstate_map_v1()
420 self.new_dirstate_map_v1()
412 }
421 }
413 }
422 }
414
423
415 fn new_dirstate_map_v1(&self) -> Result<OwningDirstateMap, DirstateError> {
424 fn new_dirstate_map_v1(&self) -> Result<OwningDirstateMap, DirstateError> {
416 debug_wait_for_file_or_print(self.config(), "dirstate.pre-read-file");
425 debug_wait_for_file_or_print(self.config(), "dirstate.pre-read-file");
417 let identity = self.dirstate_identity()?;
426 let identity = self.dirstate_identity()?;
418 let dirstate_file_contents = self.dirstate_file_contents()?;
427 let dirstate_file_contents = self.dirstate_file_contents()?;
419 if dirstate_file_contents.is_empty() {
428 if dirstate_file_contents.is_empty() {
420 self.dirstate_parents.set(DirstateParents::NULL);
429 self.dirstate_parents.set(DirstateParents::NULL);
421 Ok(OwningDirstateMap::new_empty(Vec::new()))
430 Ok(OwningDirstateMap::new_empty(Vec::new()))
422 } else {
431 } else {
423 let (map, parents) =
432 let (map, parents) =
424 OwningDirstateMap::new_v1(dirstate_file_contents, identity)?;
433 OwningDirstateMap::new_v1(dirstate_file_contents, identity)?;
425 self.dirstate_parents.set(parents);
434 self.dirstate_parents.set(parents);
426 Ok(map)
435 Ok(map)
427 }
436 }
428 }
437 }
429
438
430 fn read_docket_and_data_file(
439 fn read_docket_and_data_file(
431 &self,
440 &self,
432 ) -> Result<OwningDirstateMap, DirstateError> {
441 ) -> Result<OwningDirstateMap, DirstateError> {
433 debug_wait_for_file_or_print(self.config(), "dirstate.pre-read-file");
442 debug_wait_for_file_or_print(self.config(), "dirstate.pre-read-file");
434 let dirstate_file_contents = self.dirstate_file_contents()?;
443 let dirstate_file_contents = self.dirstate_file_contents()?;
435 let identity = self.dirstate_identity()?;
444 let identity = self.dirstate_identity()?;
436 if dirstate_file_contents.is_empty() {
445 if dirstate_file_contents.is_empty() {
437 self.dirstate_parents.set(DirstateParents::NULL);
446 self.dirstate_parents.set(DirstateParents::NULL);
438 return Ok(OwningDirstateMap::new_empty(Vec::new()));
447 return Ok(OwningDirstateMap::new_empty(Vec::new()));
439 }
448 }
440 let docket = crate::dirstate_tree::on_disk::read_docket(
449 let docket = crate::dirstate_tree::on_disk::read_docket(
441 &dirstate_file_contents,
450 &dirstate_file_contents,
442 )?;
451 )?;
443 debug_wait_for_file_or_print(
452 debug_wait_for_file_or_print(
444 self.config(),
453 self.config(),
445 "dirstate.post-docket-read-file",
454 "dirstate.post-docket-read-file",
446 );
455 );
447 self.dirstate_parents.set(docket.parents());
456 self.dirstate_parents.set(docket.parents());
448 let uuid = docket.uuid.to_owned();
457 let uuid = docket.uuid.to_owned();
449 let data_size = docket.data_size();
458 let data_size = docket.data_size();
450
459
451 let context = "between reading dirstate docket and data file";
460 let context = "between reading dirstate docket and data file";
452 let race_error = HgError::RaceDetected(context.into());
461 let race_error = HgError::RaceDetected(context.into());
453 let metadata = docket.tree_metadata();
462 let metadata = docket.tree_metadata();
454
463
455 let mut map = if crate::vfs::is_on_nfs_mount(docket.data_filename()) {
464 let mut map = if crate::vfs::is_on_nfs_mount(docket.data_filename()) {
456 // Don't mmap on NFS to prevent `SIGBUS` error on deletion
465 // Don't mmap on NFS to prevent `SIGBUS` error on deletion
457 let contents = self.hg_vfs().read(docket.data_filename());
466 let contents = self.hg_vfs().read(docket.data_filename());
458 let contents = match contents {
467 let contents = match contents {
459 Ok(c) => c,
468 Ok(c) => c,
460 Err(HgError::IoError { error, context }) => {
469 Err(HgError::IoError { error, context }) => {
461 match error.raw_os_error().expect("real os error") {
470 match error.raw_os_error().expect("real os error") {
462 // 2 = ENOENT, No such file or directory
471 // 2 = ENOENT, No such file or directory
463 // 116 = ESTALE, Stale NFS file handle
472 // 116 = ESTALE, Stale NFS file handle
464 //
473 //
465 // TODO match on `error.kind()` when
474 // TODO match on `error.kind()` when
466 // `ErrorKind::StaleNetworkFileHandle` is stable.
475 // `ErrorKind::StaleNetworkFileHandle` is stable.
467 2 | 116 => {
476 2 | 116 => {
468 // Race where the data file was deleted right after
477 // Race where the data file was deleted right after
469 // we read the docket, try again
478 // we read the docket, try again
470 return Err(race_error.into());
479 return Err(race_error.into());
471 }
480 }
472 _ => {
481 _ => {
473 return Err(
482 return Err(
474 HgError::IoError { error, context }.into()
483 HgError::IoError { error, context }.into()
475 )
484 )
476 }
485 }
477 }
486 }
478 }
487 }
479 Err(e) => return Err(e.into()),
488 Err(e) => return Err(e.into()),
480 };
489 };
481 OwningDirstateMap::new_v2(
490 OwningDirstateMap::new_v2(
482 contents, data_size, metadata, uuid, identity,
491 contents, data_size, metadata, uuid, identity,
483 )
492 )
484 } else {
493 } else {
485 match self
494 match self
486 .hg_vfs()
495 .hg_vfs()
487 .mmap_open(docket.data_filename())
496 .mmap_open(docket.data_filename())
488 .io_not_found_as_none()
497 .io_not_found_as_none()
489 {
498 {
490 Ok(Some(data_mmap)) => OwningDirstateMap::new_v2(
499 Ok(Some(data_mmap)) => OwningDirstateMap::new_v2(
491 data_mmap, data_size, metadata, uuid, identity,
500 data_mmap, data_size, metadata, uuid, identity,
492 ),
501 ),
493 Ok(None) => {
502 Ok(None) => {
494 // Race where the data file was deleted right after we
503 // Race where the data file was deleted right after we
495 // read the docket, try again
504 // read the docket, try again
496 return Err(race_error.into());
505 return Err(race_error.into());
497 }
506 }
498 Err(e) => return Err(e.into()),
507 Err(e) => return Err(e.into()),
499 }
508 }
500 }?;
509 }?;
501
510
502 let write_mode_config = self
511 let write_mode_config = self
503 .config()
512 .config()
504 .get_str(b"devel", b"dirstate.v2.data_update_mode")
513 .get_str(b"devel", b"dirstate.v2.data_update_mode")
505 .unwrap_or(Some("auto"))
514 .unwrap_or(Some("auto"))
506 .unwrap_or("auto"); // don't bother for devel options
515 .unwrap_or("auto"); // don't bother for devel options
507 let write_mode = match write_mode_config {
516 let write_mode = match write_mode_config {
508 "auto" => DirstateMapWriteMode::Auto,
517 "auto" => DirstateMapWriteMode::Auto,
509 "force-new" => DirstateMapWriteMode::ForceNewDataFile,
518 "force-new" => DirstateMapWriteMode::ForceNewDataFile,
510 "force-append" => DirstateMapWriteMode::ForceAppend,
519 "force-append" => DirstateMapWriteMode::ForceAppend,
511 _ => DirstateMapWriteMode::Auto,
520 _ => DirstateMapWriteMode::Auto,
512 };
521 };
513
522
514 map.with_dmap_mut(|m| m.set_write_mode(write_mode));
523 map.with_dmap_mut(|m| m.set_write_mode(write_mode));
515
524
516 Ok(map)
525 Ok(map)
517 }
526 }
518
527
519 pub fn dirstate_map(
528 pub fn dirstate_map(
520 &self,
529 &self,
521 ) -> Result<Ref<OwningDirstateMap>, DirstateError> {
530 ) -> Result<Ref<OwningDirstateMap>, DirstateError> {
522 self.dirstate_map.get_or_init(|| self.new_dirstate_map())
531 self.dirstate_map.get_or_init(|| self.new_dirstate_map())
523 }
532 }
524
533
525 pub fn dirstate_map_mut(
534 pub fn dirstate_map_mut(
526 &self,
535 &self,
527 ) -> Result<RefMut<OwningDirstateMap>, DirstateError> {
536 ) -> Result<RefMut<OwningDirstateMap>, DirstateError> {
528 self.dirstate_map
537 self.dirstate_map
529 .get_mut_or_init(|| self.new_dirstate_map())
538 .get_mut_or_init(|| self.new_dirstate_map())
530 }
539 }
531
540
532 fn new_changelog(&self) -> Result<Changelog, HgError> {
541 fn new_changelog(&self) -> Result<Changelog, HgError> {
533 Changelog::open(
542 Changelog::open(
534 &self.store_vfs(),
543 &self.store_vfs(),
535 self.default_revlog_options(RevlogType::Changelog)?,
544 self.default_revlog_options(RevlogType::Changelog)?,
536 )
545 )
537 }
546 }
538
547
539 pub fn changelog(&self) -> Result<Ref<Changelog>, HgError> {
548 pub fn changelog(&self) -> Result<Ref<Changelog>, HgError> {
540 self.changelog.get_or_init(|| self.new_changelog())
549 self.changelog.get_or_init(|| self.new_changelog())
541 }
550 }
542
551
543 pub fn changelog_mut(&self) -> Result<RefMut<Changelog>, HgError> {
552 pub fn changelog_mut(&self) -> Result<RefMut<Changelog>, HgError> {
544 self.changelog.get_mut_or_init(|| self.new_changelog())
553 self.changelog.get_mut_or_init(|| self.new_changelog())
545 }
554 }
546
555
547 fn new_manifestlog(&self) -> Result<Manifestlog, HgError> {
556 fn new_manifestlog(&self) -> Result<Manifestlog, HgError> {
548 Manifestlog::open(
557 Manifestlog::open(
549 &self.store_vfs(),
558 &self.store_vfs(),
550 self.default_revlog_options(RevlogType::Manifestlog)?,
559 self.default_revlog_options(RevlogType::Manifestlog)?,
551 )
560 )
552 }
561 }
553
562
554 pub fn manifestlog(&self) -> Result<Ref<Manifestlog>, HgError> {
563 pub fn manifestlog(&self) -> Result<Ref<Manifestlog>, HgError> {
555 self.manifestlog.get_or_init(|| self.new_manifestlog())
564 self.manifestlog.get_or_init(|| self.new_manifestlog())
556 }
565 }
557
566
558 pub fn manifestlog_mut(&self) -> Result<RefMut<Manifestlog>, HgError> {
567 pub fn manifestlog_mut(&self) -> Result<RefMut<Manifestlog>, HgError> {
559 self.manifestlog.get_mut_or_init(|| self.new_manifestlog())
568 self.manifestlog.get_mut_or_init(|| self.new_manifestlog())
560 }
569 }
561
570
562 /// Returns the manifest of the *changeset* with the given node ID
571 /// Returns the manifest of the *changeset* with the given node ID
563 pub fn manifest_for_node(
572 pub fn manifest_for_node(
564 &self,
573 &self,
565 node: impl Into<NodePrefix>,
574 node: impl Into<NodePrefix>,
566 ) -> Result<Manifest, RevlogError> {
575 ) -> Result<Manifest, RevlogError> {
567 self.manifestlog()?.data_for_node(
576 self.manifestlog()?.data_for_node(
568 self.changelog()?
577 self.changelog()?
569 .data_for_node(node.into())?
578 .data_for_node(node.into())?
570 .manifest_node()?
579 .manifest_node()?
571 .into(),
580 .into(),
572 )
581 )
573 }
582 }
574
583
575 /// Returns the manifest of the *changeset* with the given revision number
584 /// Returns the manifest of the *changeset* with the given revision number
576 pub fn manifest_for_rev(
585 pub fn manifest_for_rev(
577 &self,
586 &self,
578 revision: UncheckedRevision,
587 revision: UncheckedRevision,
579 ) -> Result<Manifest, RevlogError> {
588 ) -> Result<Manifest, RevlogError> {
580 self.manifestlog()?.data_for_node(
589 self.manifestlog()?.data_for_node(
581 self.changelog()?
590 self.changelog()?
582 .data_for_rev(revision)?
591 .data_for_rev(revision)?
583 .manifest_node()?
592 .manifest_node()?
584 .into(),
593 .into(),
585 )
594 )
586 }
595 }
587
596
588 pub fn has_subrepos(&self) -> Result<bool, DirstateError> {
597 pub fn has_subrepos(&self) -> Result<bool, DirstateError> {
589 if let Some(entry) = self.dirstate_map()?.get(HgPath::new(".hgsub"))? {
598 if let Some(entry) = self.dirstate_map()?.get(HgPath::new(".hgsub"))? {
590 Ok(entry.tracked())
599 Ok(entry.tracked())
591 } else {
600 } else {
592 Ok(false)
601 Ok(false)
593 }
602 }
594 }
603 }
595
604
596 pub fn filelog(&self, path: &HgPath) -> Result<Filelog, HgError> {
605 pub fn filelog(&self, path: &HgPath) -> Result<Filelog, HgError> {
597 Filelog::open(
606 Filelog::open(
598 self,
607 self,
599 path,
608 path,
600 self.default_revlog_options(RevlogType::Filelog)?,
609 self.default_revlog_options(RevlogType::Filelog)?,
601 )
610 )
602 }
611 }
603 /// Write to disk any updates that were made through `dirstate_map_mut`.
612 /// Write to disk any updates that were made through `dirstate_map_mut`.
604 ///
613 ///
605 /// The "wlock" must be held while calling this.
614 /// The "wlock" must be held while calling this.
606 /// See for example `try_with_wlock_no_wait`.
615 /// See for example `try_with_wlock_no_wait`.
607 ///
616 ///
608 /// TODO: have a `WritableRepo` type only accessible while holding the
617 /// TODO: have a `WritableRepo` type only accessible while holding the
609 /// lock?
618 /// lock?
610 pub fn write_dirstate(&self) -> Result<(), DirstateError> {
619 pub fn write_dirstate(&self) -> Result<(), DirstateError> {
611 let map = self.dirstate_map()?;
620 let map = self.dirstate_map()?;
612 // TODO: Maintain a `DirstateMap::dirty` flag, and return early here if
621 // TODO: Maintain a `DirstateMap::dirty` flag, and return early here if
613 // it’s unset
622 // it’s unset
614 let parents = self.dirstate_parents()?;
623 let parents = self.dirstate_parents()?;
615 let (packed_dirstate, old_uuid_to_remove) = if self.use_dirstate_v2() {
624 let (packed_dirstate, old_uuid_to_remove) = if self.use_dirstate_v2() {
616 let (identity, uuid, data_size) =
625 let (identity, uuid, data_size) =
617 self.get_dirstate_data_file_integrity()?;
626 self.get_dirstate_data_file_integrity()?;
618 let identity_changed = identity != map.old_identity();
627 let identity_changed = identity != map.old_identity();
619 let uuid_changed = uuid.as_deref() != map.old_uuid();
628 let uuid_changed = uuid.as_deref() != map.old_uuid();
620 let data_length_changed = data_size != map.old_data_size();
629 let data_length_changed = data_size != map.old_data_size();
621
630
622 if identity_changed || uuid_changed || data_length_changed {
631 if identity_changed || uuid_changed || data_length_changed {
623 // If any of identity, uuid or length have changed since
632 // If any of identity, uuid or length have changed since
624 // last disk read, don't write.
633 // last disk read, don't write.
625 // This is fine because either we're in a command that doesn't
634 // This is fine because either we're in a command that doesn't
626 // write anything too important (like `hg status`), or we're in
635 // write anything too important (like `hg status`), or we're in
627 // `hg add` and we're supposed to have taken the lock before
636 // `hg add` and we're supposed to have taken the lock before
628 // reading anyway.
637 // reading anyway.
629 //
638 //
630 // TODO complain loudly if we've changed anything important
639 // TODO complain loudly if we've changed anything important
631 // without taking the lock.
640 // without taking the lock.
632 // (see `hg help config.format.use-dirstate-tracked-hint`)
641 // (see `hg help config.format.use-dirstate-tracked-hint`)
633 log::debug!(
642 log::debug!(
634 "dirstate has changed since last read, not updating."
643 "dirstate has changed since last read, not updating."
635 );
644 );
636 return Ok(());
645 return Ok(());
637 }
646 }
638
647
639 let uuid_opt = map.old_uuid();
648 let uuid_opt = map.old_uuid();
640 let write_mode = if uuid_opt.is_some() {
649 let write_mode = if uuid_opt.is_some() {
641 DirstateMapWriteMode::Auto
650 DirstateMapWriteMode::Auto
642 } else {
651 } else {
643 DirstateMapWriteMode::ForceNewDataFile
652 DirstateMapWriteMode::ForceNewDataFile
644 };
653 };
645 let (data, tree_metadata, append, old_data_size) =
654 let (data, tree_metadata, append, old_data_size) =
646 map.pack_v2(write_mode)?;
655 map.pack_v2(write_mode)?;
647
656
648 // Reuse the uuid, or generate a new one, keeping the old for
657 // Reuse the uuid, or generate a new one, keeping the old for
649 // deletion.
658 // deletion.
650 let (uuid, old_uuid) = match uuid_opt {
659 let (uuid, old_uuid) = match uuid_opt {
651 Some(uuid) => {
660 Some(uuid) => {
652 let as_str = std::str::from_utf8(uuid)
661 let as_str = std::str::from_utf8(uuid)
653 .map_err(|_| {
662 .map_err(|_| {
654 HgError::corrupted(
663 HgError::corrupted(
655 "non-UTF-8 dirstate data file ID",
664 "non-UTF-8 dirstate data file ID",
656 )
665 )
657 })?
666 })?
658 .to_owned();
667 .to_owned();
659 if append {
668 if append {
660 (as_str, None)
669 (as_str, None)
661 } else {
670 } else {
662 (DirstateDocket::new_uid(), Some(as_str))
671 (DirstateDocket::new_uid(), Some(as_str))
663 }
672 }
664 }
673 }
665 None => (DirstateDocket::new_uid(), None),
674 None => (DirstateDocket::new_uid(), None),
666 };
675 };
667
676
668 let data_filename = format!("dirstate.{}", uuid);
677 let data_filename = format!("dirstate.{}", uuid);
669 let data_filename = self.hg_vfs().join(data_filename);
678 let data_filename = self.hg_vfs().join(data_filename);
670 let mut options = std::fs::OpenOptions::new();
679 let mut options = std::fs::OpenOptions::new();
671 options.write(true);
680 options.write(true);
672
681
673 // Why are we not using the O_APPEND flag when appending?
682 // Why are we not using the O_APPEND flag when appending?
674 //
683 //
675 // - O_APPEND makes it trickier to deal with garbage at the end of
684 // - O_APPEND makes it trickier to deal with garbage at the end of
676 // the file, left by a previous uncommitted transaction. By
685 // the file, left by a previous uncommitted transaction. By
677 // starting the write at [old_data_size] we make sure we erase
686 // starting the write at [old_data_size] we make sure we erase
678 // all such garbage.
687 // all such garbage.
679 //
688 //
680 // - O_APPEND requires special-casing 0-byte writes, whereas we
689 // - O_APPEND requires special-casing 0-byte writes, whereas we
681 // don't need that.
690 // don't need that.
682 //
691 //
683 // - Some OSes have bugs in their implementation of O_APPEND:
692 // - Some OSes have bugs in their implementation of O_APPEND:
684 // revlog.py talks about a Solaris bug, but we also saw some ZFS
693 // revlog.py talks about a Solaris bug, but we also saw some ZFS
685 // bug: https://github.com/openzfs/zfs/pull/3124,
694 // bug: https://github.com/openzfs/zfs/pull/3124,
686 // https://github.com/openzfs/zfs/issues/13370
695 // https://github.com/openzfs/zfs/issues/13370
687 //
696 //
688 if !append {
697 if !append {
689 log::trace!("creating a new dirstate data file");
698 log::trace!("creating a new dirstate data file");
690 options.create_new(true);
699 options.create_new(true);
691 } else {
700 } else {
692 log::trace!("appending to the dirstate data file");
701 log::trace!("appending to the dirstate data file");
693 }
702 }
694
703
695 let data_size = (|| {
704 let data_size = (|| {
696 // TODO: loop and try another random ID if !append and this
705 // TODO: loop and try another random ID if !append and this
697 // returns `ErrorKind::AlreadyExists`? Collision chance of two
706 // returns `ErrorKind::AlreadyExists`? Collision chance of two
698 // random IDs is one in 2**32
707 // random IDs is one in 2**32
699 let mut file = options.open(&data_filename)?;
708 let mut file = options.open(&data_filename)?;
700 if append {
709 if append {
701 file.seek(SeekFrom::Start(old_data_size as u64))?;
710 file.seek(SeekFrom::Start(old_data_size as u64))?;
702 }
711 }
703 file.write_all(&data)?;
712 file.write_all(&data)?;
704 file.flush()?;
713 file.flush()?;
705 file.stream_position()
714 file.stream_position()
706 })()
715 })()
707 .when_writing_file(&data_filename)?;
716 .when_writing_file(&data_filename)?;
708
717
709 let packed_dirstate = DirstateDocket::serialize(
718 let packed_dirstate = DirstateDocket::serialize(
710 parents,
719 parents,
711 tree_metadata,
720 tree_metadata,
712 data_size,
721 data_size,
713 uuid.as_bytes(),
722 uuid.as_bytes(),
714 )
723 )
715 .map_err(|_: std::num::TryFromIntError| {
724 .map_err(|_: std::num::TryFromIntError| {
716 HgError::corrupted("overflow in dirstate docket serialization")
725 HgError::corrupted("overflow in dirstate docket serialization")
717 })?;
726 })?;
718
727
719 (packed_dirstate, old_uuid)
728 (packed_dirstate, old_uuid)
720 } else {
729 } else {
721 let identity = self.dirstate_identity()?;
730 let identity = self.dirstate_identity()?;
722 if identity != map.old_identity() {
731 if identity != map.old_identity() {
723 // If identity changed since last disk read, don't write.
732 // If identity changed since last disk read, don't write.
724 // This is fine because either we're in a command that doesn't
733 // This is fine because either we're in a command that doesn't
725 // write anything too important (like `hg status`), or we're in
734 // write anything too important (like `hg status`), or we're in
726 // `hg add` and we're supposed to have taken the lock before
735 // `hg add` and we're supposed to have taken the lock before
727 // reading anyway.
736 // reading anyway.
728 //
737 //
729 // TODO complain loudly if we've changed anything important
738 // TODO complain loudly if we've changed anything important
730 // without taking the lock.
739 // without taking the lock.
731 // (see `hg help config.format.use-dirstate-tracked-hint`)
740 // (see `hg help config.format.use-dirstate-tracked-hint`)
732 log::debug!(
741 log::debug!(
733 "dirstate has changed since last read, not updating."
742 "dirstate has changed since last read, not updating."
734 );
743 );
735 return Ok(());
744 return Ok(());
736 }
745 }
737 (map.pack_v1(parents)?, None)
746 (map.pack_v1(parents)?, None)
738 };
747 };
739
748
740 let vfs = self.hg_vfs();
749 let vfs = self.hg_vfs();
741 vfs.atomic_write("dirstate", &packed_dirstate)?;
750 vfs.atomic_write("dirstate", &packed_dirstate)?;
742 if let Some(uuid) = old_uuid_to_remove {
751 if let Some(uuid) = old_uuid_to_remove {
743 // Remove the old data file after the new docket pointing to the
752 // Remove the old data file after the new docket pointing to the
744 // new data file was written.
753 // new data file was written.
745 vfs.remove_file(format!("dirstate.{}", uuid))?;
754 vfs.remove_file(format!("dirstate.{}", uuid))?;
746 }
755 }
747 Ok(())
756 Ok(())
748 }
757 }
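As a concrete illustration of the docket scheme handled above, here is a minimal sketch (not the real API; `data_file_name` is a hypothetical helper, the actual code inlines the `format!` call):

// Sketch: how the dirstate-v2 docket relates to its data file.
// `.hg/dirstate` (the docket) records the working copy parents, tree
// metadata, a `data_size`, and a uuid; `.hg/dirstate.<uuid>` holds the
// actual tree data, and only the first `data_size` bytes of it are valid.
fn data_file_name(uuid: &str) -> String {
    format!("dirstate.{}", uuid)
}

fn main() {
    let uuid = "3acd5f82"; // hypothetical uuid recorded in the docket
    assert_eq!(data_file_name(uuid), "dirstate.3acd5f82");
    // Rewrite path (append == false): write `dirstate.<new-uuid>`, atomically
    // replace the docket so it points at the new file, then remove the old
    // `dirstate.<old-uuid>`; this ordering keeps readers consistent.
}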
749
758
750 pub fn default_revlog_options(
759 pub fn default_revlog_options(
751 &self,
760 &self,
752 revlog_type: RevlogType,
761 revlog_type: RevlogType,
753 ) -> Result<RevlogOpenOptions, HgError> {
762 ) -> Result<RevlogOpenOptions, HgError> {
754 let requirements = self.requirements();
763 let requirements = self.requirements();
755 let is_changelog = revlog_type == RevlogType::Changelog;
764 let is_changelog = revlog_type == RevlogType::Changelog;
756 let version = if is_changelog
765 let version = if is_changelog
757 && requirements.contains(CHANGELOGV2_REQUIREMENT)
766 && requirements.contains(CHANGELOGV2_REQUIREMENT)
758 {
767 {
759 let compute_rank = self
768 let compute_rank = self
760 .config()
769 .config()
761 .get_bool(b"experimental", b"changelog-v2.compute-rank")?;
770 .get_bool(b"experimental", b"changelog-v2.compute-rank")?;
762 RevlogVersionOptions::ChangelogV2 { compute_rank }
771 RevlogVersionOptions::ChangelogV2 { compute_rank }
763 } else if requirements.contains(REVLOGV2_REQUIREMENT) {
772 } else if requirements.contains(REVLOGV2_REQUIREMENT) {
764 RevlogVersionOptions::V2
773 RevlogVersionOptions::V2
765 } else if requirements.contains(REVLOGV1_REQUIREMENT) {
774 } else if requirements.contains(REVLOGV1_REQUIREMENT) {
766 RevlogVersionOptions::V1 {
775 RevlogVersionOptions::V1 {
767 general_delta: requirements.contains(GENERALDELTA_REQUIREMENT),
776 general_delta: requirements.contains(GENERALDELTA_REQUIREMENT),
768 inline: !is_changelog,
777 inline: !is_changelog,
769 }
778 }
770 } else {
779 } else {
771 RevlogVersionOptions::V0
780 RevlogVersionOptions::V0
772 };
781 };
773 Ok(RevlogOpenOptions {
782 Ok(RevlogOpenOptions {
774 version,
783 version,
775 // We don't need to dance around the slow path like in the Python
784 // We don't need to dance around the slow path like in the Python
776 // implementation since we know we have access to the fast code.
785 // implementation since we know we have access to the fast code.
777 use_nodemap: requirements.contains(NODEMAP_REQUIREMENT),
786 use_nodemap: requirements.contains(NODEMAP_REQUIREMENT),
778 delta_config: RevlogDeltaConfig::new(
787 delta_config: RevlogDeltaConfig::new(
779 self.config(),
788 self.config(),
780 self.requirements(),
789 self.requirements(),
781 revlog_type,
790 revlog_type,
782 )?,
791 )?,
783 data_config: RevlogDataConfig::new(
792 data_config: RevlogDataConfig::new(
784 self.config(),
793 self.config(),
785 self.requirements(),
794 self.requirements(),
786 )?,
795 )?,
787 feature_config: RevlogFeatureConfig::new(
796 feature_config: RevlogFeatureConfig::new(
788 self.config(),
797 self.config(),
789 requirements,
798 requirements,
790 )?,
799 )?,
791 })
800 })
792 }
801 }
793 }
802 }
794
803
795 /// Lazily-initialized component of `Repo` with interior mutability
804 /// Lazily-initialized component of `Repo` with interior mutability
796 ///
805 ///
797 /// This differs from `OnceCell` in that the value can still be "deinitialized"
806 /// This differs from `OnceCell` in that the value can still be "deinitialized"
798 /// later by setting its inner `Option` to `None`. It also takes the
807 /// later by setting its inner `Option` to `None`. It also takes the
799 /// initialization function as an argument when the value is requested, not
808 /// initialization function as an argument when the value is requested, not
800 /// when the instance is created.
809 /// when the instance is created.
801 struct LazyCell<T> {
810 struct LazyCell<T> {
802 value: RefCell<Option<T>>,
811 value: RefCell<Option<T>>,
803 }
812 }
804
813
805 impl<T> LazyCell<T> {
814 impl<T> LazyCell<T> {
806 fn new() -> Self {
815 fn new() -> Self {
807 Self {
816 Self {
808 value: RefCell::new(None),
817 value: RefCell::new(None),
809 }
818 }
810 }
819 }
811
820
812 fn set(&self, value: T) {
821 fn set(&self, value: T) {
813 *self.value.borrow_mut() = Some(value)
822 *self.value.borrow_mut() = Some(value)
814 }
823 }
815
824
816 fn get_or_init<E>(
825 fn get_or_init<E>(
817 &self,
826 &self,
818 init: impl Fn() -> Result<T, E>,
827 init: impl Fn() -> Result<T, E>,
819 ) -> Result<Ref<T>, E> {
828 ) -> Result<Ref<T>, E> {
820 let mut borrowed = self.value.borrow();
829 let mut borrowed = self.value.borrow();
821 if borrowed.is_none() {
830 if borrowed.is_none() {
822 drop(borrowed);
831 drop(borrowed);
823 // Only use `borrow_mut` if it is really needed to avoid panic in
832 // Only use `borrow_mut` if it is really needed to avoid panic in
824 // case there is another outstanding borrow but mutation is not
833 // case there is another outstanding borrow but mutation is not
825 // needed.
834 // needed.
826 *self.value.borrow_mut() = Some(init()?);
835 *self.value.borrow_mut() = Some(init()?);
827 borrowed = self.value.borrow()
836 borrowed = self.value.borrow()
828 }
837 }
829 Ok(Ref::map(borrowed, |option| option.as_ref().unwrap()))
838 Ok(Ref::map(borrowed, |option| option.as_ref().unwrap()))
830 }
839 }
831
840
832 fn get_mut_or_init<E>(
841 fn get_mut_or_init<E>(
833 &self,
842 &self,
834 init: impl Fn() -> Result<T, E>,
843 init: impl Fn() -> Result<T, E>,
835 ) -> Result<RefMut<T>, E> {
844 ) -> Result<RefMut<T>, E> {
836 let mut borrowed = self.value.borrow_mut();
845 let mut borrowed = self.value.borrow_mut();
837 if borrowed.is_none() {
846 if borrowed.is_none() {
838 *borrowed = Some(init()?);
847 *borrowed = Some(init()?);
839 }
848 }
840 Ok(RefMut::map(borrowed, |option| option.as_mut().unwrap()))
849 Ok(RefMut::map(borrowed, |option| option.as_mut().unwrap()))
841 }
850 }
842 }
851 }
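To make the calling convention concrete, here is a minimal usage sketch (illustrative only; `LazyCell` is private to this module, real callers live elsewhere in this file, and the example assumes the `HgError` and `std::cell` imports already present here):

// Hypothetical holder type demonstrating `get_or_init` and `set`.
struct Example {
    text: LazyCell<String>,
}

impl Example {
    fn text(&self) -> Result<std::cell::Ref<String>, HgError> {
        // The init closure is supplied at the call site and only runs while
        // the inner `Option` is still `None` (first access, or after a reset).
        self.text
            .get_or_init(|| Ok(String::from("computed on first access")))
    }

    fn replace(&self, new_text: String) {
        // `set` overwrites whatever was cached, initialized or not.
        self.text.set(new_text);
    }
}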
@@ -1,183 +1,185
1 use crate::errors::{HgError, HgResultExt};
1 use crate::errors::{HgError, HgResultExt};
2 use crate::repo::Repo;
2 use crate::repo::Repo;
3 use crate::utils::join_display;
3 use crate::utils::join_display;
4 use crate::vfs::Vfs;
4 use crate::vfs::VfsImpl;
5 use std::collections::HashSet;
5 use std::collections::HashSet;
6
6
7 fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> {
7 fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> {
8 // The Python code reading this file uses `str.splitlines`
8 // The Python code reading this file uses `str.splitlines`
9 // which looks for a number of line separators (even including a couple of
9 // which looks for a number of line separators (even including a couple of
10 // non-ASCII ones), but Python code writing it always uses `\n`.
10 // non-ASCII ones), but Python code writing it always uses `\n`.
11 let lines = bytes.split(|&byte| byte == b'\n');
11 let lines = bytes.split(|&byte| byte == b'\n');
12
12
13 lines
13 lines
14 .filter(|line| !line.is_empty())
14 .filter(|line| !line.is_empty())
15 .map(|line| {
15 .map(|line| {
16 // Python uses Unicode `str.isalnum` but feature names are all
16 // Python uses Unicode `str.isalnum` but feature names are all
17 // ASCII
17 // ASCII
18 if line[0].is_ascii_alphanumeric() && line.is_ascii() {
18 if line[0].is_ascii_alphanumeric() && line.is_ascii() {
19 Ok(String::from_utf8(line.into()).unwrap())
19 Ok(String::from_utf8(line.into()).unwrap())
20 } else {
20 } else {
21 Err(HgError::corrupted("parse error in 'requires' file"))
21 Err(HgError::corrupted("parse error in 'requires' file"))
22 }
22 }
23 })
23 })
24 .collect()
24 .collect()
25 }
25 }
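A sketch of a test for the format handled by `parse` could look like this (illustrative module name; it could equally be folded into a tests module in this file): one ASCII feature name per line, empty lines skipped, anything else treated as corruption.

#[cfg(test)]
mod parse_example {
    use super::parse;

    #[test]
    fn requires_file_format() {
        // Typical contents of a `.hg/requires` file.
        let features =
            parse(b"revlogv1\nstore\nfncache\ndotencode\n").unwrap();
        assert_eq!(features.len(), 4);
        assert!(features.contains("dotencode"));
        // A line starting with a non-alphanumeric byte is a parse error.
        assert!(parse(b"\xffbogus\n").is_err());
    }
}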
26
26
27 pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
27 pub(crate) fn load(hg_vfs: VfsImpl) -> Result<HashSet<String>, HgError> {
28 parse(&hg_vfs.read("requires")?)
28 parse(&hg_vfs.read("requires")?)
29 }
29 }
30
30
31 pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
31 pub(crate) fn load_if_exists(
32 hg_vfs: &VfsImpl,
33 ) -> Result<HashSet<String>, HgError> {
32 if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? {
34 if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? {
33 parse(&bytes)
35 parse(&bytes)
34 } else {
36 } else {
35 // Treat a missing file the same as an empty file.
37 // Treat a missing file the same as an empty file.
36 // From `mercurial/localrepo.py`:
38 // From `mercurial/localrepo.py`:
37 // > requires file contains a newline-delimited list of
39 // > requires file contains a newline-delimited list of
38 // > features/capabilities the opener (us) must have in order to use
40 // > features/capabilities the opener (us) must have in order to use
39 // > the repository. This file was introduced in Mercurial 0.9.2,
41 // > the repository. This file was introduced in Mercurial 0.9.2,
40 // > which means very old repositories may not have one. We assume
42 // > which means very old repositories may not have one. We assume
41 // > a missing file translates to no requirements.
43 // > a missing file translates to no requirements.
42 Ok(HashSet::new())
44 Ok(HashSet::new())
43 }
45 }
44 }
46 }
45
47
46 pub(crate) fn check(repo: &Repo) -> Result<(), HgError> {
48 pub(crate) fn check(repo: &Repo) -> Result<(), HgError> {
47 let unknown: Vec<_> = repo
49 let unknown: Vec<_> = repo
48 .requirements()
50 .requirements()
49 .iter()
51 .iter()
50 .map(String::as_str)
52 .map(String::as_str)
51 // .filter(|feature| !ALL_SUPPORTED.contains(feature.as_str()))
53 // .filter(|feature| !ALL_SUPPORTED.contains(feature.as_str()))
52 .filter(|feature| {
54 .filter(|feature| {
53 !REQUIRED.contains(feature) && !SUPPORTED.contains(feature)
55 !REQUIRED.contains(feature) && !SUPPORTED.contains(feature)
54 })
56 })
55 .collect();
57 .collect();
56 if !unknown.is_empty() {
58 if !unknown.is_empty() {
57 return Err(HgError::unsupported(format!(
59 return Err(HgError::unsupported(format!(
58 "repository requires feature unknown to this Mercurial: {}",
60 "repository requires feature unknown to this Mercurial: {}",
59 join_display(&unknown, ", ")
61 join_display(&unknown, ", ")
60 )));
62 )));
61 }
63 }
62 let missing: Vec<_> = REQUIRED
64 let missing: Vec<_> = REQUIRED
63 .iter()
65 .iter()
64 .filter(|&&feature| !repo.requirements().contains(feature))
66 .filter(|&&feature| !repo.requirements().contains(feature))
65 .collect();
67 .collect();
66 if !missing.is_empty() {
68 if !missing.is_empty() {
67 return Err(HgError::unsupported(format!(
69 return Err(HgError::unsupported(format!(
68 "repository is missing feature required by this Mercurial: {}",
70 "repository is missing feature required by this Mercurial: {}",
69 join_display(&missing, ", ")
71 join_display(&missing, ", ")
70 )));
72 )));
71 }
73 }
72 Ok(())
74 Ok(())
73 }
75 }
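A small illustrative test of how entries end up classified by `check` against the `REQUIRED` and `SUPPORTED` lists defined below (the module and the "exp-some-future-format" string are hypothetical):

#[cfg(test)]
mod requirements_example {
    use super::{GENERALDELTA_REQUIREMENT, REQUIRED, SUPPORTED};

    #[test]
    fn classification_examples() {
        // "revlogv1" must be present, "generaldelta" is merely tolerated,
        // and an unrecognized name would be reported as unknown by `check`.
        assert!(REQUIRED.contains(&"revlogv1"));
        assert!(SUPPORTED.contains(&GENERALDELTA_REQUIREMENT));
        let future = "exp-some-future-format";
        assert!(!REQUIRED.contains(&future) && !SUPPORTED.contains(&future));
    }
}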
74
76
75 /// rhg does not support repositories that are *missing* any of these features
77 /// rhg does not support repositories that are *missing* any of these features
76 const REQUIRED: &[&str] = &["revlogv1", "store", "fncache", "dotencode"];
78 const REQUIRED: &[&str] = &["revlogv1", "store", "fncache", "dotencode"];
77
79
78 /// rhg supports repositories with or without these
80 /// rhg supports repositories with or without these
79 const SUPPORTED: &[&str] = &[
81 const SUPPORTED: &[&str] = &[
80 GENERALDELTA_REQUIREMENT,
82 GENERALDELTA_REQUIREMENT,
81 SHARED_REQUIREMENT,
83 SHARED_REQUIREMENT,
82 SHARESAFE_REQUIREMENT,
84 SHARESAFE_REQUIREMENT,
83 SPARSEREVLOG_REQUIREMENT,
85 SPARSEREVLOG_REQUIREMENT,
84 RELATIVE_SHARED_REQUIREMENT,
86 RELATIVE_SHARED_REQUIREMENT,
85 REVLOG_COMPRESSION_ZSTD,
87 REVLOG_COMPRESSION_ZSTD,
86 DIRSTATE_V2_REQUIREMENT,
88 DIRSTATE_V2_REQUIREMENT,
87 DIRSTATE_TRACKED_HINT_V1,
89 DIRSTATE_TRACKED_HINT_V1,
88 // As of this writing everything rhg does is read-only.
90 // As of this writing everything rhg does is read-only.
89 // When it starts writing to the repository, it’ll need to either keep the
91 // When it starts writing to the repository, it’ll need to either keep the
90 // persistent nodemap up to date or remove this entry:
92 // persistent nodemap up to date or remove this entry:
91 NODEMAP_REQUIREMENT,
93 NODEMAP_REQUIREMENT,
92 // Not all commands support `sparse` and `narrow`. The commands that do
94 // Not all commands support `sparse` and `narrow`. The commands that do
93 // not should opt out by checking `has_sparse` and `has_narrow`.
95 // not should opt out by checking `has_sparse` and `has_narrow`.
94 SPARSE_REQUIREMENT,
96 SPARSE_REQUIREMENT,
95 NARROW_REQUIREMENT,
97 NARROW_REQUIREMENT,
96 // rhg doesn't care about bookmarks at all yet
98 // rhg doesn't care about bookmarks at all yet
97 BOOKMARKS_IN_STORE_REQUIREMENT,
99 BOOKMARKS_IN_STORE_REQUIREMENT,
98 ];
100 ];
99
101
100 // Copied from mercurial/requirements.py:
102 // Copied from mercurial/requirements.py:
101
103
102 pub const DIRSTATE_V2_REQUIREMENT: &str = "dirstate-v2";
104 pub const DIRSTATE_V2_REQUIREMENT: &str = "dirstate-v2";
103 pub const GENERALDELTA_REQUIREMENT: &str = "generaldelta";
105 pub const GENERALDELTA_REQUIREMENT: &str = "generaldelta";
104
106
105 /// A repository that uses the tracked hint dirstate file
107 /// A repository that uses the tracked hint dirstate file
106 #[allow(unused)]
108 #[allow(unused)]
107 pub const DIRSTATE_TRACKED_HINT_V1: &str = "dirstate-tracked-key-v1";
109 pub const DIRSTATE_TRACKED_HINT_V1: &str = "dirstate-tracked-key-v1";
108
110
109 /// When narrowing is finalized and no longer subject to format changes,
111 /// When narrowing is finalized and no longer subject to format changes,
110 /// we should move this to just "narrow" or similar.
112 /// we should move this to just "narrow" or similar.
111 #[allow(unused)]
113 #[allow(unused)]
112 pub const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
114 pub const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
113
115
114 /// Bookmarks must be stored in the `store` part of the repository and will be
116 /// Bookmarks must be stored in the `store` part of the repository and will be
115 /// shared across shares
117 /// shared across shares
116 #[allow(unused)]
118 #[allow(unused)]
117 pub const BOOKMARKS_IN_STORE_REQUIREMENT: &str = "bookmarksinstore";
119 pub const BOOKMARKS_IN_STORE_REQUIREMENT: &str = "bookmarksinstore";
118
120
119 /// Enables sparse working directory usage
121 /// Enables sparse working directory usage
120 #[allow(unused)]
122 #[allow(unused)]
121 pub const SPARSE_REQUIREMENT: &str = "exp-sparse";
123 pub const SPARSE_REQUIREMENT: &str = "exp-sparse";
122
124
123 /// Enables the internal phase which is used to hide changesets instead
125 /// Enables the internal phase which is used to hide changesets instead
124 /// of stripping them
126 /// of stripping them
125 #[allow(unused)]
127 #[allow(unused)]
126 pub const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
128 pub const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
127
129
128 /// Stores manifest in Tree structure
130 /// Stores manifest in Tree structure
129 #[allow(unused)]
131 #[allow(unused)]
130 pub const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
132 pub const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
131
133
134 /// Whether to use "RevlogNG", i.e. V1 of the revlog format
136 /// Whether to use "RevlogNG", i.e. V1 of the revlog format
133 #[allow(unused)]
135 #[allow(unused)]
134 pub const REVLOGV1_REQUIREMENT: &str = "revlogv1";
136 pub const REVLOGV1_REQUIREMENT: &str = "revlogv1";
135
137
136 /// Increment the sub-version when the revlog v2 format changes to lock out old
138 /// Increment the sub-version when the revlog v2 format changes to lock out old
137 /// clients.
139 /// clients.
138 #[allow(unused)]
140 #[allow(unused)]
139 pub const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
141 pub const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
140
142
141 /// Increment the sub-version when the changelog v2 format changes to lock out old
143 /// Increment the sub-version when the changelog v2 format changes to lock out old
142 /// clients.
144 /// clients.
143 #[allow(unused)]
145 #[allow(unused)]
144 pub const CHANGELOGV2_REQUIREMENT: &str = "exp-changelog-v2";
146 pub const CHANGELOGV2_REQUIREMENT: &str = "exp-changelog-v2";
145
147
146 /// A repository with the sparserevlog feature will have delta chains that
148 /// A repository with the sparserevlog feature will have delta chains that
147 /// can spread over a larger span. Sparse reading cuts these large spans into
149 /// can spread over a larger span. Sparse reading cuts these large spans into
148 /// pieces, so that each piece isn't too big.
150 /// pieces, so that each piece isn't too big.
149 /// Without the sparserevlog capability, reading from the repository could use
151 /// Without the sparserevlog capability, reading from the repository could use
150 /// huge amounts of memory, because the whole span would be read at once,
152 /// huge amounts of memory, because the whole span would be read at once,
151 /// including all the intermediate revisions that aren't pertinent for the
153 /// including all the intermediate revisions that aren't pertinent for the
152 /// chain. This is why once a repository has enabled sparse-read, it becomes
154 /// chain. This is why once a repository has enabled sparse-read, it becomes
153 /// required.
155 /// required.
154 #[allow(unused)]
156 #[allow(unused)]
155 pub const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
157 pub const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
156
158
157 /// A repository with the copies-sidedata-changeset requirement will store
159 /// A repository with the copies-sidedata-changeset requirement will store
158 /// copies related information in changeset's sidedata.
160 /// copies related information in changeset's sidedata.
159 #[allow(unused)]
161 #[allow(unused)]
160 pub const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
162 pub const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
161
163
162 /// The repository uses a persistent nodemap for the changelog and the manifest.
164 /// The repository uses a persistent nodemap for the changelog and the manifest.
163 #[allow(unused)]
165 #[allow(unused)]
164 pub const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
166 pub const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
165
167
166 /// Denotes that the current repository is a share
168 /// Denotes that the current repository is a share
167 #[allow(unused)]
169 #[allow(unused)]
168 pub const SHARED_REQUIREMENT: &str = "shared";
170 pub const SHARED_REQUIREMENT: &str = "shared";
169
171
170 /// Denotes that the current repository is a share and the shared source path is
172 /// Denotes that the current repository is a share and the shared source path is
171 /// relative to the current repository root path
173 /// relative to the current repository root path
172 #[allow(unused)]
174 #[allow(unused)]
173 pub const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
175 pub const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
174
176
175 /// A repository with share implemented safely. The repository has different
177 /// A repository with share implemented safely. The repository has different
176 /// store and working copy requirements, i.e. both `.hg/requires` and
178 /// store and working copy requirements, i.e. both `.hg/requires` and
177 /// `.hg/store/requires` are present.
179 /// `.hg/store/requires` are present.
178 #[allow(unused)]
180 #[allow(unused)]
179 pub const SHARESAFE_REQUIREMENT: &str = "share-safe";
181 pub const SHARESAFE_REQUIREMENT: &str = "share-safe";
180
182
181 /// A repository that uses zstd compression inside its revlog
183 /// A repository that uses zstd compression inside its revlog
182 #[allow(unused)]
184 #[allow(unused)]
183 pub const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd";
185 pub const REVLOG_COMPRESSION_ZSTD: &str = "revlog-compression-zstd";
@@ -1,762 +1,764
1 use std::ascii::escape_default;
1 use std::ascii::escape_default;
2 use std::borrow::Cow;
2 use std::borrow::Cow;
3 use std::collections::BTreeMap;
3 use std::collections::BTreeMap;
4 use std::fmt::{Debug, Formatter};
4 use std::fmt::{Debug, Formatter};
5 use std::{iter, str};
5 use std::{iter, str};
6
6
7 use chrono::{DateTime, FixedOffset, NaiveDateTime};
7 use chrono::{DateTime, FixedOffset, NaiveDateTime};
8 use itertools::{Either, Itertools};
8 use itertools::{Either, Itertools};
9
9
10 use crate::errors::HgError;
10 use crate::errors::HgError;
11 use crate::revlog::Index;
11 use crate::revlog::Index;
12 use crate::revlog::Revision;
12 use crate::revlog::Revision;
13 use crate::revlog::{Node, NodePrefix};
13 use crate::revlog::{Node, NodePrefix};
14 use crate::revlog::{Revlog, RevlogEntry, RevlogError};
14 use crate::revlog::{Revlog, RevlogEntry, RevlogError};
15 use crate::utils::hg_path::HgPath;
15 use crate::utils::hg_path::HgPath;
16 use crate::vfs::Vfs;
16 use crate::vfs::VfsImpl;
17 use crate::{Graph, GraphError, RevlogOpenOptions, UncheckedRevision};
17 use crate::{Graph, GraphError, RevlogOpenOptions, UncheckedRevision};
18
18
19 /// A specialized `Revlog` to work with changelog data format.
19 /// A specialized `Revlog` to work with changelog data format.
20 pub struct Changelog {
20 pub struct Changelog {
21 /// The generic `revlog` format.
21 /// The generic `revlog` format.
22 pub(crate) revlog: Revlog,
22 pub(crate) revlog: Revlog,
23 }
23 }
24
24
25 impl Changelog {
25 impl Changelog {
26 /// Open the `changelog` of a repository given by its root.
26 /// Open the `changelog` of a repository given by its root.
27 pub fn open(
27 pub fn open(
28 store_vfs: &Vfs,
28 store_vfs: &VfsImpl,
29 options: RevlogOpenOptions,
29 options: RevlogOpenOptions,
30 ) -> Result<Self, HgError> {
30 ) -> Result<Self, HgError> {
31 let revlog = Revlog::open(store_vfs, "00changelog.i", None, options)?;
31 let revlog = Revlog::open(store_vfs, "00changelog.i", None, options)?;
32 Ok(Self { revlog })
32 Ok(Self { revlog })
33 }
33 }
34
34
35 /// Return the `ChangelogRevisionData` for the given node ID.
35 /// Return the `ChangelogRevisionData` for the given node ID.
36 pub fn data_for_node(
36 pub fn data_for_node(
37 &self,
37 &self,
38 node: NodePrefix,
38 node: NodePrefix,
39 ) -> Result<ChangelogRevisionData, RevlogError> {
39 ) -> Result<ChangelogRevisionData, RevlogError> {
40 let rev = self.revlog.rev_from_node(node)?;
40 let rev = self.revlog.rev_from_node(node)?;
41 self.entry_for_checked_rev(rev)?.data()
41 self.entry_for_checked_rev(rev)?.data()
42 }
42 }
43
43
44 /// Return the [`ChangelogEntry`] for the given revision number.
44 /// Return the [`ChangelogEntry`] for the given revision number.
45 pub fn entry_for_rev(
45 pub fn entry_for_rev(
46 &self,
46 &self,
47 rev: UncheckedRevision,
47 rev: UncheckedRevision,
48 ) -> Result<ChangelogEntry, RevlogError> {
48 ) -> Result<ChangelogEntry, RevlogError> {
49 let revlog_entry = self.revlog.get_entry(rev)?;
49 let revlog_entry = self.revlog.get_entry(rev)?;
50 Ok(ChangelogEntry { revlog_entry })
50 Ok(ChangelogEntry { revlog_entry })
51 }
51 }
52
52
53 /// Same as [`Self::entry_for_rev`] for checked revisions.
53 /// Same as [`Self::entry_for_rev`] for checked revisions.
54 fn entry_for_checked_rev(
54 fn entry_for_checked_rev(
55 &self,
55 &self,
56 rev: Revision,
56 rev: Revision,
57 ) -> Result<ChangelogEntry, RevlogError> {
57 ) -> Result<ChangelogEntry, RevlogError> {
58 let revlog_entry = self.revlog.get_entry_for_checked_rev(rev)?;
58 let revlog_entry = self.revlog.get_entry_for_checked_rev(rev)?;
59 Ok(ChangelogEntry { revlog_entry })
59 Ok(ChangelogEntry { revlog_entry })
60 }
60 }
61
61
62 /// Return the [`ChangelogRevisionData`] for the given revision number.
62 /// Return the [`ChangelogRevisionData`] for the given revision number.
63 ///
63 ///
64 /// This is a useful shortcut in case the caller does not need the
64 /// This is a useful shortcut in case the caller does not need the
65 /// generic revlog information (parents, hashes etc). Otherwise
65 /// generic revlog information (parents, hashes etc). Otherwise
66 /// consider taking a [`ChangelogEntry`] with
66 /// consider taking a [`ChangelogEntry`] with
67 /// [entry_for_rev](`Self::entry_for_rev`) and doing everything from there.
67 /// [entry_for_rev](`Self::entry_for_rev`) and doing everything from there.
68 pub fn data_for_rev(
68 pub fn data_for_rev(
69 &self,
69 &self,
70 rev: UncheckedRevision,
70 rev: UncheckedRevision,
71 ) -> Result<ChangelogRevisionData, RevlogError> {
71 ) -> Result<ChangelogRevisionData, RevlogError> {
72 self.entry_for_rev(rev)?.data()
72 self.entry_for_rev(rev)?.data()
73 }
73 }
74
74
75 pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> {
75 pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> {
76 self.revlog.node_from_rev(rev)
76 self.revlog.node_from_rev(rev)
77 }
77 }
78
78
79 pub fn rev_from_node(
79 pub fn rev_from_node(
80 &self,
80 &self,
81 node: NodePrefix,
81 node: NodePrefix,
82 ) -> Result<Revision, RevlogError> {
82 ) -> Result<Revision, RevlogError> {
83 self.revlog.rev_from_node(node)
83 self.revlog.rev_from_node(node)
84 }
84 }
85
85
86 pub fn get_index(&self) -> &Index {
86 pub fn get_index(&self) -> &Index {
87 &self.revlog.index
87 &self.revlog.index
88 }
88 }
89 }
89 }
90
90
91 impl Graph for Changelog {
91 impl Graph for Changelog {
92 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
92 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
93 self.revlog.parents(rev)
93 self.revlog.parents(rev)
94 }
94 }
95 }
95 }
96
96
97 /// A specialized `RevlogEntry` for `changelog` data format
97 /// A specialized `RevlogEntry` for `changelog` data format
98 ///
98 ///
99 /// This is a `RevlogEntry` with the added semantics that the associated
99 /// This is a `RevlogEntry` with the added semantics that the associated
100 /// data should meet the requirements for `changelog`, materialized by
100 /// data should meet the requirements for `changelog`, materialized by
101 /// the fact that `data()` constructs a `ChangelogRevisionData`.
101 /// the fact that `data()` constructs a `ChangelogRevisionData`.
102 /// If that promise is broken, the `data` method returns an error.
102 /// If that promise is broken, the `data` method returns an error.
103 #[derive(Clone)]
103 #[derive(Clone)]
104 pub struct ChangelogEntry<'changelog> {
104 pub struct ChangelogEntry<'changelog> {
105 /// Same data, as a generic `RevlogEntry`.
105 /// Same data, as a generic `RevlogEntry`.
106 pub(crate) revlog_entry: RevlogEntry<'changelog>,
106 pub(crate) revlog_entry: RevlogEntry<'changelog>,
107 }
107 }
108
108
109 impl<'changelog> ChangelogEntry<'changelog> {
109 impl<'changelog> ChangelogEntry<'changelog> {
110 pub fn data<'a>(
110 pub fn data<'a>(
111 &'a self,
111 &'a self,
112 ) -> Result<ChangelogRevisionData<'changelog>, RevlogError> {
112 ) -> Result<ChangelogRevisionData<'changelog>, RevlogError> {
113 let bytes = self.revlog_entry.data()?;
113 let bytes = self.revlog_entry.data()?;
114 if bytes.is_empty() {
114 if bytes.is_empty() {
115 Ok(ChangelogRevisionData::null())
115 Ok(ChangelogRevisionData::null())
116 } else {
116 } else {
117 Ok(ChangelogRevisionData::new(bytes).map_err(|err| {
117 Ok(ChangelogRevisionData::new(bytes).map_err(|err| {
118 RevlogError::Other(HgError::CorruptedRepository(format!(
118 RevlogError::Other(HgError::CorruptedRepository(format!(
119 "Invalid changelog data for revision {}: {:?}",
119 "Invalid changelog data for revision {}: {:?}",
120 self.revlog_entry.revision(),
120 self.revlog_entry.revision(),
121 err
121 err
122 )))
122 )))
123 })?)
123 })?)
124 }
124 }
125 }
125 }
126
126
127 /// Obtain a reference to the underlying `RevlogEntry`.
127 /// Obtain a reference to the underlying `RevlogEntry`.
128 ///
128 ///
129 /// This allows the caller to access the information that is common
129 /// This allows the caller to access the information that is common
130 /// to all revlog entries: revision number, node id, parent revisions etc.
130 /// to all revlog entries: revision number, node id, parent revisions etc.
131 pub fn as_revlog_entry(&self) -> &RevlogEntry {
131 pub fn as_revlog_entry(&self) -> &RevlogEntry {
132 &self.revlog_entry
132 &self.revlog_entry
133 }
133 }
134
134
135 pub fn p1_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
135 pub fn p1_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
136 Ok(self
136 Ok(self
137 .revlog_entry
137 .revlog_entry
138 .p1_entry()?
138 .p1_entry()?
139 .map(|revlog_entry| Self { revlog_entry }))
139 .map(|revlog_entry| Self { revlog_entry }))
140 }
140 }
141
141
142 pub fn p2_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
142 pub fn p2_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
143 Ok(self
143 Ok(self
144 .revlog_entry
144 .revlog_entry
145 .p2_entry()?
145 .p2_entry()?
146 .map(|revlog_entry| Self { revlog_entry }))
146 .map(|revlog_entry| Self { revlog_entry }))
147 }
147 }
148 }
148 }
149
149
150 /// `Changelog` entry which knows how to interpret the `changelog` data bytes.
150 /// `Changelog` entry which knows how to interpret the `changelog` data bytes.
151 #[derive(PartialEq)]
151 #[derive(PartialEq)]
152 pub struct ChangelogRevisionData<'changelog> {
152 pub struct ChangelogRevisionData<'changelog> {
153 /// The data bytes of the `changelog` entry.
153 /// The data bytes of the `changelog` entry.
154 bytes: Cow<'changelog, [u8]>,
154 bytes: Cow<'changelog, [u8]>,
155 /// The end offset for the hex manifest (not including the newline)
155 /// The end offset for the hex manifest (not including the newline)
156 manifest_end: usize,
156 manifest_end: usize,
157 /// The end offset for the user+email (not including the newline)
157 /// The end offset for the user+email (not including the newline)
158 user_end: usize,
158 user_end: usize,
159 /// The end offset for the timestamp+timezone+extras (not including the
159 /// The end offset for the timestamp+timezone+extras (not including the
160 /// newline)
160 /// newline)
161 timestamp_end: usize,
161 timestamp_end: usize,
162 /// The end offset for the file list (not including the newline)
162 /// The end offset for the file list (not including the newline)
163 files_end: usize,
163 files_end: usize,
164 }
164 }
165
165
166 impl<'changelog> ChangelogRevisionData<'changelog> {
166 impl<'changelog> ChangelogRevisionData<'changelog> {
167 fn new(bytes: Cow<'changelog, [u8]>) -> Result<Self, HgError> {
167 fn new(bytes: Cow<'changelog, [u8]>) -> Result<Self, HgError> {
168 let mut line_iter = bytes.split(|b| b == &b'\n');
168 let mut line_iter = bytes.split(|b| b == &b'\n');
169 let manifest_end = line_iter
169 let manifest_end = line_iter
170 .next()
170 .next()
171 .expect("Empty iterator from split()?")
171 .expect("Empty iterator from split()?")
172 .len();
172 .len();
173 let user_slice = line_iter.next().ok_or_else(|| {
173 let user_slice = line_iter.next().ok_or_else(|| {
174 HgError::corrupted("Changeset data truncated after manifest line")
174 HgError::corrupted("Changeset data truncated after manifest line")
175 })?;
175 })?;
176 let user_end = manifest_end + 1 + user_slice.len();
176 let user_end = manifest_end + 1 + user_slice.len();
177 let timestamp_slice = line_iter.next().ok_or_else(|| {
177 let timestamp_slice = line_iter.next().ok_or_else(|| {
178 HgError::corrupted("Changeset data truncated after user line")
178 HgError::corrupted("Changeset data truncated after user line")
179 })?;
179 })?;
180 let timestamp_end = user_end + 1 + timestamp_slice.len();
180 let timestamp_end = user_end + 1 + timestamp_slice.len();
181 let mut files_end = timestamp_end + 1;
181 let mut files_end = timestamp_end + 1;
182 loop {
182 loop {
183 let line = line_iter.next().ok_or_else(|| {
183 let line = line_iter.next().ok_or_else(|| {
184 HgError::corrupted("Changeset data truncated in files list")
184 HgError::corrupted("Changeset data truncated in files list")
185 })?;
185 })?;
186 if line.is_empty() {
186 if line.is_empty() {
187 if files_end == bytes.len() {
187 if files_end == bytes.len() {
188 // The list of files ended with a single newline (there
188 // The list of files ended with a single newline (there
189 // should be two)
189 // should be two)
190 return Err(HgError::corrupted(
190 return Err(HgError::corrupted(
191 "Changeset data truncated after files list",
191 "Changeset data truncated after files list",
192 ));
192 ));
193 }
193 }
194 files_end -= 1;
194 files_end -= 1;
195 break;
195 break;
196 }
196 }
197 files_end += line.len() + 1;
197 files_end += line.len() + 1;
198 }
198 }
199
199
200 Ok(Self {
200 Ok(Self {
201 bytes,
201 bytes,
202 manifest_end,
202 manifest_end,
203 user_end,
203 user_end,
204 timestamp_end,
204 timestamp_end,
205 files_end,
205 files_end,
206 })
206 })
207 }
207 }
208
208
209 fn null() -> Self {
209 fn null() -> Self {
210 Self::new(Cow::Borrowed(
210 Self::new(Cow::Borrowed(
211 b"0000000000000000000000000000000000000000\n\n0 0\n\n",
211 b"0000000000000000000000000000000000000000\n\n0 0\n\n",
212 ))
212 ))
213 .unwrap()
213 .unwrap()
214 }
214 }
215
215
216 /// Return an iterator over the lines of the entry.
216 /// Return an iterator over the lines of the entry.
217 pub fn lines(&self) -> impl Iterator<Item = &[u8]> {
217 pub fn lines(&self) -> impl Iterator<Item = &[u8]> {
218 self.bytes.split(|b| b == &b'\n')
218 self.bytes.split(|b| b == &b'\n')
219 }
219 }
220
220
221 /// Return the node id of the `manifest` referenced by this `changelog`
221 /// Return the node id of the `manifest` referenced by this `changelog`
222 /// entry.
222 /// entry.
223 pub fn manifest_node(&self) -> Result<Node, HgError> {
223 pub fn manifest_node(&self) -> Result<Node, HgError> {
224 let manifest_node_hex = &self.bytes[..self.manifest_end];
224 let manifest_node_hex = &self.bytes[..self.manifest_end];
225 Node::from_hex_for_repo(manifest_node_hex)
225 Node::from_hex_for_repo(manifest_node_hex)
226 }
226 }
227
227
228 /// The full user string (usually a name followed by an email enclosed in
228 /// The full user string (usually a name followed by an email enclosed in
229 /// angle brackets)
229 /// angle brackets)
230 pub fn user(&self) -> &[u8] {
230 pub fn user(&self) -> &[u8] {
231 &self.bytes[self.manifest_end + 1..self.user_end]
231 &self.bytes[self.manifest_end + 1..self.user_end]
232 }
232 }
233
233
234 /// The full timestamp line (timestamp in seconds, offset in seconds, and
234 /// The full timestamp line (timestamp in seconds, offset in seconds, and
235 /// possibly extras)
235 /// possibly extras)
236 // TODO: We should expose this in a more useful way
236 // TODO: We should expose this in a more useful way
237 pub fn timestamp_line(&self) -> &[u8] {
237 pub fn timestamp_line(&self) -> &[u8] {
238 &self.bytes[self.user_end + 1..self.timestamp_end]
238 &self.bytes[self.user_end + 1..self.timestamp_end]
239 }
239 }
240
240
241 /// Parsed timestamp.
241 /// Parsed timestamp.
242 pub fn timestamp(&self) -> Result<DateTime<FixedOffset>, HgError> {
242 pub fn timestamp(&self) -> Result<DateTime<FixedOffset>, HgError> {
243 parse_timestamp(self.timestamp_line())
243 parse_timestamp(self.timestamp_line())
244 }
244 }
245
245
246 /// Optional commit extras.
246 /// Optional commit extras.
247 pub fn extra(&self) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
247 pub fn extra(&self) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
248 parse_timestamp_line_extra(self.timestamp_line())
248 parse_timestamp_line_extra(self.timestamp_line())
249 }
249 }
250
250
251 /// The files changed in this revision.
251 /// The files changed in this revision.
252 pub fn files(&self) -> impl Iterator<Item = &HgPath> {
252 pub fn files(&self) -> impl Iterator<Item = &HgPath> {
253 if self.timestamp_end == self.files_end {
253 if self.timestamp_end == self.files_end {
254 Either::Left(iter::empty())
254 Either::Left(iter::empty())
255 } else {
255 } else {
256 Either::Right(
256 Either::Right(
257 self.bytes[self.timestamp_end + 1..self.files_end]
257 self.bytes[self.timestamp_end + 1..self.files_end]
258 .split(|b| b == &b'\n')
258 .split(|b| b == &b'\n')
259 .map(HgPath::new),
259 .map(HgPath::new),
260 )
260 )
261 }
261 }
262 }
262 }
263
263
264 /// The change description.
264 /// The change description.
265 pub fn description(&self) -> &[u8] {
265 pub fn description(&self) -> &[u8] {
266 &self.bytes[self.files_end + 2..]
266 &self.bytes[self.files_end + 2..]
267 }
267 }
268 }
268 }
269
269
270 impl Debug for ChangelogRevisionData<'_> {
270 impl Debug for ChangelogRevisionData<'_> {
271 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
271 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
272 f.debug_struct("ChangelogRevisionData")
272 f.debug_struct("ChangelogRevisionData")
273 .field("bytes", &debug_bytes(&self.bytes))
273 .field("bytes", &debug_bytes(&self.bytes))
274 .field("manifest", &debug_bytes(&self.bytes[..self.manifest_end]))
274 .field("manifest", &debug_bytes(&self.bytes[..self.manifest_end]))
275 .field(
275 .field(
276 "user",
276 "user",
277 &debug_bytes(
277 &debug_bytes(
278 &self.bytes[self.manifest_end + 1..self.user_end],
278 &self.bytes[self.manifest_end + 1..self.user_end],
279 ),
279 ),
280 )
280 )
281 .field(
281 .field(
282 "timestamp",
282 "timestamp",
283 &debug_bytes(
283 &debug_bytes(
284 &self.bytes[self.user_end + 1..self.timestamp_end],
284 &self.bytes[self.user_end + 1..self.timestamp_end],
285 ),
285 ),
286 )
286 )
287 .field(
287 .field(
288 "files",
288 "files",
289 &debug_bytes(
289 &debug_bytes(
290 &self.bytes[self.timestamp_end + 1..self.files_end],
290 &self.bytes[self.timestamp_end + 1..self.files_end],
291 ),
291 ),
292 )
292 )
293 .field(
293 .field(
294 "description",
294 "description",
295 &debug_bytes(&self.bytes[self.files_end + 2..]),
295 &debug_bytes(&self.bytes[self.files_end + 2..]),
296 )
296 )
297 .finish()
297 .finish()
298 }
298 }
299 }
299 }
300
300
301 fn debug_bytes(bytes: &[u8]) -> String {
301 fn debug_bytes(bytes: &[u8]) -> String {
302 String::from_utf8_lossy(
302 String::from_utf8_lossy(
303 &bytes.iter().flat_map(|b| escape_default(*b)).collect_vec(),
303 &bytes.iter().flat_map(|b| escape_default(*b)).collect_vec(),
304 )
304 )
305 .to_string()
305 .to_string()
306 }
306 }
307
307
308 /// Parse the raw bytes of the timestamp line from a changelog entry.
308 /// Parse the raw bytes of the timestamp line from a changelog entry.
309 ///
309 ///
310 /// According to the documentation in `hg help dates` and the
310 /// According to the documentation in `hg help dates` and the
311 /// implementation in `changelog.py`, the format of the timestamp line
311 /// implementation in `changelog.py`, the format of the timestamp line
312 /// is `time tz extra\n` where:
312 /// is `time tz extra\n` where:
313 ///
313 ///
314 /// - `time` is an ASCII-encoded signed int or float denoting a UTC timestamp
314 /// - `time` is an ASCII-encoded signed int or float denoting a UTC timestamp
315 /// as seconds since the UNIX epoch.
315 /// as seconds since the UNIX epoch.
316 ///
316 ///
317 /// - `tz` is the timezone offset as an ASCII-encoded signed integer denoting
317 /// - `tz` is the timezone offset as an ASCII-encoded signed integer denoting
318 /// seconds WEST of UTC (so negative for timezones east of UTC, which is the
318 /// seconds WEST of UTC (so negative for timezones east of UTC, which is the
319 /// opposite of the sign in ISO 8601 timestamps).
319 /// opposite of the sign in ISO 8601 timestamps).
320 ///
320 ///
321 /// - `extra` is an optional set of NUL-delimited key-value pairs, with the key
321 /// - `extra` is an optional set of NUL-delimited key-value pairs, with the key
322 /// and value in each pair separated by an ASCII colon. Keys are limited to
322 /// and value in each pair separated by an ASCII colon. Keys are limited to
323 /// ASCII letters, digits, hyphens, and underscores, whereas values can be
323 /// ASCII letters, digits, hyphens, and underscores, whereas values can be
324 /// arbitrary bytes.
324 /// arbitrary bytes.
325 fn parse_timestamp(
325 fn parse_timestamp(
326 timestamp_line: &[u8],
326 timestamp_line: &[u8],
327 ) -> Result<DateTime<FixedOffset>, HgError> {
327 ) -> Result<DateTime<FixedOffset>, HgError> {
328 let mut parts = timestamp_line.splitn(3, |c| *c == b' ');
328 let mut parts = timestamp_line.splitn(3, |c| *c == b' ');
329
329
330 let timestamp_bytes = parts
330 let timestamp_bytes = parts
331 .next()
331 .next()
332 .ok_or_else(|| HgError::corrupted("missing timestamp"))?;
332 .ok_or_else(|| HgError::corrupted("missing timestamp"))?;
333 let timestamp_str = str::from_utf8(timestamp_bytes).map_err(|e| {
333 let timestamp_str = str::from_utf8(timestamp_bytes).map_err(|e| {
334 HgError::corrupted(format!("timestamp is not valid UTF-8: {e}"))
334 HgError::corrupted(format!("timestamp is not valid UTF-8: {e}"))
335 })?;
335 })?;
336 let timestamp_utc = timestamp_str
336 let timestamp_utc = timestamp_str
337 .parse()
337 .parse()
338 .map_err(|e| {
338 .map_err(|e| {
339 HgError::corrupted(format!("failed to parse timestamp: {e}"))
339 HgError::corrupted(format!("failed to parse timestamp: {e}"))
340 })
340 })
341 .and_then(|secs| {
341 .and_then(|secs| {
342 NaiveDateTime::from_timestamp_opt(secs, 0).ok_or_else(|| {
342 NaiveDateTime::from_timestamp_opt(secs, 0).ok_or_else(|| {
343 HgError::corrupted(format!(
343 HgError::corrupted(format!(
344 "integer timestamp out of valid range: {secs}"
344 "integer timestamp out of valid range: {secs}"
345 ))
345 ))
346 })
346 })
347 })
347 })
348 // Attempt to parse the timestamp as a float if we can't parse
348 // Attempt to parse the timestamp as a float if we can't parse
349 // it as an int. It doesn't seem like float timestamps are actually
349 // it as an int. It doesn't seem like float timestamps are actually
350 // used in practice, but the Python code supports them.
350 // used in practice, but the Python code supports them.
351 .or_else(|_| parse_float_timestamp(timestamp_str))?;
351 .or_else(|_| parse_float_timestamp(timestamp_str))?;
352
352
353 let timezone_bytes = parts
353 let timezone_bytes = parts
354 .next()
354 .next()
355 .ok_or_else(|| HgError::corrupted("missing timezone"))?;
355 .ok_or_else(|| HgError::corrupted("missing timezone"))?;
356 let timezone_secs: i32 = str::from_utf8(timezone_bytes)
356 let timezone_secs: i32 = str::from_utf8(timezone_bytes)
357 .map_err(|e| {
357 .map_err(|e| {
358 HgError::corrupted(format!("timezone is not valid UTF-8: {e}"))
358 HgError::corrupted(format!("timezone is not valid UTF-8: {e}"))
359 })?
359 })?
360 .parse()
360 .parse()
361 .map_err(|e| {
361 .map_err(|e| {
362 HgError::corrupted(format!("timezone is not an integer: {e}"))
362 HgError::corrupted(format!("timezone is not an integer: {e}"))
363 })?;
363 })?;
364 let timezone = FixedOffset::west_opt(timezone_secs)
364 let timezone = FixedOffset::west_opt(timezone_secs)
365 .ok_or_else(|| HgError::corrupted("timezone offset out of bounds"))?;
365 .ok_or_else(|| HgError::corrupted("timezone offset out of bounds"))?;
366
366
367 Ok(DateTime::from_naive_utc_and_offset(timestamp_utc, timezone))
367 Ok(DateTime::from_naive_utc_and_offset(timestamp_utc, timezone))
368 }
368 }
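A sketch of a test pinning down the sign convention documented above (hypothetical module; the offset is given in seconds west of UTC, so a negative value means east of UTC):

#[cfg(test)]
mod timestamp_example {
    use super::parse_timestamp;
    use chrono::FixedOffset;

    #[test]
    fn west_of_utc_sign_convention() {
        // "-3600" seconds west of UTC is the same as UTC+01:00.
        let parsed = parse_timestamp(b"1500000000 -3600").unwrap();
        assert_eq!(parsed.offset(), &FixedOffset::east_opt(3600).unwrap());
        assert_eq!(parsed.timestamp(), 1_500_000_000);
    }
}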
369
369
370 /// Attempt to parse the given string as floating-point timestamp, and
370 /// Attempt to parse the given string as floating-point timestamp, and
371 /// convert the result into a `chrono::NaiveDateTime`.
371 /// convert the result into a `chrono::NaiveDateTime`.
372 fn parse_float_timestamp(
372 fn parse_float_timestamp(
373 timestamp_str: &str,
373 timestamp_str: &str,
374 ) -> Result<NaiveDateTime, HgError> {
374 ) -> Result<NaiveDateTime, HgError> {
375 let timestamp = timestamp_str.parse::<f64>().map_err(|e| {
375 let timestamp = timestamp_str.parse::<f64>().map_err(|e| {
376 HgError::corrupted(format!("failed to parse timestamp: {e}"))
376 HgError::corrupted(format!("failed to parse timestamp: {e}"))
377 })?;
377 })?;
378
378
379 // To construct a `NaiveDateTime` we'll need to convert the float
379 // To construct a `NaiveDateTime` we'll need to convert the float
380 // into signed integer seconds and unsigned integer nanoseconds.
380 // into signed integer seconds and unsigned integer nanoseconds.
381 let mut secs = timestamp.trunc() as i64;
381 let mut secs = timestamp.trunc() as i64;
382 let mut subsecs = timestamp.fract();
382 let mut subsecs = timestamp.fract();
383
383
384 // If the timestamp is negative, we need to express the fractional
384 // If the timestamp is negative, we need to express the fractional
385 // component as positive nanoseconds since the previous second.
385 // component as positive nanoseconds since the previous second.
386 if timestamp < 0.0 {
386 if timestamp < 0.0 {
387 secs -= 1;
387 secs -= 1;
388 subsecs += 1.0;
388 subsecs += 1.0;
389 }
389 }
390
390
391 // This cast should be safe because the fractional component is
391 // This cast should be safe because the fractional component is
392 // by definition less than 1.0, so this value should not exceed
392 // by definition less than 1.0, so this value should not exceed
393 // 1 billion, which is representable as an f64 without loss of
393 // 1 billion, which is representable as an f64 without loss of
394 // precision and should fit into a u32 without overflowing.
394 // precision and should fit into a u32 without overflowing.
395 //
395 //
396 // (Any loss of precision in the fractional component will have
396 // (Any loss of precision in the fractional component will have
397 // already happened at the time of initial parsing; in general,
397 // already happened at the time of initial parsing; in general,
398 // f64s are insufficiently precise to provide nanosecond-level
398 // f64s are insufficiently precise to provide nanosecond-level
399 // precision with present-day timestamps.)
399 // precision with present-day timestamps.)
400 let nsecs = (subsecs * 1_000_000_000.0) as u32;
400 let nsecs = (subsecs * 1_000_000_000.0) as u32;
401
401
402 NaiveDateTime::from_timestamp_opt(secs, nsecs).ok_or_else(|| {
402 NaiveDateTime::from_timestamp_opt(secs, nsecs).ok_or_else(|| {
403 HgError::corrupted(format!(
403 HgError::corrupted(format!(
404 "float timestamp out of valid range: {timestamp}"
404 "float timestamp out of valid range: {timestamp}"
405 ))
405 ))
406 })
406 })
407 }
407 }
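A worked example of the negative-timestamp handling above, written as a hypothetical test: "-0.25" becomes secs = -1 and nsecs = 750_000_000, i.e. a quarter of a second before the epoch is 1969-12-31T23:59:59.750.

#[cfg(test)]
mod float_timestamp_example {
    use super::parse_float_timestamp;
    use chrono::Timelike;

    #[test]
    fn negative_fraction_rolls_back_one_second() {
        let dt = parse_float_timestamp("-0.25").unwrap();
        // The fractional part is re-expressed as nanoseconds after the
        // previous whole second.
        assert_eq!(dt.second(), 59);
        assert_eq!(dt.nanosecond(), 750_000_000);
    }
}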
408
408
409 /// Decode changeset extra fields.
409 /// Decode changeset extra fields.
410 ///
410 ///
411 /// Extras are null-delimited key-value pairs where the key consists of ASCII
411 /// Extras are null-delimited key-value pairs where the key consists of ASCII
412 /// alphanumeric characters plus hyphens and underscores, and the value can
412 /// alphanumeric characters plus hyphens and underscores, and the value can
413 /// contain arbitrary bytes.
413 /// contain arbitrary bytes.
414 fn decode_extra(extra: &[u8]) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
414 fn decode_extra(extra: &[u8]) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
415 extra
415 extra
416 .split(|c| *c == b'\0')
416 .split(|c| *c == b'\0')
417 .map(|pair| {
417 .map(|pair| {
418 let pair = unescape_extra(pair);
418 let pair = unescape_extra(pair);
419 let mut iter = pair.splitn(2, |c| *c == b':');
419 let mut iter = pair.splitn(2, |c| *c == b':');
420
420
421 let key_bytes =
421 let key_bytes =
422 iter.next().filter(|k| !k.is_empty()).ok_or_else(|| {
422 iter.next().filter(|k| !k.is_empty()).ok_or_else(|| {
423 HgError::corrupted("empty key in changeset extras")
423 HgError::corrupted("empty key in changeset extras")
424 })?;
424 })?;
425
425
426 let key = str::from_utf8(key_bytes)
426 let key = str::from_utf8(key_bytes)
427 .ok()
427 .ok()
428 .filter(|k| {
428 .filter(|k| {
429 k.chars().all(|c| {
429 k.chars().all(|c| {
430 c.is_ascii_alphanumeric() || c == '_' || c == '-'
430 c.is_ascii_alphanumeric() || c == '_' || c == '-'
431 })
431 })
432 })
432 })
433 .ok_or_else(|| {
433 .ok_or_else(|| {
434 let key = String::from_utf8_lossy(key_bytes);
434 let key = String::from_utf8_lossy(key_bytes);
435 HgError::corrupted(format!(
435 HgError::corrupted(format!(
436 "invalid key in changeset extras: {key}",
436 "invalid key in changeset extras: {key}",
437 ))
437 ))
438 })?
438 })?
439 .to_string();
439 .to_string();
440
440
441 let value = iter.next().map(Into::into).ok_or_else(|| {
441 let value = iter.next().map(Into::into).ok_or_else(|| {
442 HgError::corrupted(format!(
442 HgError::corrupted(format!(
443 "missing value for changeset extra: {key}"
443 "missing value for changeset extra: {key}"
444 ))
444 ))
445 })?;
445 })?;
446
446
447 Ok((key, value))
447 Ok((key, value))
448 })
448 })
449 .collect()
449 .collect()
450 }
450 }
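The format handled by `decode_extra` can be illustrated with a short hypothetical test: NUL-delimited `key:value` pairs, keys restricted to ASCII alphanumerics plus `-` and `_`, values arbitrary bytes.

#[cfg(test)]
mod decode_extra_example {
    use super::decode_extra;

    #[test]
    fn two_pairs() {
        let map = decode_extra(b"branch:default\0close:1").unwrap();
        assert_eq!(map["branch"], b"default".to_vec());
        assert_eq!(map["close"], b"1".to_vec());
    }
}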
451
451
452 /// Parse the extra fields from a changeset's timestamp line.
452 /// Parse the extra fields from a changeset's timestamp line.
453 fn parse_timestamp_line_extra(
453 fn parse_timestamp_line_extra(
454 timestamp_line: &[u8],
454 timestamp_line: &[u8],
455 ) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
455 ) -> Result<BTreeMap<String, Vec<u8>>, HgError> {
456 Ok(timestamp_line
456 Ok(timestamp_line
457 .splitn(3, |c| *c == b' ')
457 .splitn(3, |c| *c == b' ')
458 .nth(2)
458 .nth(2)
459 .map(decode_extra)
459 .map(decode_extra)
460 .transpose()?
460 .transpose()?
461 .unwrap_or_default())
461 .unwrap_or_default())
462 }
462 }
463
463
464 /// Decode Mercurial's escaping for changelog extras.
464 /// Decode Mercurial's escaping for changelog extras.
465 ///
465 ///
466 /// The `_string_escape` function in `changelog.py` only escapes 4 characters
466 /// The `_string_escape` function in `changelog.py` only escapes 4 characters
467 /// (null, backslash, newline, and carriage return) so we only decode those.
467 /// (null, backslash, newline, and carriage return) so we only decode those.
468 ///
468 ///
469 /// The Python code also includes a workaround for decoding escaped nuls
469 /// The Python code also includes a workaround for decoding escaped nuls
470 /// that are followed by an ASCII octal digit, since Python's built-in
470 /// that are followed by an ASCII octal digit, since Python's built-in
471 /// `string_escape` codec will interpret that as an escaped octal byte value.
471 /// `string_escape` codec will interpret that as an escaped octal byte value.
472 /// That workaround is omitted here since we don't support decoding octal.
472 /// That workaround is omitted here since we don't support decoding octal.
473 fn unescape_extra(bytes: &[u8]) -> Vec<u8> {
473 fn unescape_extra(bytes: &[u8]) -> Vec<u8> {
474 let mut output = Vec::with_capacity(bytes.len());
474 let mut output = Vec::with_capacity(bytes.len());
475 let mut input = bytes.iter().copied();
475 let mut input = bytes.iter().copied();
476
476
477 while let Some(c) = input.next() {
477 while let Some(c) = input.next() {
478 if c != b'\\' {
478 if c != b'\\' {
479 output.push(c);
479 output.push(c);
480 continue;
480 continue;
481 }
481 }
482
482
483 match input.next() {
483 match input.next() {
484 Some(b'0') => output.push(b'\0'),
484 Some(b'0') => output.push(b'\0'),
485 Some(b'\\') => output.push(b'\\'),
485 Some(b'\\') => output.push(b'\\'),
486 Some(b'n') => output.push(b'\n'),
486 Some(b'n') => output.push(b'\n'),
487 Some(b'r') => output.push(b'\r'),
487 Some(b'r') => output.push(b'\r'),
488 // The following cases should never occur in theory because any
488 // The following cases should never occur in theory because any
489 // backslashes in the original input should have been escaped
489 // backslashes in the original input should have been escaped
490 // with another backslash, so it should not be possible to
490 // with another backslash, so it should not be possible to
491 // observe an escape sequence other than the 4 above.
491 // observe an escape sequence other than the 4 above.
492 Some(c) => output.extend_from_slice(&[b'\\', c]),
492 Some(c) => output.extend_from_slice(&[b'\\', c]),
493 None => output.push(b'\\'),
493 None => output.push(b'\\'),
494 }
494 }
495 }
495 }
496
496
497 output
497 output
498 }
498 }
499
499
500 #[cfg(test)]
500 #[cfg(test)]
501 mod tests {
501 mod tests {
502 use super::*;
502 use super::*;
503 use crate::vfs::Vfs;
503 use crate::vfs::VfsImpl;
504 use crate::{
504 use crate::{
505 RevlogDataConfig, RevlogDeltaConfig, RevlogFeatureConfig,
505 RevlogDataConfig, RevlogDeltaConfig, RevlogFeatureConfig,
506 NULL_REVISION,
506 NULL_REVISION,
507 };
507 };
508 use pretty_assertions::assert_eq;
508 use pretty_assertions::assert_eq;
509
509
510 #[test]
510 #[test]
511 fn test_create_changelogrevisiondata_invalid() {
511 fn test_create_changelogrevisiondata_invalid() {
512 // Completely empty
512 // Completely empty
513 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"")).is_err());
513 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"")).is_err());
514 // No newline after manifest
514 // No newline after manifest
515 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd")).is_err());
515 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd")).is_err());
516 // No newline after user
516 // No newline after user
517 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n")).is_err());
517 assert!(ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n")).is_err());
518 // No newline after timestamp
518 // No newline after timestamp
519 assert!(
519 assert!(
520 ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n\n0 0")).is_err()
520 ChangelogRevisionData::new(Cow::Borrowed(b"abcd\n\n0 0")).is_err()
521 );
521 );
522 // Missing newline after files
522 // Missing newline after files
523 assert!(ChangelogRevisionData::new(Cow::Borrowed(
523 assert!(ChangelogRevisionData::new(Cow::Borrowed(
524 b"abcd\n\n0 0\nfile1\nfile2"
524 b"abcd\n\n0 0\nfile1\nfile2"
525 ))
525 ))
526 .is_err(),);
526 .is_err(),);
527 // Only one newline after files
527 // Only one newline after files
528 assert!(ChangelogRevisionData::new(Cow::Borrowed(
528 assert!(ChangelogRevisionData::new(Cow::Borrowed(
529 b"abcd\n\n0 0\nfile1\nfile2\n"
529 b"abcd\n\n0 0\nfile1\nfile2\n"
530 ))
530 ))
531 .is_err(),);
531 .is_err(),);
532 }
532 }
533
533
534 #[test]
534 #[test]
535 fn test_create_changelogrevisiondata() {
535 fn test_create_changelogrevisiondata() {
536 let data = ChangelogRevisionData::new(Cow::Borrowed(
536 let data = ChangelogRevisionData::new(Cow::Borrowed(
537 b"0123456789abcdef0123456789abcdef01234567
537 b"0123456789abcdef0123456789abcdef01234567
538 Some One <someone@example.com>
538 Some One <someone@example.com>
539 0 0
539 0 0
540 file1
540 file1
541 file2
541 file2
542
542
543 some
543 some
544 commit
544 commit
545 message",
545 message",
546 ))
546 ))
547 .unwrap();
547 .unwrap();
548 assert_eq!(
548 assert_eq!(
549 data.manifest_node().unwrap(),
549 data.manifest_node().unwrap(),
550 Node::from_hex("0123456789abcdef0123456789abcdef01234567")
550 Node::from_hex("0123456789abcdef0123456789abcdef01234567")
551 .unwrap()
551 .unwrap()
552 );
552 );
553 assert_eq!(data.user(), b"Some One <someone@example.com>");
553 assert_eq!(data.user(), b"Some One <someone@example.com>");
554 assert_eq!(data.timestamp_line(), b"0 0");
554 assert_eq!(data.timestamp_line(), b"0 0");
555 assert_eq!(
555 assert_eq!(
556 data.files().collect_vec(),
556 data.files().collect_vec(),
557 vec![HgPath::new("file1"), HgPath::new("file2")]
557 vec![HgPath::new("file1"), HgPath::new("file2")]
558 );
558 );
559 assert_eq!(data.description(), b"some\ncommit\nmessage");
559 assert_eq!(data.description(), b"some\ncommit\nmessage");
560 }
560 }
561
561
562 #[test]
562 #[test]
563 fn test_data_from_rev_null() -> Result<(), RevlogError> {
563 fn test_data_from_rev_null() -> Result<(), RevlogError> {
564 // an empty revlog will be enough for this case
564 // an empty revlog will be enough for this case
565 let temp = tempfile::tempdir().unwrap();
565 let temp = tempfile::tempdir().unwrap();
566 let vfs = Vfs { base: temp.path() };
566 let vfs = VfsImpl {
567 base: temp.path().to_owned(),
568 };
567 std::fs::write(temp.path().join("foo.i"), b"").unwrap();
569 std::fs::write(temp.path().join("foo.i"), b"").unwrap();
568 std::fs::write(temp.path().join("foo.d"), b"").unwrap();
570 std::fs::write(temp.path().join("foo.d"), b"").unwrap();
569 let revlog = Revlog::open(
571 let revlog = Revlog::open(
570 &vfs,
572 &vfs,
571 "foo.i",
573 "foo.i",
572 None,
574 None,
573 RevlogOpenOptions::new(
575 RevlogOpenOptions::new(
574 false,
576 false,
575 RevlogDataConfig::default(),
577 RevlogDataConfig::default(),
576 RevlogDeltaConfig::default(),
578 RevlogDeltaConfig::default(),
577 RevlogFeatureConfig::default(),
579 RevlogFeatureConfig::default(),
578 ),
580 ),
579 )
581 )
580 .unwrap();
582 .unwrap();
581
583
582 let changelog = Changelog { revlog };
584 let changelog = Changelog { revlog };
583 assert_eq!(
585 assert_eq!(
584 changelog.data_for_rev(NULL_REVISION.into())?,
586 changelog.data_for_rev(NULL_REVISION.into())?,
585 ChangelogRevisionData::null()
587 ChangelogRevisionData::null()
586 );
588 );
587 // same with the intermediate entry object
589 // same with the intermediate entry object
588 assert_eq!(
590 assert_eq!(
589 changelog.entry_for_rev(NULL_REVISION.into())?.data()?,
591 changelog.entry_for_rev(NULL_REVISION.into())?.data()?,
590 ChangelogRevisionData::null()
592 ChangelogRevisionData::null()
591 );
593 );
592 Ok(())
594 Ok(())
593 }
595 }
594
596
595 #[test]
597 #[test]
596 fn test_empty_files_list() {
598 fn test_empty_files_list() {
597 assert!(ChangelogRevisionData::null()
599 assert!(ChangelogRevisionData::null()
598 .files()
600 .files()
599 .collect_vec()
601 .collect_vec()
600 .is_empty());
602 .is_empty());
601 }
603 }
602
604
603 #[test]
605 #[test]
604 fn test_unescape_basic() {
606 fn test_unescape_basic() {
605 // '\0', '\\', '\n', and '\r' are correctly unescaped.
607 // '\0', '\\', '\n', and '\r' are correctly unescaped.
606 let expected = b"AAA\0BBB\\CCC\nDDD\rEEE";
608 let expected = b"AAA\0BBB\\CCC\nDDD\rEEE";
607 let escaped = br"AAA\0BBB\\CCC\nDDD\rEEE";
609 let escaped = br"AAA\0BBB\\CCC\nDDD\rEEE";
608 let unescaped = unescape_extra(escaped);
610 let unescaped = unescape_extra(escaped);
609 assert_eq!(&expected[..], &unescaped[..]);
611 assert_eq!(&expected[..], &unescaped[..]);
610 }
612 }
611
613
612 #[test]
614 #[test]
613 fn test_unescape_unsupported_sequence() {
615 fn test_unescape_unsupported_sequence() {
614 // Other escape sequences are left unaltered.
616 // Other escape sequences are left unaltered.
615 for c in 0u8..255 {
617 for c in 0u8..255 {
616 match c {
618 match c {
617 b'0' | b'\\' | b'n' | b'r' => continue,
619 b'0' | b'\\' | b'n' | b'r' => continue,
618 c => {
620 c => {
619 let expected = &[b'\\', c][..];
621 let expected = &[b'\\', c][..];
620 let unescaped = unescape_extra(expected);
622 let unescaped = unescape_extra(expected);
621 assert_eq!(expected, &unescaped[..]);
623 assert_eq!(expected, &unescaped[..]);
622 }
624 }
623 }
625 }
624 }
626 }
625 }
627 }
626
628
627 #[test]
629 #[test]
628 fn test_unescape_trailing_backslash() {
630 fn test_unescape_trailing_backslash() {
629 // Trailing backslashes are OK.
631 // Trailing backslashes are OK.
630 let expected = br"hi\";
632 let expected = br"hi\";
631 let unescaped = unescape_extra(expected);
633 let unescaped = unescape_extra(expected);
632 assert_eq!(&expected[..], &unescaped[..]);
634 assert_eq!(&expected[..], &unescaped[..]);
633 }
635 }
634
636
635 #[test]
637 #[test]
636 fn test_unescape_nul_followed_by_octal() {
638 fn test_unescape_nul_followed_by_octal() {
637 // Escaped NUL chars followed by octal digits are decoded correctly.
639 // Escaped NUL chars followed by octal digits are decoded correctly.
638 let expected = b"\x0012";
640 let expected = b"\x0012";
639 let escaped = br"\012";
641 let escaped = br"\012";
640 let unescaped = unescape_extra(escaped);
642 let unescaped = unescape_extra(escaped);
641 assert_eq!(&expected[..], &unescaped[..]);
643 assert_eq!(&expected[..], &unescaped[..]);
642 }
644 }
643
645
644 #[test]
646 #[test]
645 fn test_parse_float_timestamp() {
647 fn test_parse_float_timestamp() {
646 let test_cases = [
648 let test_cases = [
647 // Zero should map to the UNIX epoch.
649 // Zero should map to the UNIX epoch.
648 ("0.0", "1970-01-01 00:00:00"),
650 ("0.0", "1970-01-01 00:00:00"),
649 // Negative zero should be the same as positive zero.
651 // Negative zero should be the same as positive zero.
650 ("-0.0", "1970-01-01 00:00:00"),
652 ("-0.0", "1970-01-01 00:00:00"),
651 // Values without fractional components should work like integers.
653 // Values without fractional components should work like integers.
652 // (Assuming the timestamp is within the limits of f64 precision.)
654 // (Assuming the timestamp is within the limits of f64 precision.)
653 ("1115154970.0", "2005-05-03 21:16:10"),
655 ("1115154970.0", "2005-05-03 21:16:10"),
654 // We expect some loss of precision in the fractional component
656 // We expect some loss of precision in the fractional component
655 // when parsing arbitrary floating-point values.
657 // when parsing arbitrary floating-point values.
656 ("1115154970.123456789", "2005-05-03 21:16:10.123456716"),
658 ("1115154970.123456789", "2005-05-03 21:16:10.123456716"),
657 // But representable f64 values should parse losslessly.
659 // But representable f64 values should parse losslessly.
658 ("1115154970.123456716", "2005-05-03 21:16:10.123456716"),
660 ("1115154970.123456716", "2005-05-03 21:16:10.123456716"),
659 // Negative fractional components are subtracted from the epoch.
661 // Negative fractional components are subtracted from the epoch.
660 ("-1.333", "1969-12-31 23:59:58.667"),
662 ("-1.333", "1969-12-31 23:59:58.667"),
661 ];
663 ];
662
664
663 for (input, expected) in test_cases {
665 for (input, expected) in test_cases {
664 let res = parse_float_timestamp(input).unwrap().to_string();
666 let res = parse_float_timestamp(input).unwrap().to_string();
665 assert_eq!(res, expected);
667 assert_eq!(res, expected);
666 }
668 }
667 }
669 }
668
670
669 fn escape_extra(bytes: &[u8]) -> Vec<u8> {
671 fn escape_extra(bytes: &[u8]) -> Vec<u8> {
670 let mut output = Vec::with_capacity(bytes.len());
672 let mut output = Vec::with_capacity(bytes.len());
671
673
672 for c in bytes.iter().copied() {
674 for c in bytes.iter().copied() {
673 output.extend_from_slice(match c {
675 output.extend_from_slice(match c {
674 b'\0' => &b"\\0"[..],
676 b'\0' => &b"\\0"[..],
675 b'\\' => &b"\\\\"[..],
677 b'\\' => &b"\\\\"[..],
676 b'\n' => &b"\\n"[..],
678 b'\n' => &b"\\n"[..],
677 b'\r' => &b"\\r"[..],
679 b'\r' => &b"\\r"[..],
678 _ => {
680 _ => {
679 output.push(c);
681 output.push(c);
680 continue;
682 continue;
681 }
683 }
682 });
684 });
683 }
685 }
684
686
685 output
687 output
686 }
688 }
687
689
688 fn encode_extra<K, V>(pairs: impl IntoIterator<Item = (K, V)>) -> Vec<u8>
690 fn encode_extra<K, V>(pairs: impl IntoIterator<Item = (K, V)>) -> Vec<u8>
689 where
691 where
690 K: AsRef<[u8]>,
692 K: AsRef<[u8]>,
691 V: AsRef<[u8]>,
693 V: AsRef<[u8]>,
692 {
694 {
693 let extras = pairs.into_iter().map(|(k, v)| {
695 let extras = pairs.into_iter().map(|(k, v)| {
694 escape_extra(&[k.as_ref(), b":", v.as_ref()].concat())
696 escape_extra(&[k.as_ref(), b":", v.as_ref()].concat())
695 });
697 });
696 // Use fully-qualified syntax to avoid a future naming conflict with
698 // Use fully-qualified syntax to avoid a future naming conflict with
697 // the standard library: https://github.com/rust-lang/rust/issues/79524
699 // the standard library: https://github.com/rust-lang/rust/issues/79524
698 Itertools::intersperse(extras, b"\0".to_vec()).concat()
700 Itertools::intersperse(extras, b"\0".to_vec()).concat()
699 }
701 }
700
702
701 #[test]
703 #[test]
702 fn test_decode_extra() {
704 fn test_decode_extra() {
703 let extra = [
705 let extra = [
704 ("branch".into(), b"default".to_vec()),
706 ("branch".into(), b"default".to_vec()),
705 ("key-with-hyphens".into(), b"value1".to_vec()),
707 ("key-with-hyphens".into(), b"value1".to_vec()),
706 ("key_with_underscores".into(), b"value2".to_vec()),
708 ("key_with_underscores".into(), b"value2".to_vec()),
707 ("empty-value".into(), b"".to_vec()),
709 ("empty-value".into(), b"".to_vec()),
708 ("binary-value".into(), (0u8..=255).collect::<Vec<_>>()),
710 ("binary-value".into(), (0u8..=255).collect::<Vec<_>>()),
709 ]
711 ]
710 .into_iter()
712 .into_iter()
711 .collect::<BTreeMap<String, Vec<u8>>>();
713 .collect::<BTreeMap<String, Vec<u8>>>();
712
714
713 let encoded = encode_extra(&extra);
715 let encoded = encode_extra(&extra);
714 let decoded = decode_extra(&encoded).unwrap();
716 let decoded = decode_extra(&encoded).unwrap();
715
717
716 assert_eq!(extra, decoded);
718 assert_eq!(extra, decoded);
717 }
719 }
718
720
719 #[test]
721 #[test]
720 fn test_corrupt_extra() {
722 fn test_corrupt_extra() {
721 let test_cases = [
723 let test_cases = [
722 (&b""[..], "empty input"),
724 (&b""[..], "empty input"),
723 (&b"\0"[..], "unexpected null byte"),
725 (&b"\0"[..], "unexpected null byte"),
724 (&b":empty-key"[..], "empty key"),
726 (&b":empty-key"[..], "empty key"),
725 (&b"\0leading-null:"[..], "leading null"),
727 (&b"\0leading-null:"[..], "leading null"),
726 (&b"trailing-null:\0"[..], "trailing null"),
728 (&b"trailing-null:\0"[..], "trailing null"),
727 (&b"missing-value"[..], "missing value"),
729 (&b"missing-value"[..], "missing value"),
728 (&b"$!@# non-alphanum-key:"[..], "non-alphanumeric key"),
730 (&b"$!@# non-alphanum-key:"[..], "non-alphanumeric key"),
729 (&b"\xF0\x9F\xA6\x80 non-ascii-key:"[..], "non-ASCII key"),
731 (&b"\xF0\x9F\xA6\x80 non-ascii-key:"[..], "non-ASCII key"),
730 ];
732 ];
731
733
732 for (extra, msg) in test_cases {
734 for (extra, msg) in test_cases {
733 assert!(
735 assert!(
734 decode_extra(extra).is_err(),
736 decode_extra(extra).is_err(),
735 "corrupt extra should have failed to parse: {}",
737 "corrupt extra should have failed to parse: {}",
736 msg
738 msg
737 );
739 );
738 }
740 }
739 }
741 }
740
742
741 #[test]
743 #[test]
742 fn test_parse_timestamp_line() {
744 fn test_parse_timestamp_line() {
743 let extra = [
745 let extra = [
744 ("branch".into(), b"default".to_vec()),
746 ("branch".into(), b"default".to_vec()),
745 ("key-with-hyphens".into(), b"value1".to_vec()),
747 ("key-with-hyphens".into(), b"value1".to_vec()),
746 ("key_with_underscores".into(), b"value2".to_vec()),
748 ("key_with_underscores".into(), b"value2".to_vec()),
747 ("empty-value".into(), b"".to_vec()),
749 ("empty-value".into(), b"".to_vec()),
748 ("binary-value".into(), (0u8..=255).collect::<Vec<_>>()),
750 ("binary-value".into(), (0u8..=255).collect::<Vec<_>>()),
749 ]
751 ]
750 .into_iter()
752 .into_iter()
751 .collect::<BTreeMap<String, Vec<u8>>>();
753 .collect::<BTreeMap<String, Vec<u8>>>();
752
754
753 let mut line: Vec<u8> = b"1115154970 28800 ".to_vec();
755 let mut line: Vec<u8> = b"1115154970 28800 ".to_vec();
754 line.extend_from_slice(&encode_extra(&extra));
756 line.extend_from_slice(&encode_extra(&extra));
755
757
756 let timestamp = parse_timestamp(&line).unwrap();
758 let timestamp = parse_timestamp(&line).unwrap();
757 assert_eq!(&timestamp.to_rfc3339(), "2005-05-03T13:16:10-08:00");
759 assert_eq!(&timestamp.to_rfc3339(), "2005-05-03T13:16:10-08:00");
758
760
759 let parsed_extra = parse_timestamp_line_extra(&line).unwrap();
761 let parsed_extra = parse_timestamp_line_extra(&line).unwrap();
760 assert_eq!(extra, parsed_extra);
762 assert_eq!(extra, parsed_extra);
761 }
763 }
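    // A minimal sketch of the timestamp line layout exercised just above,
    // using only helpers already present in this test module:
    // "<unix seconds> <offset in seconds west of UTC> <escaped extras>",
    // where extras are "key:value" pairs joined by NUL bytes and escaped
    // with the same sequences that `unescape_extra` understands.
    #[test]
    fn test_timestamp_line_sketch() {
        let extra = [("branch".into(), b"default".to_vec())]
            .into_iter()
            .collect::<BTreeMap<String, Vec<u8>>>();

        let mut line: Vec<u8> = b"0 0 ".to_vec();
        line.extend_from_slice(&encode_extra(&extra));

        // Offset 0 is UTC, so the epoch formats with a +00:00 offset.
        let timestamp = parse_timestamp(&line).unwrap();
        assert_eq!(&timestamp.to_rfc3339(), "1970-01-01T00:00:00+00:00");

        // The escaped extras decode back to the original map.
        assert_eq!(parse_timestamp_line_extra(&line).unwrap(), extra);
    }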
762 }
764 }
@@ -1,245 +1,245
1 use crate::errors::HgError;
1 use crate::errors::HgError;
2 use crate::exit_codes;
2 use crate::exit_codes;
3 use crate::repo::Repo;
3 use crate::repo::Repo;
4 use crate::revlog::path_encode::path_encode;
4 use crate::revlog::path_encode::path_encode;
5 use crate::revlog::NodePrefix;
5 use crate::revlog::NodePrefix;
6 use crate::revlog::Revision;
6 use crate::revlog::Revision;
7 use crate::revlog::RevlogEntry;
7 use crate::revlog::RevlogEntry;
8 use crate::revlog::{Revlog, RevlogError};
8 use crate::revlog::{Revlog, RevlogError};
9 use crate::utils::files::get_path_from_bytes;
9 use crate::utils::files::get_path_from_bytes;
10 use crate::utils::hg_path::HgPath;
10 use crate::utils::hg_path::HgPath;
11 use crate::utils::SliceExt;
11 use crate::utils::SliceExt;
12 use crate::Graph;
12 use crate::Graph;
13 use crate::GraphError;
13 use crate::GraphError;
14 use crate::RevlogOpenOptions;
14 use crate::RevlogOpenOptions;
15 use crate::UncheckedRevision;
15 use crate::UncheckedRevision;
16 use std::path::PathBuf;
16 use std::path::PathBuf;
17
17
18 /// A specialized `Revlog` to work with file data logs.
18 /// A specialized `Revlog` to work with file data logs.
19 pub struct Filelog {
19 pub struct Filelog {
20 /// The generic `revlog` format.
20 /// The generic `revlog` format.
21 revlog: Revlog,
21 revlog: Revlog,
22 }
22 }
23
23
24 impl Graph for Filelog {
24 impl Graph for Filelog {
25 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
25 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
26 self.revlog.parents(rev)
26 self.revlog.parents(rev)
27 }
27 }
28 }
28 }
29
29
30 impl Filelog {
30 impl Filelog {
31 pub fn open_vfs(
31 pub fn open_vfs(
32 store_vfs: &crate::vfs::Vfs<'_>,
32 store_vfs: &crate::vfs::VfsImpl,
33 file_path: &HgPath,
33 file_path: &HgPath,
34 options: RevlogOpenOptions,
34 options: RevlogOpenOptions,
35 ) -> Result<Self, HgError> {
35 ) -> Result<Self, HgError> {
36 let index_path = store_path(file_path, b".i");
36 let index_path = store_path(file_path, b".i");
37 let data_path = store_path(file_path, b".d");
37 let data_path = store_path(file_path, b".d");
38 let revlog =
38 let revlog =
39 Revlog::open(store_vfs, index_path, Some(&data_path), options)?;
39 Revlog::open(store_vfs, index_path, Some(&data_path), options)?;
40 Ok(Self { revlog })
40 Ok(Self { revlog })
41 }
41 }
42
42
43 pub fn open(
43 pub fn open(
44 repo: &Repo,
44 repo: &Repo,
45 file_path: &HgPath,
45 file_path: &HgPath,
46 options: RevlogOpenOptions,
46 options: RevlogOpenOptions,
47 ) -> Result<Self, HgError> {
47 ) -> Result<Self, HgError> {
48 Self::open_vfs(&repo.store_vfs(), file_path, options)
48 Self::open_vfs(&repo.store_vfs(), file_path, options)
49 }
49 }
50
50
51 /// The given node ID is that of the file as found in a filelog, not of a
51 /// The given node ID is that of the file as found in a filelog, not of a
52 /// changeset.
52 /// changeset.
53 pub fn data_for_node(
53 pub fn data_for_node(
54 &self,
54 &self,
55 file_node: impl Into<NodePrefix>,
55 file_node: impl Into<NodePrefix>,
56 ) -> Result<FilelogRevisionData, RevlogError> {
56 ) -> Result<FilelogRevisionData, RevlogError> {
57 let file_rev = self.revlog.rev_from_node(file_node.into())?;
57 let file_rev = self.revlog.rev_from_node(file_node.into())?;
58 self.data_for_rev(file_rev.into())
58 self.data_for_rev(file_rev.into())
59 }
59 }
60
60
61 /// The given revision is that of the file as found in a filelog, not of a
61 /// The given revision is that of the file as found in a filelog, not of a
62 /// changeset.
62 /// changeset.
63 pub fn data_for_rev(
63 pub fn data_for_rev(
64 &self,
64 &self,
65 file_rev: UncheckedRevision,
65 file_rev: UncheckedRevision,
66 ) -> Result<FilelogRevisionData, RevlogError> {
66 ) -> Result<FilelogRevisionData, RevlogError> {
67 let data: Vec<u8> = self.revlog.get_rev_data(file_rev)?.into_owned();
67 let data: Vec<u8> = self.revlog.get_rev_data(file_rev)?.into_owned();
68 Ok(FilelogRevisionData(data))
68 Ok(FilelogRevisionData(data))
69 }
69 }
70
70
71 /// The given node ID is that of the file as found in a filelog, not of a
71 /// The given node ID is that of the file as found in a filelog, not of a
72 /// changeset.
72 /// changeset.
73 pub fn entry_for_node(
73 pub fn entry_for_node(
74 &self,
74 &self,
75 file_node: impl Into<NodePrefix>,
75 file_node: impl Into<NodePrefix>,
76 ) -> Result<FilelogEntry, RevlogError> {
76 ) -> Result<FilelogEntry, RevlogError> {
77 let file_rev = self.revlog.rev_from_node(file_node.into())?;
77 let file_rev = self.revlog.rev_from_node(file_node.into())?;
78 self.entry_for_checked_rev(file_rev)
78 self.entry_for_checked_rev(file_rev)
79 }
79 }
80
80
81 /// The given revision is that of the file as found in a filelog, not of a
81 /// The given revision is that of the file as found in a filelog, not of a
82 /// changeset.
82 /// changeset.
83 pub fn entry_for_rev(
83 pub fn entry_for_rev(
84 &self,
84 &self,
85 file_rev: UncheckedRevision,
85 file_rev: UncheckedRevision,
86 ) -> Result<FilelogEntry, RevlogError> {
86 ) -> Result<FilelogEntry, RevlogError> {
87 Ok(FilelogEntry(self.revlog.get_entry(file_rev)?))
87 Ok(FilelogEntry(self.revlog.get_entry(file_rev)?))
88 }
88 }
89
89
90 fn entry_for_checked_rev(
90 fn entry_for_checked_rev(
91 &self,
91 &self,
92 file_rev: Revision,
92 file_rev: Revision,
93 ) -> Result<FilelogEntry, RevlogError> {
93 ) -> Result<FilelogEntry, RevlogError> {
94 Ok(FilelogEntry(
94 Ok(FilelogEntry(
95 self.revlog.get_entry_for_checked_rev(file_rev)?,
95 self.revlog.get_entry_for_checked_rev(file_rev)?,
96 ))
96 ))
97 }
97 }
98 }
98 }
99
99
100 fn store_path(hg_path: &HgPath, suffix: &[u8]) -> PathBuf {
100 fn store_path(hg_path: &HgPath, suffix: &[u8]) -> PathBuf {
101 let encoded_bytes =
101 let encoded_bytes =
102 path_encode(&[b"data/", hg_path.as_bytes(), suffix].concat());
102 path_encode(&[b"data/", hg_path.as_bytes(), suffix].concat());
103 get_path_from_bytes(&encoded_bytes).into()
103 get_path_from_bytes(&encoded_bytes).into()
104 }
104 }
105
105
106 pub struct FilelogEntry<'a>(RevlogEntry<'a>);
106 pub struct FilelogEntry<'a>(RevlogEntry<'a>);
107
107
108 impl FilelogEntry<'_> {
108 impl FilelogEntry<'_> {
109 /// `self.data()` can be expensive, with decompression and delta
109 /// `self.data()` can be expensive, with decompression and delta
110 /// resolution.
110 /// resolution.
111 ///
111 ///
112 /// *Without* paying this cost, based on revlog index information
112 /// *Without* paying this cost, based on revlog index information
113 /// including `RevlogEntry::uncompressed_len`:
113 /// including `RevlogEntry::uncompressed_len`:
114 ///
114 ///
115 /// * Returns `true` if the length that `self.data().file_data().len()`
115 /// * Returns `true` if the length that `self.data().file_data().len()`
116 /// would return is definitely **not equal** to `other_len`.
116 /// would return is definitely **not equal** to `other_len`.
117 /// * Returns `false` if available information is inconclusive.
117 /// * Returns `false` if available information is inconclusive.
118 pub fn file_data_len_not_equal_to(&self, other_len: u64) -> bool {
118 pub fn file_data_len_not_equal_to(&self, other_len: u64) -> bool {
119 // Relevant code that implements this behavior in Python:
119 // Relevant code that implements this behavior in Python:
120 // basefilectx.cmp, filelog.size, storageutil.filerevisioncopied,
120 // basefilectx.cmp, filelog.size, storageutil.filerevisioncopied,
121 // revlog.size, revlog.rawsize
121 // revlog.size, revlog.rawsize
122
122
123 // Let’s call `file_data_len` what would be returned by
123 // Let’s call `file_data_len` what would be returned by
124 // `self.data().file_data().len()`.
124 // `self.data().file_data().len()`.
125
125
126 if self.0.is_censored() {
126 if self.0.is_censored() {
127 let file_data_len = 0;
127 let file_data_len = 0;
128 return other_len != file_data_len;
128 return other_len != file_data_len;
129 }
129 }
130
130
131 if self.0.has_length_affecting_flag_processor() {
131 if self.0.has_length_affecting_flag_processor() {
132 // We can’t conclude anything about `file_data_len`.
132 // We can’t conclude anything about `file_data_len`.
133 return false;
133 return false;
134 }
134 }
135
135
136 // Revlog revisions (usually) have metadata for the size of
136 // Revlog revisions (usually) have metadata for the size of
137 // their data after decompression and delta resolution
137 // their data after decompression and delta resolution
138 // as would be returned by `Revlog::get_rev_data`.
138 // as would be returned by `Revlog::get_rev_data`.
139 //
139 //
140 // For filelogs this is the file’s contents preceded by an optional
140 // For filelogs this is the file’s contents preceded by an optional
141 // metadata block.
141 // metadata block.
142 let uncompressed_len = if let Some(l) = self.0.uncompressed_len() {
142 let uncompressed_len = if let Some(l) = self.0.uncompressed_len() {
143 l as u64
143 l as u64
144 } else {
144 } else {
145 // The field was set to -1, the actual uncompressed len is unknown.
145 // The field was set to -1, the actual uncompressed len is unknown.
146 // We need to decompress to say more.
146 // We need to decompress to say more.
147 return false;
147 return false;
148 };
148 };
149 // `uncompressed_len = file_data_len + optional_metadata_len`,
149 // `uncompressed_len = file_data_len + optional_metadata_len`,
150 // so `file_data_len <= uncompressed_len`.
150 // so `file_data_len <= uncompressed_len`.
151 if uncompressed_len < other_len {
151 if uncompressed_len < other_len {
152 // Transitively, `file_data_len < other_len`.
152 // Transitively, `file_data_len < other_len`.
153 // So `other_len != file_data_len` definitely.
153 // So `other_len != file_data_len` definitely.
154 return true;
154 return true;
155 }
155 }
156
156
157 if uncompressed_len == other_len + 4 {
157 if uncompressed_len == other_len + 4 {
158 // It’s possible that `file_data_len == other_len` with an empty
158 // It’s possible that `file_data_len == other_len` with an empty
159 // metadata block (2 start marker bytes + 2 end marker bytes).
159 // metadata block (2 start marker bytes + 2 end marker bytes).
160 // This happens when there wouldn’t otherwise be metadata, but
160 // This happens when there wouldn’t otherwise be metadata, but
161 // the first 2 bytes of file data happen to match a start marker
161 // the first 2 bytes of file data happen to match a start marker
162 // and would be ambiguous.
162 // and would be ambiguous.
163 return false;
163 return false;
164 }
164 }
165
165
166 if !self.0.has_p1() {
166 if !self.0.has_p1() {
167 // There may or may not be copy metadata, so we can’t deduce more
167 // There may or may not be copy metadata, so we can’t deduce more
168 // about `file_data_len` without computing file data.
168 // about `file_data_len` without computing file data.
169 return false;
169 return false;
170 }
170 }
171
171
172 // Filelog ancestry is not meaningful in the way changelog ancestry is.
172 // Filelog ancestry is not meaningful in the way changelog ancestry is.
173 // It only provides hints to delta generation.
173 // It only provides hints to delta generation.
174 // p1 and p2 are set to null when making a copy or rename since
174 // p1 and p2 are set to null when making a copy or rename since
175 // contents are likely unrelated to what might have previously existed
175 // contents are likely unrelated to what might have previously existed
176 // at the destination path.
176 // at the destination path.
177 //
177 //
178 // Conversely, since here p1 is non-null, there is no copy metadata.
178 // Conversely, since here p1 is non-null, there is no copy metadata.
179 // Note that this reasoning may be invalidated in the presence of
179 // Note that this reasoning may be invalidated in the presence of
180 // merges made by some previous versions of Mercurial that
180 // merges made by some previous versions of Mercurial that
181 // swapped p1 and p2. See <https://bz.mercurial-scm.org/show_bug.cgi?id=6528>
181 // swapped p1 and p2. See <https://bz.mercurial-scm.org/show_bug.cgi?id=6528>
182 // and `tests/test-issue6528.t`.
182 // and `tests/test-issue6528.t`.
183 //
183 //
184 // Since copy metadata is currently the only kind of metadata
184 // Since copy metadata is currently the only kind of metadata
185 // kept in revlog data of filelogs,
185 // kept in revlog data of filelogs,
186 // this `FilelogEntry` does not have such metadata:
186 // this `FilelogEntry` does not have such metadata:
187 let file_data_len = uncompressed_len;
187 let file_data_len = uncompressed_len;
188
188
189 file_data_len != other_len
189 file_data_len != other_len
190 }
190 }
191
191
192 pub fn data(&self) -> Result<FilelogRevisionData, HgError> {
192 pub fn data(&self) -> Result<FilelogRevisionData, HgError> {
193 let data = self.0.data();
193 let data = self.0.data();
194 match data {
194 match data {
195 Ok(data) => Ok(FilelogRevisionData(data.into_owned())),
195 Ok(data) => Ok(FilelogRevisionData(data.into_owned())),
196 // Errors other than `HgError` should not happen at this point
196 // Errors other than `HgError` should not happen at this point
197 Err(e) => match e {
197 Err(e) => match e {
198 RevlogError::Other(hg_error) => Err(hg_error),
198 RevlogError::Other(hg_error) => Err(hg_error),
199 revlog_error => Err(HgError::abort(
199 revlog_error => Err(HgError::abort(
200 revlog_error.to_string(),
200 revlog_error.to_string(),
201 exit_codes::ABORT,
201 exit_codes::ABORT,
202 None,
202 None,
203 )),
203 )),
204 },
204 },
205 }
205 }
206 }
206 }
207 }
207 }
208
208
209 /// The data for one revision in a filelog, uncompressed and delta-resolved.
209 /// The data for one revision in a filelog, uncompressed and delta-resolved.
210 pub struct FilelogRevisionData(Vec<u8>);
210 pub struct FilelogRevisionData(Vec<u8>);
211
211
212 impl FilelogRevisionData {
212 impl FilelogRevisionData {
213 /// Split into metadata and data
213 /// Split into metadata and data
214 pub fn split(&self) -> Result<(Option<&[u8]>, &[u8]), HgError> {
214 pub fn split(&self) -> Result<(Option<&[u8]>, &[u8]), HgError> {
215 const DELIMITER: &[u8; 2] = &[b'\x01', b'\n'];
215 const DELIMITER: &[u8; 2] = &[b'\x01', b'\n'];
216
216
217 if let Some(rest) = self.0.drop_prefix(DELIMITER) {
217 if let Some(rest) = self.0.drop_prefix(DELIMITER) {
218 if let Some((metadata, data)) = rest.split_2_by_slice(DELIMITER) {
218 if let Some((metadata, data)) = rest.split_2_by_slice(DELIMITER) {
219 Ok((Some(metadata), data))
219 Ok((Some(metadata), data))
220 } else {
220 } else {
221 Err(HgError::corrupted(
221 Err(HgError::corrupted(
222 "Missing metadata end delimiter in filelog entry",
222 "Missing metadata end delimiter in filelog entry",
223 ))
223 ))
224 }
224 }
225 } else {
225 } else {
226 Ok((None, &self.0))
226 Ok((None, &self.0))
227 }
227 }
228 }
228 }
229
229
230 /// Returns the file contents at this revision, stripped of any metadata
230 /// Returns the file contents at this revision, stripped of any metadata
231 pub fn file_data(&self) -> Result<&[u8], HgError> {
231 pub fn file_data(&self) -> Result<&[u8], HgError> {
232 let (_metadata, data) = self.split()?;
232 let (_metadata, data) = self.split()?;
233 Ok(data)
233 Ok(data)
234 }
234 }
235
235
236 /// Consume the entry, and convert it into data, discarding any metadata,
236 /// Consume the entry, and convert it into data, discarding any metadata,
237 /// if present.
237 /// if present.
238 pub fn into_file_data(self) -> Result<Vec<u8>, HgError> {
238 pub fn into_file_data(self) -> Result<Vec<u8>, HgError> {
239 if let (Some(_metadata), data) = self.split()? {
239 if let (Some(_metadata), data) = self.split()? {
240 Ok(data.to_owned())
240 Ok(data.to_owned())
241 } else {
241 } else {
242 Ok(self.0)
242 Ok(self.0)
243 }
243 }
244 }
244 }
245 }
245 }
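// A minimal sketch of the layout that `FilelogRevisionData::split` handles:
// an optional metadata block wrapped in b"\x01\n" delimiters, followed by
// the file contents. The "copy:" key below is only an illustrative example
// of what such a metadata block may contain.
#[cfg(test)]
mod filelog_data_sketch {
    use super::*;

    #[test]
    fn split_with_and_without_metadata() {
        let with_meta = FilelogRevisionData(
            b"\x01\ncopy: old/name\n\x01\nfile contents".to_vec(),
        );
        let (metadata, data) = with_meta.split().unwrap();
        assert_eq!(metadata.unwrap(), &b"copy: old/name\n"[..]);
        assert_eq!(data, &b"file contents"[..]);

        // Without a leading delimiter, everything is file data.
        let plain = FilelogRevisionData(b"file contents".to_vec());
        assert_eq!(plain.file_data().unwrap(), &b"file contents"[..]);
    }
}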
@@ -1,213 +1,213
1 use crate::errors::HgError;
1 use crate::errors::HgError;
2 use crate::revlog::{Node, NodePrefix};
2 use crate::revlog::{Node, NodePrefix};
3 use crate::revlog::{Revlog, RevlogError};
3 use crate::revlog::{Revlog, RevlogError};
4 use crate::utils::hg_path::HgPath;
4 use crate::utils::hg_path::HgPath;
5 use crate::utils::SliceExt;
5 use crate::utils::SliceExt;
6 use crate::vfs::Vfs;
6 use crate::vfs::VfsImpl;
7 use crate::{
7 use crate::{
8 Graph, GraphError, Revision, RevlogOpenOptions, UncheckedRevision,
8 Graph, GraphError, Revision, RevlogOpenOptions, UncheckedRevision,
9 };
9 };
10
10
11 /// A specialized `Revlog` to work with `manifest` data format.
11 /// A specialized `Revlog` to work with `manifest` data format.
12 pub struct Manifestlog {
12 pub struct Manifestlog {
13 /// The generic `revlog` format.
13 /// The generic `revlog` format.
14 pub(crate) revlog: Revlog,
14 pub(crate) revlog: Revlog,
15 }
15 }
16
16
17 impl Graph for Manifestlog {
17 impl Graph for Manifestlog {
18 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
18 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
19 self.revlog.parents(rev)
19 self.revlog.parents(rev)
20 }
20 }
21 }
21 }
22
22
23 impl Manifestlog {
23 impl Manifestlog {
24 /// Open the `manifest` of a repository given by its root.
24 /// Open the `manifest` of a repository given by its root.
25 pub fn open(
25 pub fn open(
26 store_vfs: &Vfs,
26 store_vfs: &VfsImpl,
27 options: RevlogOpenOptions,
27 options: RevlogOpenOptions,
28 ) -> Result<Self, HgError> {
28 ) -> Result<Self, HgError> {
29 let revlog = Revlog::open(store_vfs, "00manifest.i", None, options)?;
29 let revlog = Revlog::open(store_vfs, "00manifest.i", None, options)?;
30 Ok(Self { revlog })
30 Ok(Self { revlog })
31 }
31 }
32
32
33 /// Return the `Manifest` for the given node ID.
33 /// Return the `Manifest` for the given node ID.
34 ///
34 ///
35 /// Note: this is a node ID in the manifestlog, typically found through
35 /// Note: this is a node ID in the manifestlog, typically found through
36 /// `ChangelogEntry::manifest_node`. It is *not* the node ID of any
36 /// `ChangelogEntry::manifest_node`. It is *not* the node ID of any
37 /// changeset.
37 /// changeset.
38 ///
38 ///
39 /// See also `Repo::manifest_for_node`
39 /// See also `Repo::manifest_for_node`
40 pub fn data_for_node(
40 pub fn data_for_node(
41 &self,
41 &self,
42 node: NodePrefix,
42 node: NodePrefix,
43 ) -> Result<Manifest, RevlogError> {
43 ) -> Result<Manifest, RevlogError> {
44 let rev = self.revlog.rev_from_node(node)?;
44 let rev = self.revlog.rev_from_node(node)?;
45 self.data_for_checked_rev(rev)
45 self.data_for_checked_rev(rev)
46 }
46 }
47
47
48 /// Return the `Manifest` of a given revision number.
48 /// Return the `Manifest` of a given revision number.
49 ///
49 ///
50 /// Note: this is a revision number in the manifestlog, *not* of any
50 /// Note: this is a revision number in the manifestlog, *not* of any
51 /// changeset.
51 /// changeset.
52 ///
52 ///
53 /// See also `Repo::manifest_for_rev`
53 /// See also `Repo::manifest_for_rev`
54 pub fn data_for_rev(
54 pub fn data_for_rev(
55 &self,
55 &self,
56 rev: UncheckedRevision,
56 rev: UncheckedRevision,
57 ) -> Result<Manifest, RevlogError> {
57 ) -> Result<Manifest, RevlogError> {
58 let bytes = self.revlog.get_rev_data(rev)?.into_owned();
58 let bytes = self.revlog.get_rev_data(rev)?.into_owned();
59 Ok(Manifest { bytes })
59 Ok(Manifest { bytes })
60 }
60 }
61
61
62 pub fn data_for_checked_rev(
62 pub fn data_for_checked_rev(
63 &self,
63 &self,
64 rev: Revision,
64 rev: Revision,
65 ) -> Result<Manifest, RevlogError> {
65 ) -> Result<Manifest, RevlogError> {
66 let bytes =
66 let bytes =
67 self.revlog.get_rev_data_for_checked_rev(rev)?.into_owned();
67 self.revlog.get_rev_data_for_checked_rev(rev)?.into_owned();
68 Ok(Manifest { bytes })
68 Ok(Manifest { bytes })
69 }
69 }
70 }
70 }
71
71
72 /// `Manifestlog` entry which knows how to interpret the `manifest` data bytes.
72 /// `Manifestlog` entry which knows how to interpret the `manifest` data bytes.
73 #[derive(Debug)]
73 #[derive(Debug)]
74 pub struct Manifest {
74 pub struct Manifest {
75 /// Format for a manifest: flat sequence of variable-size entries,
75 /// Format for a manifest: flat sequence of variable-size entries,
76 /// sorted by path, each as:
76 /// sorted by path, each as:
77 ///
77 ///
78 /// ```text
78 /// ```text
79 /// <path> \0 <hex_node_id> <flags> \n
79 /// <path> \0 <hex_node_id> <flags> \n
80 /// ```
80 /// ```
81 ///
81 ///
82 /// The last entry is also terminated by a newline character.
82 /// The last entry is also terminated by a newline character.
83 /// Flags is one of `b""` (the empty string), `b"x"`, `b"l"`, or `b"t"`.
83 /// Flags is one of `b""` (the empty string), `b"x"`, `b"l"`, or `b"t"`.
84 bytes: Vec<u8>,
84 bytes: Vec<u8>,
85 }
85 }
86
86
87 impl Manifest {
87 impl Manifest {
88 pub fn iter(
88 pub fn iter(
89 &self,
89 &self,
90 ) -> impl Iterator<Item = Result<ManifestEntry, HgError>> {
90 ) -> impl Iterator<Item = Result<ManifestEntry, HgError>> {
91 self.bytes
91 self.bytes
92 .split(|b| b == &b'\n')
92 .split(|b| b == &b'\n')
93 .filter(|line| !line.is_empty())
93 .filter(|line| !line.is_empty())
94 .map(ManifestEntry::from_raw)
94 .map(ManifestEntry::from_raw)
95 }
95 }
96
96
97 /// If the given path is in this manifest, return its filelog node ID
97 /// If the given path is in this manifest, return its filelog node ID
98 pub fn find_by_path(
98 pub fn find_by_path(
99 &self,
99 &self,
100 path: &HgPath,
100 path: &HgPath,
101 ) -> Result<Option<ManifestEntry>, HgError> {
101 ) -> Result<Option<ManifestEntry>, HgError> {
102 use std::cmp::Ordering::*;
102 use std::cmp::Ordering::*;
103 let path = path.as_bytes();
103 let path = path.as_bytes();
104 // Both boundaries of this `&[u8]` slice are always at the boundary of
104 // Both boundaries of this `&[u8]` slice are always at the boundary of
105 // an entry
105 // an entry
106 let mut bytes = &*self.bytes;
106 let mut bytes = &*self.bytes;
107
107
108 // Binary search algorithm derived from `[T]::binary_search_by`
108 // Binary search algorithm derived from `[T]::binary_search_by`
109 // <https://github.com/rust-lang/rust/blob/1.57.0/library/core/src/slice/mod.rs#L2221>
109 // <https://github.com/rust-lang/rust/blob/1.57.0/library/core/src/slice/mod.rs#L2221>
110 // except we don’t have a slice of entries. Instead we jump to the
110 // except we don’t have a slice of entries. Instead we jump to the
111 // middle of the byte slice and look around for entry delimiters
111 // middle of the byte slice and look around for entry delimiters
112 // (newlines).
112 // (newlines).
113 while let Some(entry_range) = Self::find_entry_near_middle_of(bytes)? {
113 while let Some(entry_range) = Self::find_entry_near_middle_of(bytes)? {
114 let (entry_path, rest) =
114 let (entry_path, rest) =
115 ManifestEntry::split_path(&bytes[entry_range.clone()])?;
115 ManifestEntry::split_path(&bytes[entry_range.clone()])?;
116 let cmp = entry_path.cmp(path);
116 let cmp = entry_path.cmp(path);
117 if cmp == Less {
117 if cmp == Less {
118 let after_newline = entry_range.end + 1;
118 let after_newline = entry_range.end + 1;
119 bytes = &bytes[after_newline..];
119 bytes = &bytes[after_newline..];
120 } else if cmp == Greater {
120 } else if cmp == Greater {
121 bytes = &bytes[..entry_range.start];
121 bytes = &bytes[..entry_range.start];
122 } else {
122 } else {
123 return Ok(Some(ManifestEntry::from_path_and_rest(
123 return Ok(Some(ManifestEntry::from_path_and_rest(
124 entry_path, rest,
124 entry_path, rest,
125 )));
125 )));
126 }
126 }
127 }
127 }
128 Ok(None)
128 Ok(None)
129 }
129 }
130
130
131 /// If there is at least one, return the byte range of an entry *excluding*
131 /// If there is at least one, return the byte range of an entry *excluding*
132 /// the final newline.
132 /// the final newline.
133 fn find_entry_near_middle_of(
133 fn find_entry_near_middle_of(
134 bytes: &[u8],
134 bytes: &[u8],
135 ) -> Result<Option<std::ops::Range<usize>>, HgError> {
135 ) -> Result<Option<std::ops::Range<usize>>, HgError> {
136 let len = bytes.len();
136 let len = bytes.len();
137 if len > 0 {
137 if len > 0 {
138 let middle = bytes.len() / 2;
138 let middle = bytes.len() / 2;
139 // Integer division rounds down, so `middle < len`.
139 // Integer division rounds down, so `middle < len`.
140 let (before, after) = bytes.split_at(middle);
140 let (before, after) = bytes.split_at(middle);
141 let is_newline = |&byte: &u8| byte == b'\n';
141 let is_newline = |&byte: &u8| byte == b'\n';
142 let entry_start = match before.iter().rposition(is_newline) {
142 let entry_start = match before.iter().rposition(is_newline) {
143 Some(i) => i + 1,
143 Some(i) => i + 1,
144 None => 0, // We choose the first entry in `bytes`
144 None => 0, // We choose the first entry in `bytes`
145 };
145 };
146 let entry_end = match after.iter().position(is_newline) {
146 let entry_end = match after.iter().position(is_newline) {
147 Some(i) => {
147 Some(i) => {
148 // No `+ 1` here to exclude this newline from the range
148 // No `+ 1` here to exclude this newline from the range
149 middle + i
149 middle + i
150 }
150 }
151 None => {
151 None => {
152 // In a well-formed manifest:
152 // In a well-formed manifest:
153 //
153 //
154 // * Since `len > 0`, `bytes` contains at least one entry
154 // * Since `len > 0`, `bytes` contains at least one entry
155 // * Every entry ends with a newline
155 // * Every entry ends with a newline
156 // * Since `middle < len`, `after` contains at least the
156 // * Since `middle < len`, `after` contains at least the
157 // newline at the end of the last entry of `bytes`.
157 // newline at the end of the last entry of `bytes`.
158 //
158 //
159 // We didn’t find a newline, so this manifest is not
159 // We didn’t find a newline, so this manifest is not
160 // well-formed.
160 // well-formed.
161 return Err(HgError::corrupted(
161 return Err(HgError::corrupted(
162 "manifest entry without \\n delimiter",
162 "manifest entry without \\n delimiter",
163 ));
163 ));
164 }
164 }
165 };
165 };
166 Ok(Some(entry_start..entry_end))
166 Ok(Some(entry_start..entry_end))
167 } else {
167 } else {
168 // len == 0
168 // len == 0
169 Ok(None)
169 Ok(None)
170 }
170 }
171 }
171 }
172 }
172 }
173
173
174 /// `Manifestlog` entry which knows how to interpret the `manifest` data bytes.
174 /// `Manifestlog` entry which knows how to interpret the `manifest` data bytes.
175 #[derive(Debug)]
175 #[derive(Debug)]
176 pub struct ManifestEntry<'manifest> {
176 pub struct ManifestEntry<'manifest> {
177 pub path: &'manifest HgPath,
177 pub path: &'manifest HgPath,
178 pub hex_node_id: &'manifest [u8],
178 pub hex_node_id: &'manifest [u8],
179
179
180 /// `Some` values are b'x', b'l', or b't'
180 /// `Some` values are b'x', b'l', or b't'
181 pub flags: Option<u8>,
181 pub flags: Option<u8>,
182 }
182 }
183
183
184 impl<'a> ManifestEntry<'a> {
184 impl<'a> ManifestEntry<'a> {
185 fn split_path(bytes: &[u8]) -> Result<(&[u8], &[u8]), HgError> {
185 fn split_path(bytes: &[u8]) -> Result<(&[u8], &[u8]), HgError> {
186 bytes.split_2(b'\0').ok_or_else(|| {
186 bytes.split_2(b'\0').ok_or_else(|| {
187 HgError::corrupted("manifest entry without \\0 delimiter")
187 HgError::corrupted("manifest entry without \\0 delimiter")
188 })
188 })
189 }
189 }
190
190
191 fn from_path_and_rest(path: &'a [u8], rest: &'a [u8]) -> Self {
191 fn from_path_and_rest(path: &'a [u8], rest: &'a [u8]) -> Self {
192 let (hex_node_id, flags) = match rest.split_last() {
192 let (hex_node_id, flags) = match rest.split_last() {
193 Some((&b'x', rest)) => (rest, Some(b'x')),
193 Some((&b'x', rest)) => (rest, Some(b'x')),
194 Some((&b'l', rest)) => (rest, Some(b'l')),
194 Some((&b'l', rest)) => (rest, Some(b'l')),
195 Some((&b't', rest)) => (rest, Some(b't')),
195 Some((&b't', rest)) => (rest, Some(b't')),
196 _ => (rest, None),
196 _ => (rest, None),
197 };
197 };
198 Self {
198 Self {
199 path: HgPath::new(path),
199 path: HgPath::new(path),
200 hex_node_id,
200 hex_node_id,
201 flags,
201 flags,
202 }
202 }
203 }
203 }
204
204
205 fn from_raw(bytes: &'a [u8]) -> Result<Self, HgError> {
205 fn from_raw(bytes: &'a [u8]) -> Result<Self, HgError> {
206 let (path, rest) = Self::split_path(bytes)?;
206 let (path, rest) = Self::split_path(bytes)?;
207 Ok(Self::from_path_and_rest(path, rest))
207 Ok(Self::from_path_and_rest(path, rest))
208 }
208 }
209
209
210 pub fn node_id(&self) -> Result<Node, HgError> {
210 pub fn node_id(&self) -> Result<Node, HgError> {
211 Node::from_hex_for_repo(self.hex_node_id)
211 Node::from_hex_for_repo(self.hex_node_id)
212 }
212 }
213 }
213 }
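// A minimal sketch of a single manifest line as `ManifestEntry::from_raw`
// receives it (the trailing newline is already stripped by `Manifest::iter`);
// the path, node ID and flag are made-up values.
#[cfg(test)]
mod manifest_entry_sketch {
    use super::*;

    #[test]
    fn parse_one_entry() {
        let line = b"dir/file.rs\x000123456789abcdef0123456789abcdef01234567x";
        let entry = ManifestEntry::from_raw(line).unwrap();
        assert_eq!(entry.path, HgPath::new("dir/file.rs"));
        assert_eq!(entry.flags, Some(b'x'));
        assert_eq!(
            entry.node_id().unwrap(),
            Node::from_hex("0123456789abcdef0123456789abcdef01234567").unwrap()
        );
    }
}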
@@ -1,1458 +1,1466
1 // Copyright 2018-2023 Georges Racinet <georges.racinet@octobus.net>
1 // Copyright 2018-2023 Georges Racinet <georges.racinet@octobus.net>
2 // and Mercurial contributors
2 // and Mercurial contributors
3 //
3 //
4 // This software may be used and distributed according to the terms of the
4 // This software may be used and distributed according to the terms of the
5 // GNU General Public License version 2 or any later version.
5 // GNU General Public License version 2 or any later version.
6 //! Mercurial concepts for handling revision history
6 //! Mercurial concepts for handling revision history
7
7
8 pub mod node;
8 pub mod node;
9 pub mod nodemap;
9 pub mod nodemap;
10 mod nodemap_docket;
10 mod nodemap_docket;
11 pub mod path_encode;
11 pub mod path_encode;
12 pub use node::{FromHexError, Node, NodePrefix};
12 pub use node::{FromHexError, Node, NodePrefix};
13 pub mod changelog;
13 pub mod changelog;
14 pub mod filelog;
14 pub mod filelog;
15 pub mod index;
15 pub mod index;
16 pub mod manifest;
16 pub mod manifest;
17 pub mod patch;
17 pub mod patch;
18
18
19 use std::borrow::Cow;
19 use std::borrow::Cow;
20 use std::collections::HashSet;
20 use std::collections::HashSet;
21 use std::io::Read;
21 use std::io::Read;
22 use std::ops::Deref;
22 use std::ops::Deref;
23 use std::path::Path;
23 use std::path::Path;
24
24
25 use flate2::read::ZlibDecoder;
25 use flate2::read::ZlibDecoder;
26 use sha1::{Digest, Sha1};
26 use sha1::{Digest, Sha1};
27 use std::cell::RefCell;
27 use std::cell::RefCell;
28 use zstd;
28 use zstd;
29
29
30 use self::node::{NODE_BYTES_LENGTH, NULL_NODE};
30 use self::node::{NODE_BYTES_LENGTH, NULL_NODE};
31 use self::nodemap_docket::NodeMapDocket;
31 use self::nodemap_docket::NodeMapDocket;
32 use super::index::Index;
32 use super::index::Index;
33 use super::index::INDEX_ENTRY_SIZE;
33 use super::index::INDEX_ENTRY_SIZE;
34 use super::nodemap::{NodeMap, NodeMapError};
34 use super::nodemap::{NodeMap, NodeMapError};
35 use crate::config::{Config, ResourceProfileValue};
35 use crate::config::{Config, ResourceProfileValue};
36 use crate::errors::HgError;
36 use crate::errors::HgError;
37 use crate::exit_codes;
37 use crate::exit_codes;
38 use crate::requirements::{
38 use crate::requirements::{
39 GENERALDELTA_REQUIREMENT, NARROW_REQUIREMENT, SPARSEREVLOG_REQUIREMENT,
39 GENERALDELTA_REQUIREMENT, NARROW_REQUIREMENT, SPARSEREVLOG_REQUIREMENT,
40 };
40 };
41 use crate::vfs::Vfs;
41 use crate::vfs::VfsImpl;
42
42
43 /// As noted in revlog.c, revision numbers are actually encoded in
43 /// As noted in revlog.c, revision numbers are actually encoded in
44 /// 4 bytes, and are liberally converted to ints, whence the i32
44 /// 4 bytes, and are liberally converted to ints, whence the i32
45 pub type BaseRevision = i32;
45 pub type BaseRevision = i32;
46
46
47 /// Mercurial revision numbers
47 /// Mercurial revision numbers
48 /// In contrast to the more general [`UncheckedRevision`], these are "checked"
48 /// In contrast to the more general [`UncheckedRevision`], these are "checked"
49 /// in the sense that they should only be used for revisions that are
49 /// in the sense that they should only be used for revisions that are
50 /// valid for a given index (i.e. in bounds).
50 /// valid for a given index (i.e. in bounds).
51 #[derive(
51 #[derive(
52 Debug,
52 Debug,
53 derive_more::Display,
53 derive_more::Display,
54 Clone,
54 Clone,
55 Copy,
55 Copy,
56 Hash,
56 Hash,
57 PartialEq,
57 PartialEq,
58 Eq,
58 Eq,
59 PartialOrd,
59 PartialOrd,
60 Ord,
60 Ord,
61 )]
61 )]
62 pub struct Revision(pub BaseRevision);
62 pub struct Revision(pub BaseRevision);
63
63
64 impl format_bytes::DisplayBytes for Revision {
64 impl format_bytes::DisplayBytes for Revision {
65 fn display_bytes(
65 fn display_bytes(
66 &self,
66 &self,
67 output: &mut dyn std::io::Write,
67 output: &mut dyn std::io::Write,
68 ) -> std::io::Result<()> {
68 ) -> std::io::Result<()> {
69 self.0.display_bytes(output)
69 self.0.display_bytes(output)
70 }
70 }
71 }
71 }
72
72
73 /// Unchecked Mercurial revision numbers.
73 /// Unchecked Mercurial revision numbers.
74 ///
74 ///
75 /// Values of this type have no guarantee of being a valid revision number
75 /// Values of this type have no guarantee of being a valid revision number
76 /// in any context. Use method `check_revision` to get a valid revision within
76 /// in any context. Use method `check_revision` to get a valid revision within
77 /// the appropriate index object.
77 /// the appropriate index object.
78 #[derive(
78 #[derive(
79 Debug,
79 Debug,
80 derive_more::Display,
80 derive_more::Display,
81 Clone,
81 Clone,
82 Copy,
82 Copy,
83 Hash,
83 Hash,
84 PartialEq,
84 PartialEq,
85 Eq,
85 Eq,
86 PartialOrd,
86 PartialOrd,
87 Ord,
87 Ord,
88 )]
88 )]
89 pub struct UncheckedRevision(pub BaseRevision);
89 pub struct UncheckedRevision(pub BaseRevision);
90
90
91 impl format_bytes::DisplayBytes for UncheckedRevision {
91 impl format_bytes::DisplayBytes for UncheckedRevision {
92 fn display_bytes(
92 fn display_bytes(
93 &self,
93 &self,
94 output: &mut dyn std::io::Write,
94 output: &mut dyn std::io::Write,
95 ) -> std::io::Result<()> {
95 ) -> std::io::Result<()> {
96 self.0.display_bytes(output)
96 self.0.display_bytes(output)
97 }
97 }
98 }
98 }
99
99
100 impl From<Revision> for UncheckedRevision {
100 impl From<Revision> for UncheckedRevision {
101 fn from(value: Revision) -> Self {
101 fn from(value: Revision) -> Self {
102 Self(value.0)
102 Self(value.0)
103 }
103 }
104 }
104 }
105
105
106 impl From<BaseRevision> for UncheckedRevision {
106 impl From<BaseRevision> for UncheckedRevision {
107 fn from(value: BaseRevision) -> Self {
107 fn from(value: BaseRevision) -> Self {
108 Self(value)
108 Self(value)
109 }
109 }
110 }
110 }
111
111
112 /// Marker expressing the absence of a parent
112 /// Marker expressing the absence of a parent
113 ///
113 ///
114 /// Independently of the actual representation, `NULL_REVISION` is guaranteed
114 /// Independently of the actual representation, `NULL_REVISION` is guaranteed
115 /// to be smaller than all existing revisions.
115 /// to be smaller than all existing revisions.
116 pub const NULL_REVISION: Revision = Revision(-1);
116 pub const NULL_REVISION: Revision = Revision(-1);
117
117
118 /// Same as `mercurial.node.wdirrev`
118 /// Same as `mercurial.node.wdirrev`
119 ///
119 ///
120 /// This is also equal to `i32::max_value()`, but it's better to spell
120 /// This is also equal to `i32::max_value()`, but it's better to spell
121 /// it out explicitly, same as in `mercurial.node`
121 /// it out explicitly, same as in `mercurial.node`
122 #[allow(clippy::unreadable_literal)]
122 #[allow(clippy::unreadable_literal)]
123 pub const WORKING_DIRECTORY_REVISION: UncheckedRevision =
123 pub const WORKING_DIRECTORY_REVISION: UncheckedRevision =
124 UncheckedRevision(0x7fffffff);
124 UncheckedRevision(0x7fffffff);
125
125
126 pub const WORKING_DIRECTORY_HEX: &str =
126 pub const WORKING_DIRECTORY_HEX: &str =
127 "ffffffffffffffffffffffffffffffffffffffff";
127 "ffffffffffffffffffffffffffffffffffffffff";
128
128
129 /// The simplest expression of what we need of Mercurial DAGs.
129 /// The simplest expression of what we need of Mercurial DAGs.
130 pub trait Graph {
130 pub trait Graph {
131 /// Return the two parents of the given `Revision`.
131 /// Return the two parents of the given `Revision`.
132 ///
132 ///
133 /// Each of the parents can be independently `NULL_REVISION`
133 /// Each of the parents can be independently `NULL_REVISION`
134 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError>;
134 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError>;
135 }
135 }
136
136
137 #[derive(Clone, Debug, PartialEq)]
137 #[derive(Clone, Debug, PartialEq)]
138 pub enum GraphError {
138 pub enum GraphError {
139 ParentOutOfRange(Revision),
139 ParentOutOfRange(Revision),
140 }
140 }
141
141
142 impl<T: Graph> Graph for &T {
142 impl<T: Graph> Graph for &T {
143 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
143 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
144 (*self).parents(rev)
144 (*self).parents(rev)
145 }
145 }
146 }
146 }
147
147
148 /// The Mercurial Revlog Index
148 /// The Mercurial Revlog Index
149 ///
149 ///
150 /// This is currently limited to the minimal interface that is needed for
150 /// This is currently limited to the minimal interface that is needed for
151 /// the [`nodemap`](nodemap/index.html) module
151 /// the [`nodemap`](nodemap/index.html) module
152 pub trait RevlogIndex {
152 pub trait RevlogIndex {
153 /// Total number of Revisions referenced in this index
153 /// Total number of Revisions referenced in this index
154 fn len(&self) -> usize;
154 fn len(&self) -> usize;
155
155
156 fn is_empty(&self) -> bool {
156 fn is_empty(&self) -> bool {
157 self.len() == 0
157 self.len() == 0
158 }
158 }
159
159
160 /// Return a reference to the Node or `None` for `NULL_REVISION`
160 /// Return a reference to the Node or `None` for `NULL_REVISION`
161 fn node(&self, rev: Revision) -> Option<&Node>;
161 fn node(&self, rev: Revision) -> Option<&Node>;
162
162
163 /// Return a [`Revision`] if `rev` is a valid revision number for this
163 /// Return a [`Revision`] if `rev` is a valid revision number for this
164 /// index.
164 /// index.
165 ///
165 ///
166 /// [`NULL_REVISION`] is considered to be valid.
166 /// [`NULL_REVISION`] is considered to be valid.
167 #[inline(always)]
167 #[inline(always)]
168 fn check_revision(&self, rev: UncheckedRevision) -> Option<Revision> {
168 fn check_revision(&self, rev: UncheckedRevision) -> Option<Revision> {
169 let rev = rev.0;
169 let rev = rev.0;
170
170
171 if rev == NULL_REVISION.0 || (rev >= 0 && (rev as usize) < self.len())
171 if rev == NULL_REVISION.0 || (rev >= 0 && (rev as usize) < self.len())
172 {
172 {
173 Some(Revision(rev))
173 Some(Revision(rev))
174 } else {
174 } else {
175 None
175 None
176 }
176 }
177 }
177 }
178 }
178 }
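// A minimal sketch of the `check_revision` contract, using a toy index that
// is not backed by any real revlog data: in-bounds revisions and
// `NULL_REVISION` are accepted, anything else is rejected.
#[cfg(test)]
mod check_revision_sketch {
    use super::*;

    struct ToyIndex;

    impl RevlogIndex for ToyIndex {
        fn len(&self) -> usize {
            3
        }
        fn node(&self, _rev: Revision) -> Option<&Node> {
            // Irrelevant for this sketch.
            None
        }
    }

    #[test]
    fn bounds_checking() {
        let index = ToyIndex;
        assert_eq!(index.check_revision(UncheckedRevision(2)), Some(Revision(2)));
        assert_eq!(
            index.check_revision(NULL_REVISION.into()),
            Some(NULL_REVISION)
        );
        assert_eq!(index.check_revision(UncheckedRevision(3)), None);
    }
}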
179
179
180 const REVISION_FLAG_CENSORED: u16 = 1 << 15;
180 const REVISION_FLAG_CENSORED: u16 = 1 << 15;
181 const REVISION_FLAG_ELLIPSIS: u16 = 1 << 14;
181 const REVISION_FLAG_ELLIPSIS: u16 = 1 << 14;
182 const REVISION_FLAG_EXTSTORED: u16 = 1 << 13;
182 const REVISION_FLAG_EXTSTORED: u16 = 1 << 13;
183 const REVISION_FLAG_HASCOPIESINFO: u16 = 1 << 12;
183 const REVISION_FLAG_HASCOPIESINFO: u16 = 1 << 12;
184
184
185 // Keep this in sync with REVIDX_KNOWN_FLAGS in
185 // Keep this in sync with REVIDX_KNOWN_FLAGS in
186 // mercurial/revlogutils/flagutil.py
186 // mercurial/revlogutils/flagutil.py
187 const REVIDX_KNOWN_FLAGS: u16 = REVISION_FLAG_CENSORED
187 const REVIDX_KNOWN_FLAGS: u16 = REVISION_FLAG_CENSORED
188 | REVISION_FLAG_ELLIPSIS
188 | REVISION_FLAG_ELLIPSIS
189 | REVISION_FLAG_EXTSTORED
189 | REVISION_FLAG_EXTSTORED
190 | REVISION_FLAG_HASCOPIESINFO;
190 | REVISION_FLAG_HASCOPIESINFO;
191
191
192 const NULL_REVLOG_ENTRY_FLAGS: u16 = 0;
192 const NULL_REVLOG_ENTRY_FLAGS: u16 = 0;
193
193
194 #[derive(Debug, derive_more::From, derive_more::Display)]
194 #[derive(Debug, derive_more::From, derive_more::Display)]
195 pub enum RevlogError {
195 pub enum RevlogError {
196 InvalidRevision,
196 InvalidRevision,
197 /// Working directory is not supported
197 /// Working directory is not supported
198 WDirUnsupported,
198 WDirUnsupported,
199 /// Found more than one entry whose ID matches the requested prefix
199 /// Found more than one entry whose ID matches the requested prefix
200 AmbiguousPrefix,
200 AmbiguousPrefix,
201 #[from]
201 #[from]
202 Other(HgError),
202 Other(HgError),
203 }
203 }
204
204
205 impl From<NodeMapError> for RevlogError {
205 impl From<NodeMapError> for RevlogError {
206 fn from(error: NodeMapError) -> Self {
206 fn from(error: NodeMapError) -> Self {
207 match error {
207 match error {
208 NodeMapError::MultipleResults => RevlogError::AmbiguousPrefix,
208 NodeMapError::MultipleResults => RevlogError::AmbiguousPrefix,
209 NodeMapError::RevisionNotInIndex(rev) => RevlogError::corrupted(
209 NodeMapError::RevisionNotInIndex(rev) => RevlogError::corrupted(
210 format!("nodemap point to revision {} not in index", rev),
210 format!("nodemap point to revision {} not in index", rev),
211 ),
211 ),
212 }
212 }
213 }
213 }
214 }
214 }
215
215
216 fn corrupted<S: AsRef<str>>(context: S) -> HgError {
216 fn corrupted<S: AsRef<str>>(context: S) -> HgError {
217 HgError::corrupted(format!("corrupted revlog, {}", context.as_ref()))
217 HgError::corrupted(format!("corrupted revlog, {}", context.as_ref()))
218 }
218 }
219
219
220 impl RevlogError {
220 impl RevlogError {
221 fn corrupted<S: AsRef<str>>(context: S) -> Self {
221 fn corrupted<S: AsRef<str>>(context: S) -> Self {
222 RevlogError::Other(corrupted(context))
222 RevlogError::Other(corrupted(context))
223 }
223 }
224 }
224 }
225
225
226 #[derive(derive_more::Display, Debug, Copy, Clone, PartialEq, Eq)]
226 #[derive(derive_more::Display, Debug, Copy, Clone, PartialEq, Eq)]
227 pub enum RevlogType {
227 pub enum RevlogType {
228 Changelog,
228 Changelog,
229 Manifestlog,
229 Manifestlog,
230 Filelog,
230 Filelog,
231 }
231 }
232
232
233 impl TryFrom<usize> for RevlogType {
233 impl TryFrom<usize> for RevlogType {
234 type Error = HgError;
234 type Error = HgError;
235
235
236 fn try_from(value: usize) -> Result<Self, Self::Error> {
236 fn try_from(value: usize) -> Result<Self, Self::Error> {
237 match value {
237 match value {
238 1001 => Ok(Self::Changelog),
238 1001 => Ok(Self::Changelog),
239 1002 => Ok(Self::Manifestlog),
239 1002 => Ok(Self::Manifestlog),
240 1003 => Ok(Self::Filelog),
240 1003 => Ok(Self::Filelog),
241 t => Err(HgError::abort(
241 t => Err(HgError::abort(
242 format!("Unknown revlog type {}", t),
242 format!("Unknown revlog type {}", t),
243 exit_codes::ABORT,
243 exit_codes::ABORT,
244 None,
244 None,
245 )),
245 )),
246 }
246 }
247 }
247 }
248 }
248 }
249
249
250 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
250 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
251 pub enum CompressionEngine {
251 pub enum CompressionEngine {
252 Zlib {
252 Zlib {
253 /// Between 0 and 9 included
253 /// Between 0 and 9 included
254 level: u32,
254 level: u32,
255 },
255 },
256 Zstd {
256 Zstd {
257 /// Between 0 and 22 included
257 /// Between 0 and 22 included
258 level: u32,
258 level: u32,
259 /// Never used in practice for now
259 /// Never used in practice for now
260 threads: u32,
260 threads: u32,
261 },
261 },
262 /// No compression is performed
262 /// No compression is performed
263 None,
263 None,
264 }
264 }
265 impl CompressionEngine {
265 impl CompressionEngine {
266 pub fn set_level(&mut self, new_level: usize) -> Result<(), HgError> {
266 pub fn set_level(&mut self, new_level: usize) -> Result<(), HgError> {
267 match self {
267 match self {
268 CompressionEngine::Zlib { level } => {
268 CompressionEngine::Zlib { level } => {
269 if new_level > 9 {
269 if new_level > 9 {
270 return Err(HgError::abort(
270 return Err(HgError::abort(
271 format!(
271 format!(
272 "invalid zlib compression level {}",
272 "invalid zlib compression level {}",
273 new_level
273 new_level
274 ),
274 ),
275 exit_codes::ABORT,
275 exit_codes::ABORT,
276 None,
276 None,
277 ));
277 ));
278 }
278 }
279 *level = new_level as u32;
279 *level = new_level as u32;
280 }
280 }
281 CompressionEngine::Zstd { level, .. } => {
281 CompressionEngine::Zstd { level, .. } => {
282 if new_level > 22 {
282 if new_level > 22 {
283 return Err(HgError::abort(
283 return Err(HgError::abort(
284 format!(
284 format!(
285 "invalid zstd compression level {}",
285 "invalid zstd compression level {}",
286 new_level
286 new_level
287 ),
287 ),
288 exit_codes::ABORT,
288 exit_codes::ABORT,
289 None,
289 None,
290 ));
290 ));
291 }
291 }
292 *level = new_level as u32;
292 *level = new_level as u32;
293 }
293 }
294 CompressionEngine::None => {}
294 CompressionEngine::None => {}
295 }
295 }
296 Ok(())
296 Ok(())
297 }
297 }
298
298
299 pub fn zstd(
299 pub fn zstd(
300 zstd_level: Option<u32>,
300 zstd_level: Option<u32>,
301 ) -> Result<CompressionEngine, HgError> {
301 ) -> Result<CompressionEngine, HgError> {
302 let mut engine = CompressionEngine::Zstd {
302 let mut engine = CompressionEngine::Zstd {
303 level: 3,
303 level: 3,
304 threads: 0,
304 threads: 0,
305 };
305 };
306 if let Some(level) = zstd_level {
306 if let Some(level) = zstd_level {
307 engine.set_level(level as usize)?;
307 engine.set_level(level as usize)?;
308 }
308 }
309 Ok(engine)
309 Ok(engine)
310 }
310 }
311 }
311 }
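// --- Illustrative sketch, not part of this changeset ---
// Exercises the constructors above: `CompressionEngine::zstd(None)` keeps the
// default level 3, and `set_level` rejects out-of-range levels for each
// engine. The helper name `compression_example` is made up for this example.
fn compression_example() -> Result<(), HgError> {
    let mut zstd = CompressionEngine::zstd(Some(10))?;
    assert_eq!(zstd, CompressionEngine::Zstd { level: 10, threads: 0 });
    zstd.set_level(22)?; // 22 is the zstd maximum, so this is accepted
    let mut zlib = CompressionEngine::default(); // Zlib { level: 6 }
    assert!(zlib.set_level(12).is_err()); // zlib only accepts 0..=9
    Ok(())
}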
312
312
313 impl Default for CompressionEngine {
313 impl Default for CompressionEngine {
314 fn default() -> Self {
314 fn default() -> Self {
315 Self::Zlib { level: 6 }
315 Self::Zlib { level: 6 }
316 }
316 }
317 }
317 }
318
318
319 #[derive(Debug, Clone, Copy, PartialEq)]
319 #[derive(Debug, Clone, Copy, PartialEq)]
320 /// Holds configuration values about how the revlog data is read
320 /// Holds configuration values about how the revlog data is read
321 pub struct RevlogDataConfig {
321 pub struct RevlogDataConfig {
322 /// Should we try to open the "pending" version of the revlog
322 /// Should we try to open the "pending" version of the revlog
323 pub try_pending: bool,
323 pub try_pending: bool,
324 /// Should we try to open the "split" version of the revlog
324 /// Should we try to open the "split" version of the revlog
325 pub try_split: bool,
325 pub try_split: bool,
326 /// When True, `indexfile` should be opened with `checkambig=True` at
326 /// When True, `indexfile` should be opened with `checkambig=True` at
327 /// writing time, to avoid file stat ambiguity
327 /// writing time, to avoid file stat ambiguity
328 pub check_ambig: bool,
328 pub check_ambig: bool,
329 /// If true, use mmap instead of reading to deal with large indexes
329 /// If true, use mmap instead of reading to deal with large indexes
330 pub mmap_large_index: bool,
330 pub mmap_large_index: bool,
331 /// How much data is considered large
331 /// How much data is considered large
332 pub mmap_index_threshold: Option<u64>,
332 pub mmap_index_threshold: Option<u64>,
333 /// How much data to read and cache into the raw revlog data cache
333 /// How much data to read and cache into the raw revlog data cache
334 pub chunk_cache_size: u64,
334 pub chunk_cache_size: u64,
335 /// The size of the uncompressed cache compared to the largest revision
335 /// The size of the uncompressed cache compared to the largest revision
336 /// seen
336 /// seen
337 pub uncompressed_cache_factor: Option<f64>,
337 pub uncompressed_cache_factor: Option<f64>,
338 /// The number of chunks cached
338 /// The number of chunks cached
339 pub uncompressed_cache_count: Option<u64>,
339 pub uncompressed_cache_count: Option<u64>,
340 /// Allow sparse reading of the revlog data
340 /// Allow sparse reading of the revlog data
341 pub with_sparse_read: bool,
341 pub with_sparse_read: bool,
342 /// Minimal density of a sparse read chunk
342 /// Minimal density of a sparse read chunk
343 pub sr_density_threshold: f64,
343 pub sr_density_threshold: f64,
344 /// Minimal size of the data we skip when performing sparse reads
344 /// Minimal size of the data we skip when performing sparse reads
345 pub sr_min_gap_size: u64,
345 pub sr_min_gap_size: u64,
346 /// Whether deltas are encoded against arbitrary bases
346 /// Whether deltas are encoded against arbitrary bases
347 pub general_delta: bool,
347 pub general_delta: bool,
348 }
348 }
349
349
350 impl RevlogDataConfig {
350 impl RevlogDataConfig {
351 pub fn new(
351 pub fn new(
352 config: &Config,
352 config: &Config,
353 requirements: &HashSet<String>,
353 requirements: &HashSet<String>,
354 ) -> Result<Self, HgError> {
354 ) -> Result<Self, HgError> {
355 let mut data_config = Self::default();
355 let mut data_config = Self::default();
356 if let Some(chunk_cache_size) =
356 if let Some(chunk_cache_size) =
357 config.get_byte_size(b"format", b"chunkcachesize")?
357 config.get_byte_size(b"format", b"chunkcachesize")?
358 {
358 {
359 data_config.chunk_cache_size = chunk_cache_size;
359 data_config.chunk_cache_size = chunk_cache_size;
360 }
360 }
361
361
362 let memory_profile = config.get_resource_profile(Some("memory"));
362 let memory_profile = config.get_resource_profile(Some("memory"));
363 if memory_profile.value >= ResourceProfileValue::Medium {
363 if memory_profile.value >= ResourceProfileValue::Medium {
364 data_config.uncompressed_cache_count = Some(10_000);
364 data_config.uncompressed_cache_count = Some(10_000);
365 data_config.uncompressed_cache_factor = Some(4.0);
365 data_config.uncompressed_cache_factor = Some(4.0);
366 if memory_profile.value >= ResourceProfileValue::High {
366 if memory_profile.value >= ResourceProfileValue::High {
367 data_config.uncompressed_cache_factor = Some(10.0)
367 data_config.uncompressed_cache_factor = Some(10.0)
368 }
368 }
369 }
369 }
370
370
371 if let Some(mmap_index_threshold) = config
371 if let Some(mmap_index_threshold) = config
372 .get_byte_size(b"storage", b"revlog.mmap.index:size-threshold")?
372 .get_byte_size(b"storage", b"revlog.mmap.index:size-threshold")?
373 {
373 {
374 data_config.mmap_index_threshold = Some(mmap_index_threshold);
374 data_config.mmap_index_threshold = Some(mmap_index_threshold);
375 }
375 }
376
376
377 let with_sparse_read =
377 let with_sparse_read =
378 config.get_bool(b"experimental", b"sparse-read")?;
378 config.get_bool(b"experimental", b"sparse-read")?;
379 if let Some(sr_density_threshold) = config
379 if let Some(sr_density_threshold) = config
380 .get_f64(b"experimental", b"sparse-read.density-threshold")?
380 .get_f64(b"experimental", b"sparse-read.density-threshold")?
381 {
381 {
382 data_config.sr_density_threshold = sr_density_threshold;
382 data_config.sr_density_threshold = sr_density_threshold;
383 }
383 }
384 data_config.with_sparse_read = with_sparse_read;
384 data_config.with_sparse_read = with_sparse_read;
385 if let Some(sr_min_gap_size) = config
385 if let Some(sr_min_gap_size) = config
386 .get_byte_size(b"experimental", b"sparse-read.min-gap-size")?
386 .get_byte_size(b"experimental", b"sparse-read.min-gap-size")?
387 {
387 {
388 data_config.sr_min_gap_size = sr_min_gap_size;
388 data_config.sr_min_gap_size = sr_min_gap_size;
389 }
389 }
390
390
391 data_config.with_sparse_read =
391 data_config.with_sparse_read =
392 requirements.contains(SPARSEREVLOG_REQUIREMENT);
392 requirements.contains(SPARSEREVLOG_REQUIREMENT);
393
393
394 Ok(data_config)
394 Ok(data_config)
395 }
395 }
396 }
396 }
397
397
398 impl Default for RevlogDataConfig {
398 impl Default for RevlogDataConfig {
399 fn default() -> Self {
399 fn default() -> Self {
400 Self {
400 Self {
401 chunk_cache_size: 65536,
401 chunk_cache_size: 65536,
402 sr_density_threshold: 0.50,
402 sr_density_threshold: 0.50,
403 sr_min_gap_size: 262144,
403 sr_min_gap_size: 262144,
404 try_pending: Default::default(),
404 try_pending: Default::default(),
405 try_split: Default::default(),
405 try_split: Default::default(),
406 check_ambig: Default::default(),
406 check_ambig: Default::default(),
407 mmap_large_index: Default::default(),
407 mmap_large_index: Default::default(),
408 mmap_index_threshold: Default::default(),
408 mmap_index_threshold: Default::default(),
409 uncompressed_cache_factor: Default::default(),
409 uncompressed_cache_factor: Default::default(),
410 uncompressed_cache_count: Default::default(),
410 uncompressed_cache_count: Default::default(),
411 with_sparse_read: Default::default(),
411 with_sparse_read: Default::default(),
412 general_delta: Default::default(),
412 general_delta: Default::default(),
413 }
413 }
414 }
414 }
415 }
415 }
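// --- Illustrative sketch, not part of this changeset ---
// The `Default` impl above only sets the three non-zero fields; everything
// else starts as false/None/0. Callers without a full `Config` can still
// override individual knobs with struct update syntax. The 16 MiB threshold
// and the helper name below are made up for this example.
fn example_data_config() -> RevlogDataConfig {
    RevlogDataConfig {
        mmap_large_index: true,
        mmap_index_threshold: Some(16 * 1024 * 1024),
        ..Default::default()
    }
}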
416
416
417 #[derive(Debug, Clone, Copy, PartialEq)]
417 #[derive(Debug, Clone, Copy, PartialEq)]
418 /// Holds configuration values about how new deltas are computed.
418 /// Holds configuration values about how new deltas are computed.
419 ///
419 ///
420 /// Some attributes are duplicated from [`RevlogDataConfig`] to help having
420 /// Some attributes are duplicated from [`RevlogDataConfig`] to help having
421 /// each object self contained.
421 /// each object self contained.
422 pub struct RevlogDeltaConfig {
422 pub struct RevlogDeltaConfig {
423 /// Whether deltas can be encoded against arbitrary bases
423 /// Whether deltas can be encoded against arbitrary bases
424 pub general_delta: bool,
424 pub general_delta: bool,
425 /// Allow sparse writing of the revlog data
425 /// Allow sparse writing of the revlog data
426 pub sparse_revlog: bool,
426 pub sparse_revlog: bool,
427 /// Maximum length of a delta chain
427 /// Maximum length of a delta chain
428 pub max_chain_len: Option<u64>,
428 pub max_chain_len: Option<u64>,
429 /// Maximum distance between a delta chain's start and end
429 /// Maximum distance between a delta chain's start and end
430 pub max_deltachain_span: Option<u64>,
430 pub max_deltachain_span: Option<u64>,
431 /// If `upper_bound_comp` is not None, this is the expected maximal
431 /// If `upper_bound_comp` is not None, this is the expected maximal
432 /// gain from compression for the data content
432 /// gain from compression for the data content
433 pub upper_bound_comp: Option<f64>,
433 pub upper_bound_comp: Option<f64>,
434 /// Should we try a delta against both parents
434 /// Should we try a delta against both parents
435 pub delta_both_parents: bool,
435 pub delta_both_parents: bool,
436 /// Test delta base candidate groups by chunks of this maximal size
436 /// Test delta base candidate groups by chunks of this maximal size
437 pub candidate_group_chunk_size: u64,
437 pub candidate_group_chunk_size: u64,
438 /// Should we display debug information about delta computation
438 /// Should we display debug information about delta computation
439 pub debug_delta: bool,
439 pub debug_delta: bool,
440 /// Trust incoming deltas by default
440 /// Trust incoming deltas by default
441 pub lazy_delta: bool,
441 pub lazy_delta: bool,
442 /// Trust the base of incoming deltas by default
442 /// Trust the base of incoming deltas by default
443 pub lazy_delta_base: bool,
443 pub lazy_delta_base: bool,
444 }
444 }
445 impl RevlogDeltaConfig {
445 impl RevlogDeltaConfig {
446 pub fn new(
446 pub fn new(
447 config: &Config,
447 config: &Config,
448 requirements: &HashSet<String>,
448 requirements: &HashSet<String>,
449 revlog_type: RevlogType,
449 revlog_type: RevlogType,
450 ) -> Result<Self, HgError> {
450 ) -> Result<Self, HgError> {
451 let mut delta_config = Self {
451 let mut delta_config = Self {
452 delta_both_parents: config
452 delta_both_parents: config
453 .get_option_no_default(
453 .get_option_no_default(
454 b"storage",
454 b"storage",
455 b"revlog.optimize-delta-parent-choice",
455 b"revlog.optimize-delta-parent-choice",
456 )?
456 )?
457 .unwrap_or(true),
457 .unwrap_or(true),
458 candidate_group_chunk_size: config
458 candidate_group_chunk_size: config
459 .get_u64(
459 .get_u64(
460 b"storage",
460 b"storage",
461 b"revlog.delta-parent-search.candidate-group-chunk-size",
461 b"revlog.delta-parent-search.candidate-group-chunk-size",
462 )?
462 )?
463 .unwrap_or_default(),
463 .unwrap_or_default(),
464 ..Default::default()
464 ..Default::default()
465 };
465 };
466
466
467 delta_config.debug_delta =
467 delta_config.debug_delta =
468 config.get_bool(b"debug", b"revlog.debug-delta")?;
468 config.get_bool(b"debug", b"revlog.debug-delta")?;
469
469
470 delta_config.general_delta =
470 delta_config.general_delta =
471 requirements.contains(GENERALDELTA_REQUIREMENT);
471 requirements.contains(GENERALDELTA_REQUIREMENT);
472
472
473 let lazy_delta =
473 let lazy_delta =
474 config.get_bool(b"storage", b"revlog.reuse-external-delta")?;
474 config.get_bool(b"storage", b"revlog.reuse-external-delta")?;
475
475
476 if revlog_type == RevlogType::Manifestlog {
476 if revlog_type == RevlogType::Manifestlog {
477 // upper bound of what we expect from compression
477 // upper bound of what we expect from compression
478 // (real life value seems to be 3)
478 // (real life value seems to be 3)
479 delta_config.upper_bound_comp = Some(3.0)
479 delta_config.upper_bound_comp = Some(3.0)
480 }
480 }
481
481
482 let mut lazy_delta_base = false;
482 let mut lazy_delta_base = false;
483 if lazy_delta {
483 if lazy_delta {
484 lazy_delta_base = match config.get_option_no_default(
484 lazy_delta_base = match config.get_option_no_default(
485 b"storage",
485 b"storage",
486 b"revlog.reuse-external-delta-parent",
486 b"revlog.reuse-external-delta-parent",
487 )? {
487 )? {
488 Some(base) => base,
488 Some(base) => base,
489 None => config.get_bool(b"format", b"generaldelta")?,
489 None => config.get_bool(b"format", b"generaldelta")?,
490 };
490 };
491 }
491 }
492 delta_config.lazy_delta = lazy_delta;
492 delta_config.lazy_delta = lazy_delta;
493 delta_config.lazy_delta_base = lazy_delta_base;
493 delta_config.lazy_delta_base = lazy_delta_base;
494
494
495 delta_config.max_deltachain_span =
495 delta_config.max_deltachain_span =
496 match config.get_i64(b"experimental", b"maxdeltachainspan")? {
496 match config.get_i64(b"experimental", b"maxdeltachainspan")? {
497 Some(span) => {
497 Some(span) => {
498 if span < 0 {
498 if span < 0 {
499 None
499 None
500 } else {
500 } else {
501 Some(span as u64)
501 Some(span as u64)
502 }
502 }
503 }
503 }
504 None => None,
504 None => None,
505 };
505 };
506
506
507 delta_config.sparse_revlog =
507 delta_config.sparse_revlog =
508 requirements.contains(SPARSEREVLOG_REQUIREMENT);
508 requirements.contains(SPARSEREVLOG_REQUIREMENT);
509
509
510 delta_config.max_chain_len =
510 delta_config.max_chain_len =
511 config.get_byte_size_no_default(b"format", b"maxchainlen")?;
511 config.get_byte_size_no_default(b"format", b"maxchainlen")?;
512
512
513 Ok(delta_config)
513 Ok(delta_config)
514 }
514 }
515 }
515 }
516
516
517 impl Default for RevlogDeltaConfig {
517 impl Default for RevlogDeltaConfig {
518 fn default() -> Self {
518 fn default() -> Self {
519 Self {
519 Self {
520 delta_both_parents: true,
520 delta_both_parents: true,
521 lazy_delta: true,
521 lazy_delta: true,
522 general_delta: Default::default(),
522 general_delta: Default::default(),
523 sparse_revlog: Default::default(),
523 sparse_revlog: Default::default(),
524 max_chain_len: Default::default(),
524 max_chain_len: Default::default(),
525 max_deltachain_span: Default::default(),
525 max_deltachain_span: Default::default(),
526 upper_bound_comp: Default::default(),
526 upper_bound_comp: Default::default(),
527 candidate_group_chunk_size: Default::default(),
527 candidate_group_chunk_size: Default::default(),
528 debug_delta: Default::default(),
528 debug_delta: Default::default(),
529 lazy_delta_base: Default::default(),
529 lazy_delta_base: Default::default(),
530 }
530 }
531 }
531 }
532 }
532 }
533
533
534 #[derive(Debug, Default, Clone, Copy, PartialEq)]
534 #[derive(Debug, Default, Clone, Copy, PartialEq)]
535 /// Holds configuration values about the available revlog features
535 /// Holds configuration values about the available revlog features
536 pub struct RevlogFeatureConfig {
536 pub struct RevlogFeatureConfig {
537 /// The compression engine and its options
537 /// The compression engine and its options
538 pub compression_engine: CompressionEngine,
538 pub compression_engine: CompressionEngine,
539 /// Can we use censor on this revlog
539 /// Can we use censor on this revlog
540 pub censorable: bool,
540 pub censorable: bool,
541 /// Does this revlog use the "side data" feature
541 /// Does this revlog use the "side data" feature
542 pub has_side_data: bool,
542 pub has_side_data: bool,
543 /// Might remove this configuration once the rank computation has no
543 /// Might remove this configuration once the rank computation has no
544 /// impact
544 /// impact
545 pub compute_rank: bool,
545 pub compute_rank: bool,
546 /// Parent order is supposed to be semantically irrelevant, so we
546 /// Parent order is supposed to be semantically irrelevant, so we
547 /// normally re-sort parents to ensure that the first parent is non-null,
547 /// normally re-sort parents to ensure that the first parent is non-null,
548 /// if there is a non-null parent at all.
548 /// if there is a non-null parent at all.
549 /// filelog abuses the parent order as a flag to mark some instances of
549 /// filelog abuses the parent order as a flag to mark some instances of
550 /// meta-encoded files, so allow it to disable this behavior.
550 /// meta-encoded files, so allow it to disable this behavior.
551 pub canonical_parent_order: bool,
551 pub canonical_parent_order: bool,
552 /// Can ellipsis commit be used
552 /// Can ellipsis commit be used
553 pub enable_ellipsis: bool,
553 pub enable_ellipsis: bool,
554 }
554 }
555 impl RevlogFeatureConfig {
555 impl RevlogFeatureConfig {
556 pub fn new(
556 pub fn new(
557 config: &Config,
557 config: &Config,
558 requirements: &HashSet<String>,
558 requirements: &HashSet<String>,
559 ) -> Result<Self, HgError> {
559 ) -> Result<Self, HgError> {
560 let mut feature_config = Self::default();
560 let mut feature_config = Self::default();
561
561
562 let zlib_level = config.get_u32(b"storage", b"revlog.zlib.level")?;
562 let zlib_level = config.get_u32(b"storage", b"revlog.zlib.level")?;
563 let zstd_level = config.get_u32(b"storage", b"revlog.zstd.level")?;
563 let zstd_level = config.get_u32(b"storage", b"revlog.zstd.level")?;
564
564
565 feature_config.compression_engine = CompressionEngine::default();
565 feature_config.compression_engine = CompressionEngine::default();
566
566
567 for requirement in requirements {
567 for requirement in requirements {
568 if requirement.starts_with("revlog-compression-")
568 if requirement.starts_with("revlog-compression-")
569 || requirement.starts_with("exp-compression-")
569 || requirement.starts_with("exp-compression-")
570 {
570 {
571 let split = &mut requirement.splitn(3, '-');
571 let split = &mut requirement.splitn(3, '-');
572 split.next();
572 split.next();
573 split.next();
573 split.next();
574 feature_config.compression_engine = match split.next().unwrap()
574 feature_config.compression_engine = match split.next().unwrap()
575 {
575 {
576 "zstd" => CompressionEngine::zstd(zstd_level)?,
576 "zstd" => CompressionEngine::zstd(zstd_level)?,
577 e => {
577 e => {
578 return Err(HgError::UnsupportedFeature(format!(
578 return Err(HgError::UnsupportedFeature(format!(
579 "Unsupported compression engine '{e}'"
579 "Unsupported compression engine '{e}'"
580 )))
580 )))
581 }
581 }
582 };
582 };
583 }
583 }
584 }
584 }
585 if let Some(level) = zlib_level {
585 if let Some(level) = zlib_level {
586 if matches!(
586 if matches!(
587 feature_config.compression_engine,
587 feature_config.compression_engine,
588 CompressionEngine::Zlib { .. }
588 CompressionEngine::Zlib { .. }
589 ) {
589 ) {
590 feature_config
590 feature_config
591 .compression_engine
591 .compression_engine
592 .set_level(level as usize)?;
592 .set_level(level as usize)?;
593 }
593 }
594 }
594 }
595
595
596 feature_config.enable_ellipsis =
596 feature_config.enable_ellipsis =
597 requirements.contains(NARROW_REQUIREMENT);
597 requirements.contains(NARROW_REQUIREMENT);
598
598
599 Ok(feature_config)
599 Ok(feature_config)
600 }
600 }
601 }
601 }
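// --- Illustrative sketch, not part of this changeset ---
// Shows what the requirement parsing above does: `splitn(3, '-')` keeps
// everything after the second dash together, so both "revlog-compression-zstd"
// and "exp-compression-zstd" yield "zstd". The helper name is made up for this
// example.
fn engine_name_from_requirement(requirement: &str) -> Option<&str> {
    let mut split = requirement.splitn(3, '-');
    split.next(); // "revlog" or "exp"
    split.next(); // "compression"
    split.next() // engine name, e.g. "zstd"
}
// engine_name_from_requirement("revlog-compression-zstd") == Some("zstd")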
602
602
603 /// Read-only implementation of a revlog.
603 /// Read-only implementation of a revlog.
604 pub struct Revlog {
604 pub struct Revlog {
605 /// When index and data are not interleaved: bytes of the revlog index.
605 /// When index and data are not interleaved: bytes of the revlog index.
606 /// When index and data are interleaved: bytes of the revlog index and
606 /// When index and data are interleaved: bytes of the revlog index and
607 /// data.
607 /// data.
608 index: Index,
608 index: Index,
609 /// When index and data are not interleaved: bytes of the revlog data
609 /// When index and data are not interleaved: bytes of the revlog data
610 data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>>,
610 data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>>,
611 /// When present on disk: the persistent nodemap for this revlog
611 /// When present on disk: the persistent nodemap for this revlog
612 nodemap: Option<nodemap::NodeTree>,
612 nodemap: Option<nodemap::NodeTree>,
613 }
613 }
614
614
615 impl Graph for Revlog {
615 impl Graph for Revlog {
616 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
616 fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
617 self.index.parents(rev)
617 self.index.parents(rev)
618 }
618 }
619 }
619 }
620
620
621 #[derive(Debug, Copy, Clone, PartialEq)]
621 #[derive(Debug, Copy, Clone, PartialEq)]
622 pub enum RevlogVersionOptions {
622 pub enum RevlogVersionOptions {
623 V0,
623 V0,
624 V1 { general_delta: bool, inline: bool },
624 V1 { general_delta: bool, inline: bool },
625 V2,
625 V2,
626 ChangelogV2 { compute_rank: bool },
626 ChangelogV2 { compute_rank: bool },
627 }
627 }
628
628
629 /// Options to govern how a revlog should be opened, usually from the
629 /// Options to govern how a revlog should be opened, usually from the
630 /// repository configuration or requirements.
630 /// repository configuration or requirements.
631 #[derive(Debug, Copy, Clone)]
631 #[derive(Debug, Copy, Clone)]
632 pub struct RevlogOpenOptions {
632 pub struct RevlogOpenOptions {
633 /// The revlog version, along with any option specific to this version
633 /// The revlog version, along with any option specific to this version
634 pub version: RevlogVersionOptions,
634 pub version: RevlogVersionOptions,
635 /// Whether the revlog uses a persistent nodemap.
635 /// Whether the revlog uses a persistent nodemap.
636 pub use_nodemap: bool,
636 pub use_nodemap: bool,
637 pub delta_config: RevlogDeltaConfig,
637 pub delta_config: RevlogDeltaConfig,
638 pub data_config: RevlogDataConfig,
638 pub data_config: RevlogDataConfig,
639 pub feature_config: RevlogFeatureConfig,
639 pub feature_config: RevlogFeatureConfig,
640 }
640 }
641
641
642 #[cfg(test)]
642 #[cfg(test)]
643 impl Default for RevlogOpenOptions {
643 impl Default for RevlogOpenOptions {
644 fn default() -> Self {
644 fn default() -> Self {
645 Self {
645 Self {
646 version: RevlogVersionOptions::V1 {
646 version: RevlogVersionOptions::V1 {
647 general_delta: true,
647 general_delta: true,
648 inline: false,
648 inline: false,
649 },
649 },
650 use_nodemap: true,
650 use_nodemap: true,
651 data_config: Default::default(),
651 data_config: Default::default(),
652 delta_config: Default::default(),
652 delta_config: Default::default(),
653 feature_config: Default::default(),
653 feature_config: Default::default(),
654 }
654 }
655 }
655 }
656 }
656 }
657
657
658 impl RevlogOpenOptions {
658 impl RevlogOpenOptions {
659 pub fn new(
659 pub fn new(
660 inline: bool,
660 inline: bool,
661 data_config: RevlogDataConfig,
661 data_config: RevlogDataConfig,
662 delta_config: RevlogDeltaConfig,
662 delta_config: RevlogDeltaConfig,
663 feature_config: RevlogFeatureConfig,
663 feature_config: RevlogFeatureConfig,
664 ) -> Self {
664 ) -> Self {
665 Self {
665 Self {
666 version: RevlogVersionOptions::V1 {
666 version: RevlogVersionOptions::V1 {
667 general_delta: data_config.general_delta,
667 general_delta: data_config.general_delta,
668 inline,
668 inline,
669 },
669 },
670 use_nodemap: false,
670 use_nodemap: false,
671 data_config,
671 data_config,
672 delta_config,
672 delta_config,
673 feature_config,
673 feature_config,
674 }
674 }
675 }
675 }
676
676
677 pub fn index_header(&self) -> index::IndexHeader {
677 pub fn index_header(&self) -> index::IndexHeader {
678 index::IndexHeader {
678 index::IndexHeader {
679 header_bytes: match self.version {
679 header_bytes: match self.version {
680 RevlogVersionOptions::V0 => [0, 0, 0, 0],
680 RevlogVersionOptions::V0 => [0, 0, 0, 0],
681 RevlogVersionOptions::V1 {
681 RevlogVersionOptions::V1 {
682 general_delta,
682 general_delta,
683 inline,
683 inline,
684 } => [
684 } => [
685 0,
685 0,
686 if general_delta && inline {
686 if general_delta && inline {
687 3
687 3
688 } else if general_delta {
688 } else if general_delta {
689 2
689 2
690 } else {
690 } else {
691 u8::from(inline)
691 u8::from(inline)
692 },
692 },
693 0,
693 0,
694 1,
694 1,
695 ],
695 ],
696 RevlogVersionOptions::V2 => 0xDEADu32.to_be_bytes(),
696 RevlogVersionOptions::V2 => 0xDEADu32.to_be_bytes(),
697 RevlogVersionOptions::ChangelogV2 { compute_rank: _ } => {
697 RevlogVersionOptions::ChangelogV2 { compute_rank: _ } => {
698 0xD34Du32.to_be_bytes()
698 0xD34Du32.to_be_bytes()
699 }
699 }
700 },
700 },
701 }
701 }
702 }
702 }
703 }
703 }
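// --- Illustrative sketch, not part of this changeset ---
// The v1 header produced by `index_header` above packs the feature flags into
// the second byte (1 = inline, 2 = generaldelta, 3 = both) and ends with the
// version number 1. With the default configs `general_delta` is false, so an
// inline revlog gets [0, 1, 0, 1]. The helper name is made up for this
// example, and it assumes `IndexHeader::header_bytes` stays readable here.
fn example_v1_header() -> [u8; 4] {
    let opts = RevlogOpenOptions::new(
        true, // inline
        RevlogDataConfig::default(),
        RevlogDeltaConfig::default(),
        RevlogFeatureConfig::default(),
    );
    opts.index_header().header_bytes
}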
704
704
705 impl Revlog {
705 impl Revlog {
706 /// Open a revlog index file.
706 /// Open a revlog index file.
707 ///
707 ///
708 /// It will also open the associated data file if index and data are not
708 /// It will also open the associated data file if index and data are not
709 /// interleaved.
709 /// interleaved.
710 pub fn open(
710 pub fn open(
711 store_vfs: &Vfs,
711 // TODO: use the `Vfs` trait here once we create a function for mmap
712 store_vfs: &VfsImpl,
712 index_path: impl AsRef<Path>,
713 index_path: impl AsRef<Path>,
713 data_path: Option<&Path>,
714 data_path: Option<&Path>,
714 options: RevlogOpenOptions,
715 options: RevlogOpenOptions,
715 ) -> Result<Self, HgError> {
716 ) -> Result<Self, HgError> {
716 Self::open_gen(store_vfs, index_path, data_path, options, None)
717 Self::open_gen(store_vfs, index_path, data_path, options, None)
717 }
718 }
718
719
719 fn open_gen(
720 fn open_gen(
720 store_vfs: &Vfs,
721 // Todo use the `Vfs` trait here once we create a function for mmap
722 store_vfs: &VfsImpl,
721 index_path: impl AsRef<Path>,
723 index_path: impl AsRef<Path>,
722 data_path: Option<&Path>,
724 data_path: Option<&Path>,
723 options: RevlogOpenOptions,
725 options: RevlogOpenOptions,
724 nodemap_for_test: Option<nodemap::NodeTree>,
726 nodemap_for_test: Option<nodemap::NodeTree>,
725 ) -> Result<Self, HgError> {
727 ) -> Result<Self, HgError> {
726 let index_path = index_path.as_ref();
728 let index_path = index_path.as_ref();
727 let index = {
729 let index = {
728 match store_vfs.mmap_open_opt(index_path)? {
730 match store_vfs.mmap_open_opt(index_path)? {
729 None => Index::new(
731 None => Index::new(
730 Box::<Vec<_>>::default(),
732 Box::<Vec<_>>::default(),
731 options.index_header(),
733 options.index_header(),
732 ),
734 ),
733 Some(index_mmap) => {
735 Some(index_mmap) => {
734 let index = Index::new(
736 let index = Index::new(
735 Box::new(index_mmap),
737 Box::new(index_mmap),
736 options.index_header(),
738 options.index_header(),
737 )?;
739 )?;
738 Ok(index)
740 Ok(index)
739 }
741 }
740 }
742 }
741 }?;
743 }?;
742
744
743 let default_data_path = index_path.with_extension("d");
745 let default_data_path = index_path.with_extension("d");
744
746
745 // A type annotation is required here: without it the compiler
747 // A type annotation is required here: without it the compiler
746 // won't recognize Mmap as Deref<Target = [u8]>.
748 // won't recognize Mmap as Deref<Target = [u8]>.
747 let data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>> =
749 let data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>> =
748 if index.is_inline() {
750 if index.is_inline() {
749 None
751 None
750 } else if index.is_empty() {
752 } else if index.is_empty() {
751 // No need to even try to open the data file then.
753 // No need to even try to open the data file then.
752 Some(Box::new(&[][..]))
754 Some(Box::new(&[][..]))
753 } else {
755 } else {
754 let data_path = data_path.unwrap_or(&default_data_path);
756 let data_path = data_path.unwrap_or(&default_data_path);
755 let data_mmap = store_vfs.mmap_open(data_path)?;
757 let data_mmap = store_vfs.mmap_open(data_path)?;
756 Some(Box::new(data_mmap))
758 Some(Box::new(data_mmap))
757 };
759 };
758
760
759 let nodemap = if index.is_inline() || !options.use_nodemap {
761 let nodemap = if index.is_inline() || !options.use_nodemap {
760 None
762 None
761 } else {
763 } else {
762 NodeMapDocket::read_from_file(store_vfs, index_path)?.map(
764 NodeMapDocket::read_from_file(store_vfs, index_path)?.map(
763 |(docket, data)| {
765 |(docket, data)| {
764 nodemap::NodeTree::load_bytes(
766 nodemap::NodeTree::load_bytes(
765 Box::new(data),
767 Box::new(data),
766 docket.data_length,
768 docket.data_length,
767 )
769 )
768 },
770 },
769 )
771 )
770 };
772 };
771
773
772 let nodemap = nodemap_for_test.or(nodemap);
774 let nodemap = nodemap_for_test.or(nodemap);
773
775
774 Ok(Revlog {
776 Ok(Revlog {
775 index,
777 index,
776 data_bytes,
778 data_bytes,
777 nodemap,
779 nodemap,
778 })
780 })
779 }
781 }
780
782
781 /// Return number of entries of the `Revlog`.
783 /// Return number of entries of the `Revlog`.
782 pub fn len(&self) -> usize {
784 pub fn len(&self) -> usize {
783 self.index.len()
785 self.index.len()
784 }
786 }
785
787
786 /// Returns `true` if the `Revlog` has zero `entries`.
788 /// Returns `true` if the `Revlog` has zero `entries`.
787 pub fn is_empty(&self) -> bool {
789 pub fn is_empty(&self) -> bool {
788 self.index.is_empty()
790 self.index.is_empty()
789 }
791 }
790
792
791 /// Returns the node ID for the given revision number, if it exists in this
793 /// Returns the node ID for the given revision number, if it exists in this
792 /// revlog
794 /// revlog
793 pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> {
795 pub fn node_from_rev(&self, rev: UncheckedRevision) -> Option<&Node> {
794 if rev == NULL_REVISION.into() {
796 if rev == NULL_REVISION.into() {
795 return Some(&NULL_NODE);
797 return Some(&NULL_NODE);
796 }
798 }
797 let rev = self.index.check_revision(rev)?;
799 let rev = self.index.check_revision(rev)?;
798 Some(self.index.get_entry(rev)?.hash())
800 Some(self.index.get_entry(rev)?.hash())
799 }
801 }
800
802
801 /// Return the revision number for the given node ID, if it exists in this
803 /// Return the revision number for the given node ID, if it exists in this
802 /// revlog
804 /// revlog
803 pub fn rev_from_node(
805 pub fn rev_from_node(
804 &self,
806 &self,
805 node: NodePrefix,
807 node: NodePrefix,
806 ) -> Result<Revision, RevlogError> {
808 ) -> Result<Revision, RevlogError> {
807 if let Some(nodemap) = &self.nodemap {
809 if let Some(nodemap) = &self.nodemap {
808 nodemap
810 nodemap
809 .find_bin(&self.index, node)?
811 .find_bin(&self.index, node)?
810 .ok_or(RevlogError::InvalidRevision)
812 .ok_or(RevlogError::InvalidRevision)
811 } else {
813 } else {
812 self.rev_from_node_no_persistent_nodemap(node)
814 self.rev_from_node_no_persistent_nodemap(node)
813 }
815 }
814 }
816 }
815
817
816 /// Same as `rev_from_node`, without using a persistent nodemap
818 /// Same as `rev_from_node`, without using a persistent nodemap
817 ///
819 ///
818 /// This is used as fallback when a persistent nodemap is not present.
820 /// This is used as fallback when a persistent nodemap is not present.
819 /// This happens when the persistent-nodemap experimental feature is not
821 /// This happens when the persistent-nodemap experimental feature is not
820 /// enabled, or for small revlogs.
822 /// enabled, or for small revlogs.
821 fn rev_from_node_no_persistent_nodemap(
823 fn rev_from_node_no_persistent_nodemap(
822 &self,
824 &self,
823 node: NodePrefix,
825 node: NodePrefix,
824 ) -> Result<Revision, RevlogError> {
826 ) -> Result<Revision, RevlogError> {
825 // Linear scan of the revlog
827 // Linear scan of the revlog
826 // TODO: consider building a non-persistent nodemap in memory to
828 // TODO: consider building a non-persistent nodemap in memory to
827 // optimize these cases.
829 // optimize these cases.
828 let mut found_by_prefix = None;
830 let mut found_by_prefix = None;
829 for rev in (-1..self.len() as BaseRevision).rev() {
831 for rev in (-1..self.len() as BaseRevision).rev() {
830 let rev = Revision(rev as BaseRevision);
832 let rev = Revision(rev as BaseRevision);
831 let candidate_node = if rev == Revision(-1) {
833 let candidate_node = if rev == Revision(-1) {
832 NULL_NODE
834 NULL_NODE
833 } else {
835 } else {
834 let index_entry =
836 let index_entry =
835 self.index.get_entry(rev).ok_or_else(|| {
837 self.index.get_entry(rev).ok_or_else(|| {
836 HgError::corrupted(
838 HgError::corrupted(
837 "revlog references a revision not in the index",
839 "revlog references a revision not in the index",
838 )
840 )
839 })?;
841 })?;
840 *index_entry.hash()
842 *index_entry.hash()
841 };
843 };
842 if node == candidate_node {
844 if node == candidate_node {
843 return Ok(rev);
845 return Ok(rev);
844 }
846 }
845 if node.is_prefix_of(&candidate_node) {
847 if node.is_prefix_of(&candidate_node) {
846 if found_by_prefix.is_some() {
848 if found_by_prefix.is_some() {
847 return Err(RevlogError::AmbiguousPrefix);
849 return Err(RevlogError::AmbiguousPrefix);
848 }
850 }
849 found_by_prefix = Some(rev)
851 found_by_prefix = Some(rev)
850 }
852 }
851 }
853 }
852 found_by_prefix.ok_or(RevlogError::InvalidRevision)
854 found_by_prefix.ok_or(RevlogError::InvalidRevision)
853 }
855 }
854
856
855 /// Returns whether the given revision exists in this revlog.
857 /// Returns whether the given revision exists in this revlog.
856 pub fn has_rev(&self, rev: UncheckedRevision) -> bool {
858 pub fn has_rev(&self, rev: UncheckedRevision) -> bool {
857 self.index.check_revision(rev).is_some()
859 self.index.check_revision(rev).is_some()
858 }
860 }
859
861
860 /// Return the full data associated to a revision.
862 /// Return the full data associated to a revision.
861 ///
863 ///
862 /// All entries required to build the final data out of deltas will be
864 /// All entries required to build the final data out of deltas will be
863 /// retrieved as needed, and the deltas will be applied to the initial
865 /// retrieved as needed, and the deltas will be applied to the initial
864 /// snapshot to rebuild the final data.
866 /// snapshot to rebuild the final data.
865 pub fn get_rev_data(
867 pub fn get_rev_data(
866 &self,
868 &self,
867 rev: UncheckedRevision,
869 rev: UncheckedRevision,
868 ) -> Result<Cow<[u8]>, RevlogError> {
870 ) -> Result<Cow<[u8]>, RevlogError> {
869 if rev == NULL_REVISION.into() {
871 if rev == NULL_REVISION.into() {
870 return Ok(Cow::Borrowed(&[]));
872 return Ok(Cow::Borrowed(&[]));
871 };
873 };
872 self.get_entry(rev)?.data()
874 self.get_entry(rev)?.data()
873 }
875 }
874
876
875 /// [`Self::get_rev_data`] for checked revisions.
877 /// [`Self::get_rev_data`] for checked revisions.
876 pub fn get_rev_data_for_checked_rev(
878 pub fn get_rev_data_for_checked_rev(
877 &self,
879 &self,
878 rev: Revision,
880 rev: Revision,
879 ) -> Result<Cow<[u8]>, RevlogError> {
881 ) -> Result<Cow<[u8]>, RevlogError> {
880 if rev == NULL_REVISION {
882 if rev == NULL_REVISION {
881 return Ok(Cow::Borrowed(&[]));
883 return Ok(Cow::Borrowed(&[]));
882 };
884 };
883 self.get_entry_for_checked_rev(rev)?.data()
885 self.get_entry_for_checked_rev(rev)?.data()
884 }
886 }
885
887
886 /// Check the hash of some given data against the recorded hash.
888 /// Check the hash of some given data against the recorded hash.
887 pub fn check_hash(
889 pub fn check_hash(
888 &self,
890 &self,
889 p1: Revision,
891 p1: Revision,
890 p2: Revision,
892 p2: Revision,
891 expected: &[u8],
893 expected: &[u8],
892 data: &[u8],
894 data: &[u8],
893 ) -> bool {
895 ) -> bool {
894 let e1 = self.index.get_entry(p1);
896 let e1 = self.index.get_entry(p1);
895 let h1 = match e1 {
897 let h1 = match e1 {
896 Some(ref entry) => entry.hash(),
898 Some(ref entry) => entry.hash(),
897 None => &NULL_NODE,
899 None => &NULL_NODE,
898 };
900 };
899 let e2 = self.index.get_entry(p2);
901 let e2 = self.index.get_entry(p2);
900 let h2 = match e2 {
902 let h2 = match e2 {
901 Some(ref entry) => entry.hash(),
903 Some(ref entry) => entry.hash(),
902 None => &NULL_NODE,
904 None => &NULL_NODE,
903 };
905 };
904
906
905 hash(data, h1.as_bytes(), h2.as_bytes()) == expected
907 hash(data, h1.as_bytes(), h2.as_bytes()) == expected
906 }
908 }
907
909
908 /// Build the full data of a revision out of its snapshot
910 /// Build the full data of a revision out of its snapshot
909 /// and its deltas.
911 /// and its deltas.
910 fn build_data_from_deltas(
912 fn build_data_from_deltas(
911 snapshot: RevlogEntry,
913 snapshot: RevlogEntry,
912 deltas: &[RevlogEntry],
914 deltas: &[RevlogEntry],
913 ) -> Result<Vec<u8>, HgError> {
915 ) -> Result<Vec<u8>, HgError> {
914 let snapshot = snapshot.data_chunk()?;
916 let snapshot = snapshot.data_chunk()?;
915 let deltas = deltas
917 let deltas = deltas
916 .iter()
918 .iter()
917 .rev()
919 .rev()
918 .map(RevlogEntry::data_chunk)
920 .map(RevlogEntry::data_chunk)
919 .collect::<Result<Vec<_>, _>>()?;
921 .collect::<Result<Vec<_>, _>>()?;
920 let patches: Vec<_> =
922 let patches: Vec<_> =
921 deltas.iter().map(|d| patch::PatchList::new(d)).collect();
923 deltas.iter().map(|d| patch::PatchList::new(d)).collect();
922 let patch = patch::fold_patch_lists(&patches);
924 let patch = patch::fold_patch_lists(&patches);
923 Ok(patch.apply(&snapshot))
925 Ok(patch.apply(&snapshot))
924 }
926 }
925
927
926 /// Return the revlog data.
928 /// Return the revlog data.
927 fn data(&self) -> &[u8] {
929 fn data(&self) -> &[u8] {
928 match &self.data_bytes {
930 match &self.data_bytes {
929 Some(data_bytes) => data_bytes,
931 Some(data_bytes) => data_bytes,
930 None => panic!(
932 None => panic!(
931 "forgot to load the data or trying to access inline data"
933 "forgot to load the data or trying to access inline data"
932 ),
934 ),
933 }
935 }
934 }
936 }
935
937
936 pub fn make_null_entry(&self) -> RevlogEntry {
938 pub fn make_null_entry(&self) -> RevlogEntry {
937 RevlogEntry {
939 RevlogEntry {
938 revlog: self,
940 revlog: self,
939 rev: NULL_REVISION,
941 rev: NULL_REVISION,
940 bytes: b"",
942 bytes: b"",
941 compressed_len: 0,
943 compressed_len: 0,
942 uncompressed_len: 0,
944 uncompressed_len: 0,
943 base_rev_or_base_of_delta_chain: None,
945 base_rev_or_base_of_delta_chain: None,
944 p1: NULL_REVISION,
946 p1: NULL_REVISION,
945 p2: NULL_REVISION,
947 p2: NULL_REVISION,
946 flags: NULL_REVLOG_ENTRY_FLAGS,
948 flags: NULL_REVLOG_ENTRY_FLAGS,
947 hash: NULL_NODE,
949 hash: NULL_NODE,
948 }
950 }
949 }
951 }
950
952
951 fn get_entry_for_checked_rev(
953 fn get_entry_for_checked_rev(
952 &self,
954 &self,
953 rev: Revision,
955 rev: Revision,
954 ) -> Result<RevlogEntry, RevlogError> {
956 ) -> Result<RevlogEntry, RevlogError> {
955 if rev == NULL_REVISION {
957 if rev == NULL_REVISION {
956 return Ok(self.make_null_entry());
958 return Ok(self.make_null_entry());
957 }
959 }
958 let index_entry = self
960 let index_entry = self
959 .index
961 .index
960 .get_entry(rev)
962 .get_entry(rev)
961 .ok_or(RevlogError::InvalidRevision)?;
963 .ok_or(RevlogError::InvalidRevision)?;
962 let offset = index_entry.offset();
964 let offset = index_entry.offset();
963 let start = if self.index.is_inline() {
965 let start = if self.index.is_inline() {
964 offset + ((rev.0 as usize + 1) * INDEX_ENTRY_SIZE)
966 offset + ((rev.0 as usize + 1) * INDEX_ENTRY_SIZE)
965 } else {
967 } else {
966 offset
968 offset
967 };
969 };
968 let end = start + index_entry.compressed_len() as usize;
970 let end = start + index_entry.compressed_len() as usize;
969 let data = if self.index.is_inline() {
971 let data = if self.index.is_inline() {
970 self.index.data(start, end)
972 self.index.data(start, end)
971 } else {
973 } else {
972 &self.data()[start..end]
974 &self.data()[start..end]
973 };
975 };
974 let base_rev = self
976 let base_rev = self
975 .index
977 .index
976 .check_revision(index_entry.base_revision_or_base_of_delta_chain())
978 .check_revision(index_entry.base_revision_or_base_of_delta_chain())
977 .ok_or_else(|| {
979 .ok_or_else(|| {
978 RevlogError::corrupted(format!(
980 RevlogError::corrupted(format!(
979 "base revision for rev {} is invalid",
981 "base revision for rev {} is invalid",
980 rev
982 rev
981 ))
983 ))
982 })?;
984 })?;
983 let p1 =
985 let p1 =
984 self.index.check_revision(index_entry.p1()).ok_or_else(|| {
986 self.index.check_revision(index_entry.p1()).ok_or_else(|| {
985 RevlogError::corrupted(format!(
987 RevlogError::corrupted(format!(
986 "p1 for rev {} is invalid",
988 "p1 for rev {} is invalid",
987 rev
989 rev
988 ))
990 ))
989 })?;
991 })?;
990 let p2 =
992 let p2 =
991 self.index.check_revision(index_entry.p2()).ok_or_else(|| {
993 self.index.check_revision(index_entry.p2()).ok_or_else(|| {
992 RevlogError::corrupted(format!(
994 RevlogError::corrupted(format!(
993 "p2 for rev {} is invalid",
995 "p2 for rev {} is invalid",
994 rev
996 rev
995 ))
997 ))
996 })?;
998 })?;
997 let entry = RevlogEntry {
999 let entry = RevlogEntry {
998 revlog: self,
1000 revlog: self,
999 rev,
1001 rev,
1000 bytes: data,
1002 bytes: data,
1001 compressed_len: index_entry.compressed_len(),
1003 compressed_len: index_entry.compressed_len(),
1002 uncompressed_len: index_entry.uncompressed_len(),
1004 uncompressed_len: index_entry.uncompressed_len(),
1003 base_rev_or_base_of_delta_chain: if base_rev == rev {
1005 base_rev_or_base_of_delta_chain: if base_rev == rev {
1004 None
1006 None
1005 } else {
1007 } else {
1006 Some(base_rev)
1008 Some(base_rev)
1007 },
1009 },
1008 p1,
1010 p1,
1009 p2,
1011 p2,
1010 flags: index_entry.flags(),
1012 flags: index_entry.flags(),
1011 hash: *index_entry.hash(),
1013 hash: *index_entry.hash(),
1012 };
1014 };
1013 Ok(entry)
1015 Ok(entry)
1014 }
1016 }
1015
1017
1016 /// Get an entry of the revlog.
1018 /// Get an entry of the revlog.
1017 pub fn get_entry(
1019 pub fn get_entry(
1018 &self,
1020 &self,
1019 rev: UncheckedRevision,
1021 rev: UncheckedRevision,
1020 ) -> Result<RevlogEntry, RevlogError> {
1022 ) -> Result<RevlogEntry, RevlogError> {
1021 if rev == NULL_REVISION.into() {
1023 if rev == NULL_REVISION.into() {
1022 return Ok(self.make_null_entry());
1024 return Ok(self.make_null_entry());
1023 }
1025 }
1024 let rev = self.index.check_revision(rev).ok_or_else(|| {
1026 let rev = self.index.check_revision(rev).ok_or_else(|| {
1025 RevlogError::corrupted(format!("rev {} is invalid", rev))
1027 RevlogError::corrupted(format!("rev {} is invalid", rev))
1026 })?;
1028 })?;
1027 self.get_entry_for_checked_rev(rev)
1029 self.get_entry_for_checked_rev(rev)
1028 }
1030 }
1029 }
1031 }
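// --- Illustrative sketch, not part of this changeset ---
// Mirrors the tests below: open a revlog through the concrete `VfsImpl` and
// read the full data of revision 0 if it exists. `RevlogOpenOptions::default()`
// is only available under `#[cfg(test)]`; the "foo.i" path and the helper name
// are made up for this example.
#[cfg(test)]
fn example_read_rev_zero(vfs: &VfsImpl) -> Result<(), RevlogError> {
    let revlog =
        Revlog::open(vfs, "foo.i", None, RevlogOpenOptions::default())?;
    if revlog.has_rev(0.into()) {
        // Deltas are resolved and the node hash is checked by `get_rev_data`.
        let _data = revlog.get_rev_data(0.into())?;
    }
    Ok(())
}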
1030
1032
1031 /// The revlog entry's bytes and the necessary information to extract
1033 /// The revlog entry's bytes and the necessary information to extract
1032 /// the entry's data.
1034 /// the entry's data.
1033 #[derive(Clone)]
1035 #[derive(Clone)]
1034 pub struct RevlogEntry<'revlog> {
1036 pub struct RevlogEntry<'revlog> {
1035 revlog: &'revlog Revlog,
1037 revlog: &'revlog Revlog,
1036 rev: Revision,
1038 rev: Revision,
1037 bytes: &'revlog [u8],
1039 bytes: &'revlog [u8],
1038 compressed_len: u32,
1040 compressed_len: u32,
1039 uncompressed_len: i32,
1041 uncompressed_len: i32,
1040 base_rev_or_base_of_delta_chain: Option<Revision>,
1042 base_rev_or_base_of_delta_chain: Option<Revision>,
1041 p1: Revision,
1043 p1: Revision,
1042 p2: Revision,
1044 p2: Revision,
1043 flags: u16,
1045 flags: u16,
1044 hash: Node,
1046 hash: Node,
1045 }
1047 }
1046
1048
1047 thread_local! {
1049 thread_local! {
1048 // seems fine to [unwrap] here: this can only fail due to memory allocation
1050 // seems fine to [unwrap] here: this can only fail due to memory allocation
1049 // failing, and it's normal for that to cause panic.
1051 // failing, and it's normal for that to cause panic.
1050 static ZSTD_DECODER : RefCell<zstd::bulk::Decompressor<'static>> =
1052 static ZSTD_DECODER : RefCell<zstd::bulk::Decompressor<'static>> =
1051 RefCell::new(zstd::bulk::Decompressor::new().ok().unwrap());
1053 RefCell::new(zstd::bulk::Decompressor::new().ok().unwrap());
1052 }
1054 }
1053
1055
1054 fn zstd_decompress_to_buffer(
1056 fn zstd_decompress_to_buffer(
1055 bytes: &[u8],
1057 bytes: &[u8],
1056 buf: &mut Vec<u8>,
1058 buf: &mut Vec<u8>,
1057 ) -> Result<usize, std::io::Error> {
1059 ) -> Result<usize, std::io::Error> {
1058 ZSTD_DECODER
1060 ZSTD_DECODER
1059 .with(|decoder| decoder.borrow_mut().decompress_to_buffer(bytes, buf))
1061 .with(|decoder| decoder.borrow_mut().decompress_to_buffer(bytes, buf))
1060 }
1062 }
1061
1063
1062 impl<'revlog> RevlogEntry<'revlog> {
1064 impl<'revlog> RevlogEntry<'revlog> {
1063 pub fn revision(&self) -> Revision {
1065 pub fn revision(&self) -> Revision {
1064 self.rev
1066 self.rev
1065 }
1067 }
1066
1068
1067 pub fn node(&self) -> &Node {
1069 pub fn node(&self) -> &Node {
1068 &self.hash
1070 &self.hash
1069 }
1071 }
1070
1072
1071 pub fn uncompressed_len(&self) -> Option<u32> {
1073 pub fn uncompressed_len(&self) -> Option<u32> {
1072 u32::try_from(self.uncompressed_len).ok()
1074 u32::try_from(self.uncompressed_len).ok()
1073 }
1075 }
1074
1076
1075 pub fn has_p1(&self) -> bool {
1077 pub fn has_p1(&self) -> bool {
1076 self.p1 != NULL_REVISION
1078 self.p1 != NULL_REVISION
1077 }
1079 }
1078
1080
1079 pub fn p1_entry(
1081 pub fn p1_entry(
1080 &self,
1082 &self,
1081 ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
1083 ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
1082 if self.p1 == NULL_REVISION {
1084 if self.p1 == NULL_REVISION {
1083 Ok(None)
1085 Ok(None)
1084 } else {
1086 } else {
1085 Ok(Some(self.revlog.get_entry_for_checked_rev(self.p1)?))
1087 Ok(Some(self.revlog.get_entry_for_checked_rev(self.p1)?))
1086 }
1088 }
1087 }
1089 }
1088
1090
1089 pub fn p2_entry(
1091 pub fn p2_entry(
1090 &self,
1092 &self,
1091 ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
1093 ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
1092 if self.p2 == NULL_REVISION {
1094 if self.p2 == NULL_REVISION {
1093 Ok(None)
1095 Ok(None)
1094 } else {
1096 } else {
1095 Ok(Some(self.revlog.get_entry_for_checked_rev(self.p2)?))
1097 Ok(Some(self.revlog.get_entry_for_checked_rev(self.p2)?))
1096 }
1098 }
1097 }
1099 }
1098
1100
1099 pub fn p1(&self) -> Option<Revision> {
1101 pub fn p1(&self) -> Option<Revision> {
1100 if self.p1 == NULL_REVISION {
1102 if self.p1 == NULL_REVISION {
1101 None
1103 None
1102 } else {
1104 } else {
1103 Some(self.p1)
1105 Some(self.p1)
1104 }
1106 }
1105 }
1107 }
1106
1108
1107 pub fn p2(&self) -> Option<Revision> {
1109 pub fn p2(&self) -> Option<Revision> {
1108 if self.p2 == NULL_REVISION {
1110 if self.p2 == NULL_REVISION {
1109 None
1111 None
1110 } else {
1112 } else {
1111 Some(self.p2)
1113 Some(self.p2)
1112 }
1114 }
1113 }
1115 }
1114
1116
1115 pub fn is_censored(&self) -> bool {
1117 pub fn is_censored(&self) -> bool {
1116 (self.flags & REVISION_FLAG_CENSORED) != 0
1118 (self.flags & REVISION_FLAG_CENSORED) != 0
1117 }
1119 }
1118
1120
1119 pub fn has_length_affecting_flag_processor(&self) -> bool {
1121 pub fn has_length_affecting_flag_processor(&self) -> bool {
1120 // Relevant Python code: revlog.size()
1122 // Relevant Python code: revlog.size()
1121 // note: ELLIPSIS is known to not change the content
1123 // note: ELLIPSIS is known to not change the content
1122 (self.flags & (REVIDX_KNOWN_FLAGS ^ REVISION_FLAG_ELLIPSIS)) != 0
1124 (self.flags & (REVIDX_KNOWN_FLAGS ^ REVISION_FLAG_ELLIPSIS)) != 0
1123 }
1125 }
1124
1126
1125 /// The data for this entry, after resolving deltas if any.
1127 /// The data for this entry, after resolving deltas if any.
1126 pub fn rawdata(&self) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1128 pub fn rawdata(&self) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1127 let mut entry = self.clone();
1129 let mut entry = self.clone();
1128 let mut delta_chain = vec![];
1130 let mut delta_chain = vec![];
1129
1131
1130 // The meaning of `base_rev_or_base_of_delta_chain` depends on
1132 // The meaning of `base_rev_or_base_of_delta_chain` depends on
1131 // generaldelta. See the doc on `ENTRY_DELTA_BASE` in
1133 // generaldelta. See the doc on `ENTRY_DELTA_BASE` in
1132 // `mercurial/revlogutils/constants.py` and the code in
1134 // `mercurial/revlogutils/constants.py` and the code in
1133 // [_chaininfo] and in [index_deltachain].
1135 // [_chaininfo] and in [index_deltachain].
1134 let uses_generaldelta = self.revlog.index.uses_generaldelta();
1136 let uses_generaldelta = self.revlog.index.uses_generaldelta();
1135 while let Some(base_rev) = entry.base_rev_or_base_of_delta_chain {
1137 while let Some(base_rev) = entry.base_rev_or_base_of_delta_chain {
1136 entry = if uses_generaldelta {
1138 entry = if uses_generaldelta {
1137 delta_chain.push(entry);
1139 delta_chain.push(entry);
1138 self.revlog.get_entry_for_checked_rev(base_rev)?
1140 self.revlog.get_entry_for_checked_rev(base_rev)?
1139 } else {
1141 } else {
1140 let base_rev = UncheckedRevision(entry.rev.0 - 1);
1142 let base_rev = UncheckedRevision(entry.rev.0 - 1);
1141 delta_chain.push(entry);
1143 delta_chain.push(entry);
1142 self.revlog.get_entry(base_rev)?
1144 self.revlog.get_entry(base_rev)?
1143 };
1145 };
1144 }
1146 }
1145
1147
1146 let data = if delta_chain.is_empty() {
1148 let data = if delta_chain.is_empty() {
1147 entry.data_chunk()?
1149 entry.data_chunk()?
1148 } else {
1150 } else {
1149 Revlog::build_data_from_deltas(entry, &delta_chain)?.into()
1151 Revlog::build_data_from_deltas(entry, &delta_chain)?.into()
1150 };
1152 };
1151
1153
1152 Ok(data)
1154 Ok(data)
1153 }
1155 }
1154
1156
1155 fn check_data(
1157 fn check_data(
1156 &self,
1158 &self,
1157 data: Cow<'revlog, [u8]>,
1159 data: Cow<'revlog, [u8]>,
1158 ) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1160 ) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1159 if self.revlog.check_hash(
1161 if self.revlog.check_hash(
1160 self.p1,
1162 self.p1,
1161 self.p2,
1163 self.p2,
1162 self.hash.as_bytes(),
1164 self.hash.as_bytes(),
1163 &data,
1165 &data,
1164 ) {
1166 ) {
1165 Ok(data)
1167 Ok(data)
1166 } else {
1168 } else {
1167 if (self.flags & REVISION_FLAG_ELLIPSIS) != 0 {
1169 if (self.flags & REVISION_FLAG_ELLIPSIS) != 0 {
1168 return Err(HgError::unsupported(
1170 return Err(HgError::unsupported(
1169 "ellipsis revisions are not supported by rhg",
1171 "ellipsis revisions are not supported by rhg",
1170 )
1172 )
1171 .into());
1173 .into());
1172 }
1174 }
1173 Err(corrupted(format!(
1175 Err(corrupted(format!(
1174 "hash check failed for revision {}",
1176 "hash check failed for revision {}",
1175 self.rev
1177 self.rev
1176 ))
1178 ))
1177 .into())
1179 .into())
1178 }
1180 }
1179 }
1181 }
1180
1182
1181 pub fn data(&self) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1183 pub fn data(&self) -> Result<Cow<'revlog, [u8]>, RevlogError> {
1182 let data = self.rawdata()?;
1184 let data = self.rawdata()?;
1183 if self.rev == NULL_REVISION {
1185 if self.rev == NULL_REVISION {
1184 return Ok(data);
1186 return Ok(data);
1185 }
1187 }
1186 if self.is_censored() {
1188 if self.is_censored() {
1187 return Err(HgError::CensoredNodeError.into());
1189 return Err(HgError::CensoredNodeError.into());
1188 }
1190 }
1189 self.check_data(data)
1191 self.check_data(data)
1190 }
1192 }
1191
1193
1192 /// Extract the data contained in the entry.
1194 /// Extract the data contained in the entry.
1193 /// This may be a delta. (See `is_delta`.)
1195 /// This may be a delta. (See `is_delta`.)
1194 fn data_chunk(&self) -> Result<Cow<'revlog, [u8]>, HgError> {
1196 fn data_chunk(&self) -> Result<Cow<'revlog, [u8]>, HgError> {
1195 if self.bytes.is_empty() {
1197 if self.bytes.is_empty() {
1196 return Ok(Cow::Borrowed(&[]));
1198 return Ok(Cow::Borrowed(&[]));
1197 }
1199 }
1198 match self.bytes[0] {
1200 match self.bytes[0] {
1199 // Revision data is the entirety of the entry, including this
1201 // Revision data is the entirety of the entry, including this
1200 // header.
1202 // header.
1201 b'\0' => Ok(Cow::Borrowed(self.bytes)),
1203 b'\0' => Ok(Cow::Borrowed(self.bytes)),
1202 // Raw revision data follows.
1204 // Raw revision data follows.
1203 b'u' => Ok(Cow::Borrowed(&self.bytes[1..])),
1205 b'u' => Ok(Cow::Borrowed(&self.bytes[1..])),
1204 // zlib (RFC 1950) data.
1206 // zlib (RFC 1950) data.
1205 b'x' => Ok(Cow::Owned(self.uncompressed_zlib_data()?)),
1207 b'x' => Ok(Cow::Owned(self.uncompressed_zlib_data()?)),
1206 // zstd data.
1208 // zstd data.
1207 b'\x28' => Ok(Cow::Owned(self.uncompressed_zstd_data()?)),
1209 b'\x28' => Ok(Cow::Owned(self.uncompressed_zstd_data()?)),
1208 // A proper new format should have had a repo/store requirement.
1210 // A proper new format should have had a repo/store requirement.
1209 format_type => Err(corrupted(format!(
1211 format_type => Err(corrupted(format!(
1210 "unknown compression header '{}'",
1212 "unknown compression header '{}'",
1211 format_type
1213 format_type
1212 ))),
1214 ))),
1213 }
1215 }
1214 }
1216 }
1215
1217
1216 fn uncompressed_zlib_data(&self) -> Result<Vec<u8>, HgError> {
1218 fn uncompressed_zlib_data(&self) -> Result<Vec<u8>, HgError> {
1217 let mut decoder = ZlibDecoder::new(self.bytes);
1219 let mut decoder = ZlibDecoder::new(self.bytes);
1218 if self.is_delta() {
1220 if self.is_delta() {
1219 let mut buf = Vec::with_capacity(self.compressed_len as usize);
1221 let mut buf = Vec::with_capacity(self.compressed_len as usize);
1220 decoder
1222 decoder
1221 .read_to_end(&mut buf)
1223 .read_to_end(&mut buf)
1222 .map_err(|e| corrupted(e.to_string()))?;
1224 .map_err(|e| corrupted(e.to_string()))?;
1223 Ok(buf)
1225 Ok(buf)
1224 } else {
1226 } else {
1225 let cap = self.uncompressed_len.max(0) as usize;
1227 let cap = self.uncompressed_len.max(0) as usize;
1226 let mut buf = vec![0; cap];
1228 let mut buf = vec![0; cap];
1227 decoder
1229 decoder
1228 .read_exact(&mut buf)
1230 .read_exact(&mut buf)
1229 .map_err(|e| corrupted(e.to_string()))?;
1231 .map_err(|e| corrupted(e.to_string()))?;
1230 Ok(buf)
1232 Ok(buf)
1231 }
1233 }
1232 }
1234 }
1233
1235
1234 fn uncompressed_zstd_data(&self) -> Result<Vec<u8>, HgError> {
1236 fn uncompressed_zstd_data(&self) -> Result<Vec<u8>, HgError> {
1235 let cap = self.uncompressed_len.max(0) as usize;
1237 let cap = self.uncompressed_len.max(0) as usize;
1236 if self.is_delta() {
1238 if self.is_delta() {
1237 // [cap] is usually an over-estimate of the space needed because
1239 // [cap] is usually an over-estimate of the space needed because
1238 // it's the length of delta-decoded data, but we're interested
1240 // it's the length of delta-decoded data, but we're interested
1239 // in the size of the delta.
1241 // in the size of the delta.
1240 // This means we have to [shrink_to_fit] to avoid holding on
1242 // This means we have to [shrink_to_fit] to avoid holding on
1241 // to a large chunk of memory, but it also means we must have a
1243 // to a large chunk of memory, but it also means we must have a
1242 // fallback branch, for the case when the delta is longer than
1244 // fallback branch, for the case when the delta is longer than
1243 // the original data (surprisingly, this does happen in practice)
1245 // the original data (surprisingly, this does happen in practice)
1244 let mut buf = Vec::with_capacity(cap);
1246 let mut buf = Vec::with_capacity(cap);
1245 match zstd_decompress_to_buffer(self.bytes, &mut buf) {
1247 match zstd_decompress_to_buffer(self.bytes, &mut buf) {
1246 Ok(_) => buf.shrink_to_fit(),
1248 Ok(_) => buf.shrink_to_fit(),
1247 Err(_) => {
1249 Err(_) => {
1248 buf.clear();
1250 buf.clear();
1249 zstd::stream::copy_decode(self.bytes, &mut buf)
1251 zstd::stream::copy_decode(self.bytes, &mut buf)
1250 .map_err(|e| corrupted(e.to_string()))?;
1252 .map_err(|e| corrupted(e.to_string()))?;
1251 }
1253 }
1252 };
1254 };
1253 Ok(buf)
1255 Ok(buf)
1254 } else {
1256 } else {
1255 let mut buf = Vec::with_capacity(cap);
1257 let mut buf = Vec::with_capacity(cap);
1256 let len = zstd_decompress_to_buffer(self.bytes, &mut buf)
1258 let len = zstd_decompress_to_buffer(self.bytes, &mut buf)
1257 .map_err(|e| corrupted(e.to_string()))?;
1259 .map_err(|e| corrupted(e.to_string()))?;
1258 if len != self.uncompressed_len as usize {
1260 if len != self.uncompressed_len as usize {
1259 Err(corrupted("uncompressed length does not match"))
1261 Err(corrupted("uncompressed length does not match"))
1260 } else {
1262 } else {
1261 Ok(buf)
1263 Ok(buf)
1262 }
1264 }
1263 }
1265 }
1264 }
1266 }
1265
1267
1266 /// Tell if the entry is a snapshot or a delta
1268 /// Tell if the entry is a snapshot or a delta
1267 /// (this influences decompression).
1269 /// (this influences decompression).
1268 fn is_delta(&self) -> bool {
1270 fn is_delta(&self) -> bool {
1269 self.base_rev_or_base_of_delta_chain.is_some()
1271 self.base_rev_or_base_of_delta_chain.is_some()
1270 }
1272 }
1271 }
1273 }
1272
1274
1273 /// Calculate the hash of a revision given its data and its parents.
1275 /// Calculate the hash of a revision given its data and its parents.
1274 fn hash(
1276 fn hash(
1275 data: &[u8],
1277 data: &[u8],
1276 p1_hash: &[u8],
1278 p1_hash: &[u8],
1277 p2_hash: &[u8],
1279 p2_hash: &[u8],
1278 ) -> [u8; NODE_BYTES_LENGTH] {
1280 ) -> [u8; NODE_BYTES_LENGTH] {
1279 let mut hasher = Sha1::new();
1281 let mut hasher = Sha1::new();
1280 let (a, b) = (p1_hash, p2_hash);
1282 let (a, b) = (p1_hash, p2_hash);
1281 if a > b {
1283 if a > b {
1282 hasher.update(b);
1284 hasher.update(b);
1283 hasher.update(a);
1285 hasher.update(a);
1284 } else {
1286 } else {
1285 hasher.update(a);
1287 hasher.update(a);
1286 hasher.update(b);
1288 hasher.update(b);
1287 }
1289 }
1288 hasher.update(data);
1290 hasher.update(data);
1289 *hasher.finalize().as_ref()
1291 *hasher.finalize().as_ref()
1290 }
1292 }
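For illustration only (a hypothetical test, not part of this changeset): because the two parent hashes are sorted before being fed to SHA-1, swapping `p1` and `p2` produces the same node hash.

    #[cfg(test)]
    mod hash_order_tests {
        use super::*;

        #[test]
        fn hash_is_symmetric_in_parents() {
            // Two arbitrary parent node hashes.
            let p1 = [0x11u8; NODE_BYTES_LENGTH];
            let p2 = [0x22u8; NODE_BYTES_LENGTH];
            let data = b"revision data";
            // The smaller parent is always hashed first, so order is irrelevant.
            assert_eq!(hash(data, &p1, &p2), hash(data, &p2, &p1));
        }
    }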
1291
1293
1292 #[cfg(test)]
1294 #[cfg(test)]
1293 mod tests {
1295 mod tests {
1294 use super::*;
1296 use super::*;
1295 use crate::index::IndexEntryBuilder;
1297 use crate::index::IndexEntryBuilder;
1296 use itertools::Itertools;
1298 use itertools::Itertools;
1297
1299
1298 #[test]
1300 #[test]
1299 fn test_empty() {
1301 fn test_empty() {
1300 let temp = tempfile::tempdir().unwrap();
1302 let temp = tempfile::tempdir().unwrap();
1301 let vfs = Vfs { base: temp.path() };
1303 let vfs = VfsImpl {
1304 base: temp.path().to_owned(),
1305 };
1302 std::fs::write(temp.path().join("foo.i"), b"").unwrap();
1306 std::fs::write(temp.path().join("foo.i"), b"").unwrap();
1303 std::fs::write(temp.path().join("foo.d"), b"").unwrap();
1307 std::fs::write(temp.path().join("foo.d"), b"").unwrap();
1304 let revlog =
1308 let revlog =
1305 Revlog::open(&vfs, "foo.i", None, RevlogOpenOptions::default())
1309 Revlog::open(&vfs, "foo.i", None, RevlogOpenOptions::default())
1306 .unwrap();
1310 .unwrap();
1307 assert!(revlog.is_empty());
1311 assert!(revlog.is_empty());
1308 assert_eq!(revlog.len(), 0);
1312 assert_eq!(revlog.len(), 0);
1309 assert!(revlog.get_entry(0.into()).is_err());
1313 assert!(revlog.get_entry(0.into()).is_err());
1310 assert!(!revlog.has_rev(0.into()));
1314 assert!(!revlog.has_rev(0.into()));
1311 assert_eq!(
1315 assert_eq!(
1312 revlog.rev_from_node(NULL_NODE.into()).unwrap(),
1316 revlog.rev_from_node(NULL_NODE.into()).unwrap(),
1313 NULL_REVISION
1317 NULL_REVISION
1314 );
1318 );
1315 let null_entry = revlog.get_entry(NULL_REVISION.into()).ok().unwrap();
1319 let null_entry = revlog.get_entry(NULL_REVISION.into()).ok().unwrap();
1316 assert_eq!(null_entry.revision(), NULL_REVISION);
1320 assert_eq!(null_entry.revision(), NULL_REVISION);
1317 assert!(null_entry.data().unwrap().is_empty());
1321 assert!(null_entry.data().unwrap().is_empty());
1318 }
1322 }
1319
1323
1320 #[test]
1324 #[test]
1321 fn test_inline() {
1325 fn test_inline() {
1322 let temp = tempfile::tempdir().unwrap();
1326 let temp = tempfile::tempdir().unwrap();
1323 let vfs = Vfs { base: temp.path() };
1327 let vfs = VfsImpl {
1328 base: temp.path().to_owned(),
1329 };
1324 let node0 = Node::from_hex("2ed2a3912a0b24502043eae84ee4b279c18b90dd")
1330 let node0 = Node::from_hex("2ed2a3912a0b24502043eae84ee4b279c18b90dd")
1325 .unwrap();
1331 .unwrap();
1326 let node1 = Node::from_hex("b004912a8510032a0350a74daa2803dadfb00e12")
1332 let node1 = Node::from_hex("b004912a8510032a0350a74daa2803dadfb00e12")
1327 .unwrap();
1333 .unwrap();
1328 let node2 = Node::from_hex("dd6ad206e907be60927b5a3117b97dffb2590582")
1334 let node2 = Node::from_hex("dd6ad206e907be60927b5a3117b97dffb2590582")
1329 .unwrap();
1335 .unwrap();
1330 let entry0_bytes = IndexEntryBuilder::new()
1336 let entry0_bytes = IndexEntryBuilder::new()
1331 .is_first(true)
1337 .is_first(true)
1332 .with_version(1)
1338 .with_version(1)
1333 .with_inline(true)
1339 .with_inline(true)
1334 .with_node(node0)
1340 .with_node(node0)
1335 .build();
1341 .build();
1336 let entry1_bytes = IndexEntryBuilder::new().with_node(node1).build();
1342 let entry1_bytes = IndexEntryBuilder::new().with_node(node1).build();
1337 let entry2_bytes = IndexEntryBuilder::new()
1343 let entry2_bytes = IndexEntryBuilder::new()
1338 .with_p1(Revision(0))
1344 .with_p1(Revision(0))
1339 .with_p2(Revision(1))
1345 .with_p2(Revision(1))
1340 .with_node(node2)
1346 .with_node(node2)
1341 .build();
1347 .build();
1342 let contents = vec![entry0_bytes, entry1_bytes, entry2_bytes]
1348 let contents = vec![entry0_bytes, entry1_bytes, entry2_bytes]
1343 .into_iter()
1349 .into_iter()
1344 .flatten()
1350 .flatten()
1345 .collect_vec();
1351 .collect_vec();
1346 std::fs::write(temp.path().join("foo.i"), contents).unwrap();
1352 std::fs::write(temp.path().join("foo.i"), contents).unwrap();
1347 let revlog =
1353 let revlog =
1348 Revlog::open(&vfs, "foo.i", None, RevlogOpenOptions::default())
1354 Revlog::open(&vfs, "foo.i", None, RevlogOpenOptions::default())
1349 .unwrap();
1355 .unwrap();
1350
1356
1351 let entry0 = revlog.get_entry(0.into()).ok().unwrap();
1357 let entry0 = revlog.get_entry(0.into()).ok().unwrap();
1352 assert_eq!(entry0.revision(), Revision(0));
1358 assert_eq!(entry0.revision(), Revision(0));
1353 assert_eq!(*entry0.node(), node0);
1359 assert_eq!(*entry0.node(), node0);
1354 assert!(!entry0.has_p1());
1360 assert!(!entry0.has_p1());
1355 assert_eq!(entry0.p1(), None);
1361 assert_eq!(entry0.p1(), None);
1356 assert_eq!(entry0.p2(), None);
1362 assert_eq!(entry0.p2(), None);
1357 let p1_entry = entry0.p1_entry().unwrap();
1363 let p1_entry = entry0.p1_entry().unwrap();
1358 assert!(p1_entry.is_none());
1364 assert!(p1_entry.is_none());
1359 let p2_entry = entry0.p2_entry().unwrap();
1365 let p2_entry = entry0.p2_entry().unwrap();
1360 assert!(p2_entry.is_none());
1366 assert!(p2_entry.is_none());
1361
1367
1362 let entry1 = revlog.get_entry(1.into()).ok().unwrap();
1368 let entry1 = revlog.get_entry(1.into()).ok().unwrap();
1363 assert_eq!(entry1.revision(), Revision(1));
1369 assert_eq!(entry1.revision(), Revision(1));
1364 assert_eq!(*entry1.node(), node1);
1370 assert_eq!(*entry1.node(), node1);
1365 assert!(!entry1.has_p1());
1371 assert!(!entry1.has_p1());
1366 assert_eq!(entry1.p1(), None);
1372 assert_eq!(entry1.p1(), None);
1367 assert_eq!(entry1.p2(), None);
1373 assert_eq!(entry1.p2(), None);
1368 let p1_entry = entry1.p1_entry().unwrap();
1374 let p1_entry = entry1.p1_entry().unwrap();
1369 assert!(p1_entry.is_none());
1375 assert!(p1_entry.is_none());
1370 let p2_entry = entry1.p2_entry().unwrap();
1376 let p2_entry = entry1.p2_entry().unwrap();
1371 assert!(p2_entry.is_none());
1377 assert!(p2_entry.is_none());
1372
1378
1373 let entry2 = revlog.get_entry(2.into()).ok().unwrap();
1379 let entry2 = revlog.get_entry(2.into()).ok().unwrap();
1374 assert_eq!(entry2.revision(), Revision(2));
1380 assert_eq!(entry2.revision(), Revision(2));
1375 assert_eq!(*entry2.node(), node2);
1381 assert_eq!(*entry2.node(), node2);
1376 assert!(entry2.has_p1());
1382 assert!(entry2.has_p1());
1377 assert_eq!(entry2.p1(), Some(Revision(0)));
1383 assert_eq!(entry2.p1(), Some(Revision(0)));
1378 assert_eq!(entry2.p2(), Some(Revision(1)));
1384 assert_eq!(entry2.p2(), Some(Revision(1)));
1379 let p1_entry = entry2.p1_entry().unwrap();
1385 let p1_entry = entry2.p1_entry().unwrap();
1380 assert!(p1_entry.is_some());
1386 assert!(p1_entry.is_some());
1381 assert_eq!(p1_entry.unwrap().revision(), Revision(0));
1387 assert_eq!(p1_entry.unwrap().revision(), Revision(0));
1382 let p2_entry = entry2.p2_entry().unwrap();
1388 let p2_entry = entry2.p2_entry().unwrap();
1383 assert!(p2_entry.is_some());
1389 assert!(p2_entry.is_some());
1384 assert_eq!(p2_entry.unwrap().revision(), Revision(1));
1390 assert_eq!(p2_entry.unwrap().revision(), Revision(1));
1385 }
1391 }
1386
1392
1387 #[test]
1393 #[test]
1388 fn test_nodemap() {
1394 fn test_nodemap() {
1389 let temp = tempfile::tempdir().unwrap();
1395 let temp = tempfile::tempdir().unwrap();
1390 let vfs = Vfs { base: temp.path() };
1396 let vfs = VfsImpl {
1397 base: temp.path().to_owned(),
1398 };
1391
1399
1392 // building a revlog with a forced Node starting with zeros
1400 // building a revlog with a forced Node starting with zeros
1393 // This is a corruption, but it does not preclude using the nodemap
1401 // This is a corruption, but it does not preclude using the nodemap
1394 // if we don't try to access the data
1402 // if we don't try to access the data
1395 let node0 = Node::from_hex("00d2a3912a0b24502043eae84ee4b279c18b90dd")
1403 let node0 = Node::from_hex("00d2a3912a0b24502043eae84ee4b279c18b90dd")
1396 .unwrap();
1404 .unwrap();
1397 let node1 = Node::from_hex("b004912a8510032a0350a74daa2803dadfb00e12")
1405 let node1 = Node::from_hex("b004912a8510032a0350a74daa2803dadfb00e12")
1398 .unwrap();
1406 .unwrap();
1399 let entry0_bytes = IndexEntryBuilder::new()
1407 let entry0_bytes = IndexEntryBuilder::new()
1400 .is_first(true)
1408 .is_first(true)
1401 .with_version(1)
1409 .with_version(1)
1402 .with_inline(true)
1410 .with_inline(true)
1403 .with_node(node0)
1411 .with_node(node0)
1404 .build();
1412 .build();
1405 let entry1_bytes = IndexEntryBuilder::new().with_node(node1).build();
1413 let entry1_bytes = IndexEntryBuilder::new().with_node(node1).build();
1406 let contents = vec![entry0_bytes, entry1_bytes]
1414 let contents = vec![entry0_bytes, entry1_bytes]
1407 .into_iter()
1415 .into_iter()
1408 .flatten()
1416 .flatten()
1409 .collect_vec();
1417 .collect_vec();
1410 std::fs::write(temp.path().join("foo.i"), contents).unwrap();
1418 std::fs::write(temp.path().join("foo.i"), contents).unwrap();
1411
1419
1412 let mut idx = nodemap::tests::TestNtIndex::new();
1420 let mut idx = nodemap::tests::TestNtIndex::new();
1413 idx.insert_node(Revision(0), node0).unwrap();
1421 idx.insert_node(Revision(0), node0).unwrap();
1414 idx.insert_node(Revision(1), node1).unwrap();
1422 idx.insert_node(Revision(1), node1).unwrap();
1415
1423
1416 let revlog = Revlog::open_gen(
1424 let revlog = Revlog::open_gen(
1417 &vfs,
1425 &vfs,
1418 "foo.i",
1426 "foo.i",
1419 None,
1427 None,
1420 RevlogOpenOptions::default(),
1428 RevlogOpenOptions::default(),
1421 Some(idx.nt),
1429 Some(idx.nt),
1422 )
1430 )
1423 .unwrap();
1431 .unwrap();
1424
1432
1425 // accessing the data shows the corruption
1433 // accessing the data shows the corruption
1426 revlog.get_entry(0.into()).unwrap().data().unwrap_err();
1434 revlog.get_entry(0.into()).unwrap().data().unwrap_err();
1427
1435
1428 assert_eq!(
1436 assert_eq!(
1429 revlog.rev_from_node(NULL_NODE.into()).unwrap(),
1437 revlog.rev_from_node(NULL_NODE.into()).unwrap(),
1430 Revision(-1)
1438 Revision(-1)
1431 );
1439 );
1432 assert_eq!(revlog.rev_from_node(node0.into()).unwrap(), Revision(0));
1440 assert_eq!(revlog.rev_from_node(node0.into()).unwrap(), Revision(0));
1433 assert_eq!(revlog.rev_from_node(node1.into()).unwrap(), Revision(1));
1441 assert_eq!(revlog.rev_from_node(node1.into()).unwrap(), Revision(1));
1434 assert_eq!(
1442 assert_eq!(
1435 revlog
1443 revlog
1436 .rev_from_node(NodePrefix::from_hex("000").unwrap())
1444 .rev_from_node(NodePrefix::from_hex("000").unwrap())
1437 .unwrap(),
1445 .unwrap(),
1438 Revision(-1)
1446 Revision(-1)
1439 );
1447 );
1440 assert_eq!(
1448 assert_eq!(
1441 revlog
1449 revlog
1442 .rev_from_node(NodePrefix::from_hex("b00").unwrap())
1450 .rev_from_node(NodePrefix::from_hex("b00").unwrap())
1443 .unwrap(),
1451 .unwrap(),
1444 Revision(1)
1452 Revision(1)
1445 );
1453 );
1446 // RevlogError does not implement PartialEq
1454 // RevlogError does not implement PartialEq
1447 // (ultimately because io::Error does not)
1455 // (ultimately because io::Error does not)
1448 match revlog
1456 match revlog
1449 .rev_from_node(NodePrefix::from_hex("00").unwrap())
1457 .rev_from_node(NodePrefix::from_hex("00").unwrap())
1450 .expect_err("Expected to give AmbiguousPrefix error")
1458 .expect_err("Expected to give AmbiguousPrefix error")
1451 {
1459 {
1452 RevlogError::AmbiguousPrefix => (),
1460 RevlogError::AmbiguousPrefix => (),
1453 e => {
1461 e => {
1454 panic!("Got another error than AmbiguousPrefix: {:?}", e);
1462 panic!("Got another error than AmbiguousPrefix: {:?}", e);
1455 }
1463 }
1456 };
1464 };
1457 }
1465 }
1458 }
1466 }
@@ -1,108 +1,108
1 use crate::errors::{HgError, HgResultExt};
1 use crate::errors::{HgError, HgResultExt};
2 use bytes_cast::{unaligned, BytesCast};
2 use bytes_cast::{unaligned, BytesCast};
3 use memmap2::Mmap;
3 use memmap2::Mmap;
4 use std::path::{Path, PathBuf};
4 use std::path::{Path, PathBuf};
5
5
6 use crate::vfs::Vfs;
6 use crate::vfs::VfsImpl;
7
7
8 const ONDISK_VERSION: u8 = 1;
8 const ONDISK_VERSION: u8 = 1;
9
9
10 pub(super) struct NodeMapDocket {
10 pub(super) struct NodeMapDocket {
11 pub data_length: usize,
11 pub data_length: usize,
12 // TODO: keep here more of the data from `parse()` when we need it
12 // TODO: keep here more of the data from `parse()` when we need it
13 }
13 }
14
14
15 #[derive(BytesCast)]
15 #[derive(BytesCast)]
16 #[repr(C)]
16 #[repr(C)]
17 struct DocketHeader {
17 struct DocketHeader {
18 uid_size: u8,
18 uid_size: u8,
19 _tip_rev: unaligned::U64Be,
19 _tip_rev: unaligned::U64Be,
20 data_length: unaligned::U64Be,
20 data_length: unaligned::U64Be,
21 _data_unused: unaligned::U64Be,
21 _data_unused: unaligned::U64Be,
22 tip_node_size: unaligned::U64Be,
22 tip_node_size: unaligned::U64Be,
23 }
23 }
24
24
25 impl NodeMapDocket {
25 impl NodeMapDocket {
26 /// Return `Ok(None)` when the caller should proceed without a persistent
26 /// Return `Ok(None)` when the caller should proceed without a persistent
27 /// nodemap:
27 /// nodemap:
28 ///
28 ///
29 /// * This revlog does not have a `.n` docket file (it is not generated for
29 /// * This revlog does not have a `.n` docket file (it is not generated for
30 /// small revlogs), or
30 /// small revlogs), or
31 /// * The docket has an unsupported version number (repositories created by
31 /// * The docket has an unsupported version number (repositories created by
32 /// later hg, maybe that should be a requirement instead?), or
32 /// later hg, maybe that should be a requirement instead?), or
33 /// * The docket file points to a missing (likely deleted) data file (this
33 /// * The docket file points to a missing (likely deleted) data file (this
34 /// can happen in a rare race condition).
34 /// can happen in a rare race condition).
35 pub fn read_from_file(
35 pub fn read_from_file(
36 store_vfs: &Vfs,
36 store_vfs: &VfsImpl,
37 index_path: &Path,
37 index_path: &Path,
38 ) -> Result<Option<(Self, Mmap)>, HgError> {
38 ) -> Result<Option<(Self, Mmap)>, HgError> {
39 let docket_path = index_path.with_extension("n");
39 let docket_path = index_path.with_extension("n");
40 let docket_bytes = if let Some(bytes) =
40 let docket_bytes = if let Some(bytes) =
41 store_vfs.read(&docket_path).io_not_found_as_none()?
41 store_vfs.read(&docket_path).io_not_found_as_none()?
42 {
42 {
43 bytes
43 bytes
44 } else {
44 } else {
45 return Ok(None);
45 return Ok(None);
46 };
46 };
47
47
48 let input = if let Some((&ONDISK_VERSION, rest)) =
48 let input = if let Some((&ONDISK_VERSION, rest)) =
49 docket_bytes.split_first()
49 docket_bytes.split_first()
50 {
50 {
51 rest
51 rest
52 } else {
52 } else {
53 return Ok(None);
53 return Ok(None);
54 };
54 };
55
55
56 /// Treat any error as a parse error
56 /// Treat any error as a parse error
57 fn parse<T, E>(result: Result<T, E>) -> Result<T, HgError> {
57 fn parse<T, E>(result: Result<T, E>) -> Result<T, HgError> {
58 result
58 result
59 .map_err(|_| HgError::corrupted("nodemap docket parse error"))
59 .map_err(|_| HgError::corrupted("nodemap docket parse error"))
60 }
60 }
61
61
62 let (header, rest) = parse(DocketHeader::from_bytes(input))?;
62 let (header, rest) = parse(DocketHeader::from_bytes(input))?;
63 let uid_size = header.uid_size as usize;
63 let uid_size = header.uid_size as usize;
64 // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit
64 // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit
65 // systems?
65 // systems?
66 let tip_node_size = header.tip_node_size.get() as usize;
66 let tip_node_size = header.tip_node_size.get() as usize;
67 let data_length = header.data_length.get() as usize;
67 let data_length = header.data_length.get() as usize;
68 let (uid, rest) = parse(u8::slice_from_bytes(rest, uid_size))?;
68 let (uid, rest) = parse(u8::slice_from_bytes(rest, uid_size))?;
69 let (_tip_node, _rest) =
69 let (_tip_node, _rest) =
70 parse(u8::slice_from_bytes(rest, tip_node_size))?;
70 parse(u8::slice_from_bytes(rest, tip_node_size))?;
71 let uid = parse(std::str::from_utf8(uid))?;
71 let uid = parse(std::str::from_utf8(uid))?;
72 let docket = NodeMapDocket { data_length };
72 let docket = NodeMapDocket { data_length };
73
73
74 let data_path = rawdata_path(&docket_path, uid);
74 let data_path = rawdata_path(&docket_path, uid);
75 // TODO: use `vfs.read()` here when the `persistent-nodemap.mmap`
75 // TODO: use `vfs.read()` here when the `persistent-nodemap.mmap`
76 // config is false?
76 // config is false?
77 if let Some(mmap) =
77 if let Some(mmap) =
78 store_vfs.mmap_open(data_path).io_not_found_as_none()?
78 store_vfs.mmap_open(data_path).io_not_found_as_none()?
79 {
79 {
80 if mmap.len() >= data_length {
80 if mmap.len() >= data_length {
81 Ok(Some((docket, mmap)))
81 Ok(Some((docket, mmap)))
82 } else {
82 } else {
83 Err(HgError::corrupted("persistent nodemap too short"))
83 Err(HgError::corrupted("persistent nodemap too short"))
84 }
84 }
85 } else {
85 } else {
86 // Even if .hg/requires opted in, some revlogs are deemed small
86 // Even if .hg/requires opted in, some revlogs are deemed small
87 // enough to not need a persistent nodemap.
87 // enough to not need a persistent nodemap.
88 Ok(None)
88 Ok(None)
89 }
89 }
90 }
90 }
91 }
91 }
92
92
93 fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf {
93 fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf {
94 let docket_name = docket_path
94 let docket_name = docket_path
95 .file_name()
95 .file_name()
96 .expect("expected a base name")
96 .expect("expected a base name")
97 .to_str()
97 .to_str()
98 .expect("expected an ASCII file name in the store");
98 .expect("expected an ASCII file name in the store");
99 let prefix = docket_name
99 let prefix = docket_name
100 .strip_suffix(".n.a")
100 .strip_suffix(".n.a")
101 .or_else(|| docket_name.strip_suffix(".n"))
101 .or_else(|| docket_name.strip_suffix(".n"))
102 .expect("expected docket path in .n or .n.a");
102 .expect("expected docket path in .n or .n.a");
103 let name = format!("{}-{}.nd", prefix, uid);
103 let name = format!("{}-{}.nd", prefix, uid);
104 docket_path
104 docket_path
105 .parent()
105 .parent()
106 .expect("expected a non-root path")
106 .expect("expected a non-root path")
107 .join(name)
107 .join(name)
108 }
108 }
@@ -1,205 +1,382
1 use crate::errors::{HgError, IoErrorContext, IoResultExt};
1 use crate::errors::{HgError, IoErrorContext, IoResultExt};
2 use crate::exit_codes;
3 use dyn_clone::DynClone;
2 use memmap2::{Mmap, MmapOptions};
4 use memmap2::{Mmap, MmapOptions};
5 use std::fs::File;
3 use std::io::{ErrorKind, Write};
6 use std::io::{ErrorKind, Write};
7 use std::os::unix::fs::MetadataExt;
4 use std::path::{Path, PathBuf};
8 use std::path::{Path, PathBuf};
5
9
6 /// Filesystem access abstraction for the contents of a given "base" directory
10 /// Filesystem access abstraction for the contents of a given "base" directory
7 #[derive(Clone, Copy)]
11 #[derive(Clone)]
8 pub struct Vfs<'a> {
12 pub struct VfsImpl {
9 pub(crate) base: &'a Path,
13 pub(crate) base: PathBuf,
10 }
14 }
11
15
12 struct FileNotFound(std::io::Error, PathBuf);
16 struct FileNotFound(std::io::Error, PathBuf);
13
17
14 impl Vfs<'_> {
18 impl VfsImpl {
15 pub fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
19 pub fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
16 self.base.join(relative_path)
20 self.base.join(relative_path)
17 }
21 }
18
22
19 pub fn symlink_metadata(
23 pub fn symlink_metadata(
20 &self,
24 &self,
21 relative_path: impl AsRef<Path>,
25 relative_path: impl AsRef<Path>,
22 ) -> Result<std::fs::Metadata, HgError> {
26 ) -> Result<std::fs::Metadata, HgError> {
23 let path = self.join(relative_path);
27 let path = self.join(relative_path);
24 std::fs::symlink_metadata(&path).when_reading_file(&path)
28 std::fs::symlink_metadata(&path).when_reading_file(&path)
25 }
29 }
26
30
27 pub fn read_link(
31 pub fn read_link(
28 &self,
32 &self,
29 relative_path: impl AsRef<Path>,
33 relative_path: impl AsRef<Path>,
30 ) -> Result<PathBuf, HgError> {
34 ) -> Result<PathBuf, HgError> {
31 let path = self.join(relative_path);
35 let path = self.join(relative_path);
32 std::fs::read_link(&path).when_reading_file(&path)
36 std::fs::read_link(&path).when_reading_file(&path)
33 }
37 }
34
38
35 pub fn read(
39 pub fn read(
36 &self,
40 &self,
37 relative_path: impl AsRef<Path>,
41 relative_path: impl AsRef<Path>,
38 ) -> Result<Vec<u8>, HgError> {
42 ) -> Result<Vec<u8>, HgError> {
39 let path = self.join(relative_path);
43 let path = self.join(relative_path);
40 std::fs::read(&path).when_reading_file(&path)
44 std::fs::read(&path).when_reading_file(&path)
41 }
45 }
42
46
43 /// Returns `Ok(None)` if the file does not exist.
47 /// Returns `Ok(None)` if the file does not exist.
44 pub fn try_read(
48 pub fn try_read(
45 &self,
49 &self,
46 relative_path: impl AsRef<Path>,
50 relative_path: impl AsRef<Path>,
47 ) -> Result<Option<Vec<u8>>, HgError> {
51 ) -> Result<Option<Vec<u8>>, HgError> {
48 match self.read(relative_path) {
52 match self.read(relative_path) {
49 Err(e) => match &e {
53 Err(e) => match &e {
50 HgError::IoError { error, .. } => match error.kind() {
54 HgError::IoError { error, .. } => match error.kind() {
51 ErrorKind::NotFound => Ok(None),
55 ErrorKind::NotFound => Ok(None),
52 _ => Err(e),
56 _ => Err(e),
53 },
57 },
54 _ => Err(e),
58 _ => Err(e),
55 },
59 },
56 Ok(v) => Ok(Some(v)),
60 Ok(v) => Ok(Some(v)),
57 }
61 }
58 }
62 }
59
63
60 fn mmap_open_gen(
64 fn mmap_open_gen(
61 &self,
65 &self,
62 relative_path: impl AsRef<Path>,
66 relative_path: impl AsRef<Path>,
63 ) -> Result<Result<Mmap, FileNotFound>, HgError> {
67 ) -> Result<Result<Mmap, FileNotFound>, HgError> {
64 let path = self.join(relative_path);
68 let path = self.join(relative_path);
65 let file = match std::fs::File::open(&path) {
69 let file = match std::fs::File::open(&path) {
66 Err(err) => {
70 Err(err) => {
67 if let ErrorKind::NotFound = err.kind() {
71 if let ErrorKind::NotFound = err.kind() {
68 return Ok(Err(FileNotFound(err, path)));
72 return Ok(Err(FileNotFound(err, path)));
69 };
73 };
70 return (Err(err)).when_reading_file(&path);
74 return (Err(err)).when_reading_file(&path);
71 }
75 }
72 Ok(file) => file,
76 Ok(file) => file,
73 };
77 };
74 // TODO: what are the safety requirements here?
78 // Safety is "enforced" by locks and assuming other processes are
79 // well-behaved. If any misbehaving or malicious process does touch
80 // the index, it could lead to corruption. This is inherent
81 // to file-based `mmap`, though some platforms have some ways of
82 // mitigating it.
83 // TODO linux: set the immutable flag with `chattr(1)`?
75 let mmap = unsafe { MmapOptions::new().map(&file) }
84 let mmap = unsafe { MmapOptions::new().map(&file) }
76 .when_reading_file(&path)?;
85 .when_reading_file(&path)?;
77 Ok(Ok(mmap))
86 Ok(Ok(mmap))
78 }
87 }
79
88
80 pub fn mmap_open_opt(
89 pub fn mmap_open_opt(
81 &self,
90 &self,
82 relative_path: impl AsRef<Path>,
91 relative_path: impl AsRef<Path>,
83 ) -> Result<Option<Mmap>, HgError> {
92 ) -> Result<Option<Mmap>, HgError> {
84 self.mmap_open_gen(relative_path).map(|res| res.ok())
93 self.mmap_open_gen(relative_path).map(|res| res.ok())
85 }
94 }
86
95
87 pub fn mmap_open(
96 pub fn mmap_open(
88 &self,
97 &self,
89 relative_path: impl AsRef<Path>,
98 relative_path: impl AsRef<Path>,
90 ) -> Result<Mmap, HgError> {
99 ) -> Result<Mmap, HgError> {
91 match self.mmap_open_gen(relative_path)? {
100 match self.mmap_open_gen(relative_path)? {
92 Err(FileNotFound(err, path)) => Err(err).when_reading_file(&path),
101 Err(FileNotFound(err, path)) => Err(err).when_reading_file(&path),
93 Ok(res) => Ok(res),
102 Ok(res) => Ok(res),
94 }
103 }
95 }
104 }
96
105
97 pub fn rename(
106 pub fn rename(
98 &self,
107 &self,
99 relative_from: impl AsRef<Path>,
108 relative_from: impl AsRef<Path>,
100 relative_to: impl AsRef<Path>,
109 relative_to: impl AsRef<Path>,
101 ) -> Result<(), HgError> {
110 ) -> Result<(), HgError> {
102 let from = self.join(relative_from);
111 let from = self.join(relative_from);
103 let to = self.join(relative_to);
112 let to = self.join(relative_to);
104 std::fs::rename(&from, &to)
113 std::fs::rename(&from, &to)
105 .with_context(|| IoErrorContext::RenamingFile { from, to })
114 .with_context(|| IoErrorContext::RenamingFile { from, to })
106 }
115 }
107
116
108 pub fn remove_file(
117 pub fn remove_file(
109 &self,
118 &self,
110 relative_path: impl AsRef<Path>,
119 relative_path: impl AsRef<Path>,
111 ) -> Result<(), HgError> {
120 ) -> Result<(), HgError> {
112 let path = self.join(relative_path);
121 let path = self.join(relative_path);
113 std::fs::remove_file(&path)
122 std::fs::remove_file(&path)
114 .with_context(|| IoErrorContext::RemovingFile(path))
123 .with_context(|| IoErrorContext::RemovingFile(path))
115 }
124 }
116
125
117 #[cfg(unix)]
126 #[cfg(unix)]
118 pub fn create_symlink(
127 pub fn create_symlink(
119 &self,
128 &self,
120 relative_link_path: impl AsRef<Path>,
129 relative_link_path: impl AsRef<Path>,
121 target_path: impl AsRef<Path>,
130 target_path: impl AsRef<Path>,
122 ) -> Result<(), HgError> {
131 ) -> Result<(), HgError> {
123 let link_path = self.join(relative_link_path);
132 let link_path = self.join(relative_link_path);
124 std::os::unix::fs::symlink(target_path, &link_path)
133 std::os::unix::fs::symlink(target_path, &link_path)
125 .when_writing_file(&link_path)
134 .when_writing_file(&link_path)
126 }
135 }
127
136
128 /// Write `contents` into a temporary file, then rename to `relative_path`.
137 /// Write `contents` into a temporary file, then rename to `relative_path`.
129 /// This makes writing to a file "atomic": a reader opening that path will
138 /// This makes writing to a file "atomic": a reader opening that path will
130 /// see either the previous contents of the file or the complete new
139 /// see either the previous contents of the file or the complete new
131 /// content, never a partial write.
140 /// content, never a partial write.
132 pub fn atomic_write(
141 pub fn atomic_write(
133 &self,
142 &self,
134 relative_path: impl AsRef<Path>,
143 relative_path: impl AsRef<Path>,
135 contents: &[u8],
144 contents: &[u8],
136 ) -> Result<(), HgError> {
145 ) -> Result<(), HgError> {
137 let mut tmp = tempfile::NamedTempFile::new_in(self.base)
146 let mut tmp = tempfile::NamedTempFile::new_in(&self.base)
138 .when_writing_file(self.base)?;
147 .when_writing_file(&self.base)?;
139 tmp.write_all(contents)
148 tmp.write_all(contents)
140 .and_then(|()| tmp.flush())
149 .and_then(|()| tmp.flush())
141 .when_writing_file(tmp.path())?;
150 .when_writing_file(tmp.path())?;
142 let path = self.join(relative_path);
151 let path = self.join(relative_path);
143 tmp.persist(&path)
152 tmp.persist(&path)
144 .map_err(|e| e.error)
153 .map_err(|e| e.error)
145 .when_writing_file(&path)?;
154 .when_writing_file(&path)?;
146 Ok(())
155 Ok(())
147 }
156 }
148 }
157 }
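For illustration (a hypothetical caller; the cache file name is an assumption): the atomic-write contract above guarantees that a concurrent reader of the same path never observes a partially written file.

    fn save_branch_cache(vfs: &VfsImpl, contents: &[u8]) -> Result<(), HgError> {
        // Written to a temporary file first, then renamed into place, so readers
        // of "cache/branch2" see either the old or the new contents in full.
        vfs.atomic_write("cache/branch2", contents)
    }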
149
158
150 fn fs_metadata(
159 fn fs_metadata(
151 path: impl AsRef<Path>,
160 path: impl AsRef<Path>,
152 ) -> Result<Option<std::fs::Metadata>, HgError> {
161 ) -> Result<Option<std::fs::Metadata>, HgError> {
153 let path = path.as_ref();
162 let path = path.as_ref();
154 match std::fs::metadata(path) {
163 match std::fs::metadata(path) {
155 Ok(meta) => Ok(Some(meta)),
164 Ok(meta) => Ok(Some(meta)),
156 Err(error) => match error.kind() {
165 Err(error) => match error.kind() {
157 // TODO: when we require a Rust version where `NotADirectory` is
166 // TODO: when we require a Rust version where `NotADirectory` is
158 // stable, invert this logic and return None for it and `NotFound`
167 // stable, invert this logic and return None for it and `NotFound`
159 // and propagate any other error.
168 // and propagate any other error.
160 ErrorKind::PermissionDenied => Err(error).with_context(|| {
169 ErrorKind::PermissionDenied => Err(error).with_context(|| {
161 IoErrorContext::ReadingMetadata(path.to_owned())
170 IoErrorContext::ReadingMetadata(path.to_owned())
162 }),
171 }),
163 _ => Ok(None),
172 _ => Ok(None),
164 },
173 },
165 }
174 }
166 }
175 }
167
176
177 /// Writable file object that atomically updates a file
178 ///
179 /// All writes will go to a temporary copy of the original file. Call
180 /// [`Self::close`] when you are done writing, and [`Self`] will rename
181 /// the temporary copy to the original name, making the changes
182 /// visible. If the object is destroyed without being closed, all your
183 /// writes are discarded.
184 pub struct AtomicFile {
185 /// The temporary file to write to
186 fp: std::fs::File,
187 /// Path of the temp file
188 temp_path: PathBuf,
189 /// Used when stat'ing the file, is useful only if the target file is
190 /// guarded by any lock (e.g. repo.lock or repo.wlock).
191 check_ambig: bool,
192 /// Path of the target file
193 target_name: PathBuf,
194 /// Whether the file is open or not
195 is_open: bool,
196 }
197
198 impl AtomicFile {
199 pub fn new(
200 fp: std::fs::File,
201 check_ambig: bool,
202 temp_name: PathBuf,
203 target_name: PathBuf,
204 ) -> Self {
205 Self {
206 fp,
207 check_ambig,
208 temp_path: temp_name,
209 target_name,
210 is_open: true,
211 }
212 }
213
214 /// Write `buf` to the temporary file
215 pub fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> {
216 self.fp.write_all(buf)
217 }
218
219 fn target(&self) -> PathBuf {
220 self.temp_path
221 .parent()
222 .expect("should not be at the filesystem root")
223 .join(&self.target_name)
224 }
225
226 /// Close the temporary file and rename to the target
227 pub fn close(mut self) -> Result<(), std::io::Error> {
228 self.fp.flush()?;
229 let target = self.target();
230 if self.check_ambig {
231 if let Ok(stat) = std::fs::metadata(&target) {
232 std::fs::rename(&self.temp_path, &target)?;
233 let new_stat = std::fs::metadata(&target)?;
234 let ctime = new_stat.ctime();
235 let is_ambiguous = ctime == stat.ctime();
236 if is_ambiguous {
237 let advanced =
238 filetime::FileTime::from_unix_time(ctime + 1, 0);
239 filetime::set_file_times(target, advanced, advanced)?;
240 }
241 } else {
242 std::fs::rename(&self.temp_path, target)?;
243 }
244 } else {
245 std::fs::rename(&self.temp_path, target)?;
246 }
247 self.is_open = false;
248 Ok(())
249 }
250 }
251
252 impl Drop for AtomicFile {
253 fn drop(&mut self) {
254 if self.is_open {
255 std::fs::remove_file(&self.temp_path).ok();
256 }
257 }
258 }
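A sketch of the lifecycle described in the doc comment above (directory and file names are made up): write everything to the temporary file, then `close()` to publish it atomically; dropping the value without closing abandons the update.

    fn update_file_atomically(dir: &Path) -> Result<(), std::io::Error> {
        let temp_path = dir.join("data.txt.tmp");
        let fp = std::fs::File::create(&temp_path)?;
        let mut out =
            AtomicFile::new(fp, false, temp_path, PathBuf::from("data.txt"));
        out.write_all(b"complete new contents")?;
        // Flushes and renames "data.txt.tmp" to "data.txt" in the same directory.
        out.close()
    }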
259
260 /// Abstracts over the VFS to allow for different implementations of the
261 /// filesystem layer (like passing one from Python).
262 pub trait Vfs: Sync + Send + DynClone {
263 fn open(&self, filename: &Path) -> Result<std::fs::File, HgError>;
264 fn open_read(&self, filename: &Path) -> Result<std::fs::File, HgError>;
265 fn open_check_ambig(
266 &self,
267 filename: &Path,
268 ) -> Result<std::fs::File, HgError>;
269 fn create(&self, filename: &Path) -> Result<std::fs::File, HgError>;
270 /// Must truncate the new file if it exists
271 fn create_atomic(
272 &self,
273 filename: &Path,
274 check_ambig: bool,
275 ) -> Result<AtomicFile, HgError>;
276 fn file_size(&self, file: &File) -> Result<u64, HgError>;
277 fn exists(&self, filename: &Path) -> bool;
278 fn unlink(&self, filename: &Path) -> Result<(), HgError>;
279 fn rename(
280 &self,
281 from: &Path,
282 to: &Path,
283 check_ambig: bool,
284 ) -> Result<(), HgError>;
285 fn copy(&self, from: &Path, to: &Path) -> Result<(), HgError>;
286 }
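A small sketch of what the trait enables (the helper name is an assumption): code can be written against `&dyn Vfs`, so a Python-backed implementation could later be passed in place of `VfsImpl`.

    fn read_whole_file(vfs: &dyn Vfs, filename: &Path) -> Result<Vec<u8>, HgError> {
        use std::io::Read;

        let mut file = vfs.open_read(filename)?;
        let mut buf = Vec::new();
        file.read_to_end(&mut buf).map_err(|e| {
            HgError::abort(
                format!("could not read {}: {}", filename.display(), e),
                exit_codes::ABORT,
                None,
            )
        })?;
        Ok(buf)
    }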
287
288 /// These methods will need to be implemented once `rhg` (and other) non-Python
289 /// users of `hg-core` start doing more on their own, like writing to files.
290 impl Vfs for VfsImpl {
291 fn open(&self, _filename: &Path) -> Result<std::fs::File, HgError> {
292 todo!()
293 }
294 fn open_read(&self, filename: &Path) -> Result<std::fs::File, HgError> {
295 let path = self.base.join(filename);
296 std::fs::File::open(&path).when_reading_file(&path)
297 }
298 fn open_check_ambig(
299 &self,
300 _filename: &Path,
301 ) -> Result<std::fs::File, HgError> {
302 todo!()
303 }
304 fn create(&self, _filename: &Path) -> Result<std::fs::File, HgError> {
305 todo!()
306 }
307 fn create_atomic(
308 &self,
309 _filename: &Path,
310 _check_ambig: bool,
311 ) -> Result<AtomicFile, HgError> {
312 todo!()
313 }
314 fn file_size(&self, file: &File) -> Result<u64, HgError> {
315 Ok(file
316 .metadata()
317 .map_err(|e| {
318 HgError::abort(
319 format!("Could not get file metadata: {}", e),
320 exit_codes::ABORT,
321 None,
322 )
323 })?
324 .size())
325 }
326 fn exists(&self, _filename: &Path) -> bool {
327 todo!()
328 }
329 fn unlink(&self, _filename: &Path) -> Result<(), HgError> {
330 todo!()
331 }
332 fn rename(
333 &self,
334 _from: &Path,
335 _to: &Path,
336 _check_ambig: bool,
337 ) -> Result<(), HgError> {
338 todo!()
339 }
340 fn copy(&self, _from: &Path, _to: &Path) -> Result<(), HgError> {
341 todo!()
342 }
343 }
344
168 pub(crate) fn is_dir(path: impl AsRef<Path>) -> Result<bool, HgError> {
345 pub(crate) fn is_dir(path: impl AsRef<Path>) -> Result<bool, HgError> {
169 Ok(fs_metadata(path)?.map_or(false, |meta| meta.is_dir()))
346 Ok(fs_metadata(path)?.map_or(false, |meta| meta.is_dir()))
170 }
347 }
171
348
172 pub(crate) fn is_file(path: impl AsRef<Path>) -> Result<bool, HgError> {
349 pub(crate) fn is_file(path: impl AsRef<Path>) -> Result<bool, HgError> {
173 Ok(fs_metadata(path)?.map_or(false, |meta| meta.is_file()))
350 Ok(fs_metadata(path)?.map_or(false, |meta| meta.is_file()))
174 }
351 }
175
352
176 /// Returns whether the given `path` is on a network file system.
353 /// Returns whether the given `path` is on a network file system.
177 /// Taken from `cargo`'s codebase.
354 /// Taken from `cargo`'s codebase.
178 #[cfg(target_os = "linux")]
355 #[cfg(target_os = "linux")]
179 pub(crate) fn is_on_nfs_mount(path: impl AsRef<Path>) -> bool {
356 pub(crate) fn is_on_nfs_mount(path: impl AsRef<Path>) -> bool {
180 use std::ffi::CString;
357 use std::ffi::CString;
181 use std::mem;
358 use std::mem;
182 use std::os::unix::prelude::*;
359 use std::os::unix::prelude::*;
183
360
184 let path = match CString::new(path.as_ref().as_os_str().as_bytes()) {
361 let path = match CString::new(path.as_ref().as_os_str().as_bytes()) {
185 Ok(path) => path,
362 Ok(path) => path,
186 Err(_) => return false,
363 Err(_) => return false,
187 };
364 };
188
365
189 unsafe {
366 unsafe {
190 let mut buf: libc::statfs = mem::zeroed();
367 let mut buf: libc::statfs = mem::zeroed();
191 let r = libc::statfs(path.as_ptr(), &mut buf);
368 let r = libc::statfs(path.as_ptr(), &mut buf);
192
369
193 r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32
370 r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32
194 }
371 }
195 }
372 }
196
373
197 /// Similar to what Cargo does; although detecting NFS (or non-local
374 /// Similar to what Cargo does; although detecting NFS (or non-local
198 /// file systems) _should_ be possible on other operating systems,
375 /// file systems) _should_ be possible on other operating systems,
199 /// we'll just assume that mmap() works there, for now; after all,
376 /// we'll just assume that mmap() works there, for now; after all,
200 /// _some_ functionality is better than a compile error, i.e. none at
377 /// _some_ functionality is better than a compile error, i.e. none at
201 /// all
378 /// all
202 #[cfg(not(target_os = "linux"))]
379 #[cfg(not(target_os = "linux"))]
203 pub(crate) fn is_on_nfs_mount(_path: impl AsRef<Path>) -> bool {
380 pub(crate) fn is_on_nfs_mount(_path: impl AsRef<Path>) -> bool {
204 false
381 false
205 }
382 }
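Illustrative only (not part of this changeset, and the fallback policy is an assumption): a caller might use `is_on_nfs_mount` to prefer a plain read over `mmap` when the base directory lives on NFS, where a mapped file can change or disappear underneath the mapping.

    pub(crate) fn read_index_bytes(
        vfs: &VfsImpl,
        relative_path: &Path,
    ) -> Result<Vec<u8>, HgError> {
        if is_on_nfs_mount(vfs.join(relative_path)) {
            // Plain read: slower, but immune to the file changing under the map.
            vfs.read(relative_path)
        } else {
            // `Mmap` dereferences to `[u8]`, so copy it out when an owned
            // buffer is needed.
            Ok(vfs.mmap_open(relative_path)?.to_vec())
        }
    }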
@@ -1,826 +1,826
1 // status.rs
1 // status.rs
2 //
2 //
3 // Copyright 2020, Georges Racinet <georges.racinets@octobus.net>
3 // Copyright 2020, Georges Racinet <georges.racinets@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 use crate::error::CommandError;
8 use crate::error::CommandError;
9 use crate::ui::{
9 use crate::ui::{
10 format_pattern_file_warning, print_narrow_sparse_warnings, relative_paths,
10 format_pattern_file_warning, print_narrow_sparse_warnings, relative_paths,
11 RelativePaths, Ui,
11 RelativePaths, Ui,
12 };
12 };
13 use crate::utils::path_utils::RelativizePaths;
13 use crate::utils::path_utils::RelativizePaths;
14 use clap::Arg;
14 use clap::Arg;
15 use format_bytes::format_bytes;
15 use format_bytes::format_bytes;
16 use hg::config::Config;
16 use hg::config::Config;
17 use hg::dirstate::has_exec_bit;
17 use hg::dirstate::has_exec_bit;
18 use hg::dirstate::status::StatusPath;
18 use hg::dirstate::status::StatusPath;
19 use hg::dirstate::TruncatedTimestamp;
19 use hg::dirstate::TruncatedTimestamp;
20 use hg::errors::{HgError, IoResultExt};
20 use hg::errors::{HgError, IoResultExt};
21 use hg::filepatterns::parse_pattern_args;
21 use hg::filepatterns::parse_pattern_args;
22 use hg::lock::LockError;
22 use hg::lock::LockError;
23 use hg::manifest::Manifest;
23 use hg::manifest::Manifest;
24 use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
24 use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
25 use hg::repo::Repo;
25 use hg::repo::Repo;
26 use hg::utils::debug::debug_wait_for_file;
26 use hg::utils::debug::debug_wait_for_file;
27 use hg::utils::files::{
27 use hg::utils::files::{
28 get_bytes_from_os_str, get_bytes_from_os_string, get_path_from_bytes,
28 get_bytes_from_os_str, get_bytes_from_os_string, get_path_from_bytes,
29 };
29 };
30 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
30 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
31 use hg::Revision;
31 use hg::Revision;
32 use hg::StatusError;
32 use hg::StatusError;
33 use hg::StatusOptions;
33 use hg::StatusOptions;
34 use hg::{self, narrow, sparse};
34 use hg::{self, narrow, sparse};
35 use hg::{DirstateStatus, RevlogOpenOptions};
35 use hg::{DirstateStatus, RevlogOpenOptions};
36 use hg::{PatternFileWarning, RevlogType};
36 use hg::{PatternFileWarning, RevlogType};
37 use log::info;
37 use log::info;
38 use rayon::prelude::*;
38 use rayon::prelude::*;
39 use std::borrow::Cow;
39 use std::borrow::Cow;
40 use std::io;
40 use std::io;
41 use std::mem::take;
41 use std::mem::take;
42 use std::path::PathBuf;
42 use std::path::PathBuf;
43
43
44 pub const HELP_TEXT: &str = "
44 pub const HELP_TEXT: &str = "
45 Show changed files in the working directory
45 Show changed files in the working directory
46
46
47 This is a pure Rust version of `hg status`.
47 This is a pure Rust version of `hg status`.
48
48
49 Some options might be missing; check the list below.
49 Some options might be missing; check the list below.
50 ";
50 ";
51
51
52 pub fn args() -> clap::Command {
52 pub fn args() -> clap::Command {
53 clap::command!("status")
53 clap::command!("status")
54 .alias("st")
54 .alias("st")
55 .about(HELP_TEXT)
55 .about(HELP_TEXT)
56 .arg(
56 .arg(
57 Arg::new("file")
57 Arg::new("file")
58 .value_parser(clap::value_parser!(std::ffi::OsString))
58 .value_parser(clap::value_parser!(std::ffi::OsString))
59 .help("show only these files")
59 .help("show only these files")
60 .action(clap::ArgAction::Append),
60 .action(clap::ArgAction::Append),
61 )
61 )
62 .arg(
62 .arg(
63 Arg::new("all")
63 Arg::new("all")
64 .help("show status of all files")
64 .help("show status of all files")
65 .short('A')
65 .short('A')
66 .action(clap::ArgAction::SetTrue)
66 .action(clap::ArgAction::SetTrue)
67 .long("all"),
67 .long("all"),
68 )
68 )
69 .arg(
69 .arg(
70 Arg::new("modified")
70 Arg::new("modified")
71 .help("show only modified files")
71 .help("show only modified files")
72 .short('m')
72 .short('m')
73 .action(clap::ArgAction::SetTrue)
73 .action(clap::ArgAction::SetTrue)
74 .long("modified"),
74 .long("modified"),
75 )
75 )
76 .arg(
76 .arg(
77 Arg::new("added")
77 Arg::new("added")
78 .help("show only added files")
78 .help("show only added files")
79 .short('a')
79 .short('a')
80 .action(clap::ArgAction::SetTrue)
80 .action(clap::ArgAction::SetTrue)
81 .long("added"),
81 .long("added"),
82 )
82 )
83 .arg(
83 .arg(
84 Arg::new("removed")
84 Arg::new("removed")
85 .help("show only removed files")
85 .help("show only removed files")
86 .short('r')
86 .short('r')
87 .action(clap::ArgAction::SetTrue)
87 .action(clap::ArgAction::SetTrue)
88 .long("removed"),
88 .long("removed"),
89 )
89 )
90 .arg(
90 .arg(
91 Arg::new("clean")
91 Arg::new("clean")
92 .help("show only clean files")
92 .help("show only clean files")
93 .short('c')
93 .short('c')
94 .action(clap::ArgAction::SetTrue)
94 .action(clap::ArgAction::SetTrue)
95 .long("clean"),
95 .long("clean"),
96 )
96 )
97 .arg(
97 .arg(
98 Arg::new("deleted")
98 Arg::new("deleted")
99 .help("show only deleted files")
99 .help("show only deleted files")
100 .short('d')
100 .short('d')
101 .action(clap::ArgAction::SetTrue)
101 .action(clap::ArgAction::SetTrue)
102 .long("deleted"),
102 .long("deleted"),
103 )
103 )
104 .arg(
104 .arg(
105 Arg::new("unknown")
105 Arg::new("unknown")
106 .help("show only unknown (not tracked) files")
106 .help("show only unknown (not tracked) files")
107 .short('u')
107 .short('u')
108 .action(clap::ArgAction::SetTrue)
108 .action(clap::ArgAction::SetTrue)
109 .long("unknown"),
109 .long("unknown"),
110 )
110 )
111 .arg(
111 .arg(
112 Arg::new("ignored")
112 Arg::new("ignored")
113 .help("show only ignored files")
113 .help("show only ignored files")
114 .short('i')
114 .short('i')
115 .action(clap::ArgAction::SetTrue)
115 .action(clap::ArgAction::SetTrue)
116 .long("ignored"),
116 .long("ignored"),
117 )
117 )
118 .arg(
118 .arg(
119 Arg::new("copies")
119 Arg::new("copies")
120 .help("show source of copied files (DEFAULT: ui.statuscopies)")
120 .help("show source of copied files (DEFAULT: ui.statuscopies)")
121 .short('C')
121 .short('C')
122 .action(clap::ArgAction::SetTrue)
122 .action(clap::ArgAction::SetTrue)
123 .long("copies"),
123 .long("copies"),
124 )
124 )
125 .arg(
125 .arg(
126 Arg::new("print0")
126 Arg::new("print0")
127 .help("end filenames with NUL, for use with xargs")
127 .help("end filenames with NUL, for use with xargs")
128 .short('0')
128 .short('0')
129 .action(clap::ArgAction::SetTrue)
129 .action(clap::ArgAction::SetTrue)
130 .long("print0"),
130 .long("print0"),
131 )
131 )
132 .arg(
132 .arg(
133 Arg::new("no-status")
133 Arg::new("no-status")
134 .help("hide status prefix")
134 .help("hide status prefix")
135 .short('n')
135 .short('n')
136 .action(clap::ArgAction::SetTrue)
136 .action(clap::ArgAction::SetTrue)
137 .long("no-status"),
137 .long("no-status"),
138 )
138 )
139 .arg(
139 .arg(
140 Arg::new("verbose")
140 Arg::new("verbose")
141 .help("enable additional output")
141 .help("enable additional output")
142 .short('v')
142 .short('v')
143 .action(clap::ArgAction::SetTrue)
143 .action(clap::ArgAction::SetTrue)
144 .long("verbose"),
144 .long("verbose"),
145 )
145 )
146 .arg(
146 .arg(
147 Arg::new("rev")
147 Arg::new("rev")
148 .help("show difference from/to revision")
148 .help("show difference from/to revision")
149 .long("rev")
149 .long("rev")
150 .num_args(1)
150 .num_args(1)
151 .action(clap::ArgAction::Append)
151 .action(clap::ArgAction::Append)
152 .value_name("REV"),
152 .value_name("REV"),
153 )
153 )
154 }
154 }
155
155
156 fn parse_revpair(
156 fn parse_revpair(
157 repo: &Repo,
157 repo: &Repo,
158 revs: Option<Vec<String>>,
158 revs: Option<Vec<String>>,
159 ) -> Result<Option<(Revision, Revision)>, CommandError> {
159 ) -> Result<Option<(Revision, Revision)>, CommandError> {
160 let revs = match revs {
160 let revs = match revs {
161 None => return Ok(None),
161 None => return Ok(None),
162 Some(revs) => revs,
162 Some(revs) => revs,
163 };
163 };
164 if revs.is_empty() {
164 if revs.is_empty() {
165 return Ok(None);
165 return Ok(None);
166 }
166 }
167 if revs.len() != 2 {
167 if revs.len() != 2 {
168 return Err(CommandError::unsupported("expected 0 or 2 --rev flags"));
168 return Err(CommandError::unsupported("expected 0 or 2 --rev flags"));
169 }
169 }
170
170
171 let rev1 = &revs[0];
171 let rev1 = &revs[0];
172 let rev2 = &revs[1];
172 let rev2 = &revs[1];
173 let rev1 = hg::revset::resolve_single(rev1, repo)
173 let rev1 = hg::revset::resolve_single(rev1, repo)
174 .map_err(|e| (e, rev1.as_str()))?;
174 .map_err(|e| (e, rev1.as_str()))?;
175 let rev2 = hg::revset::resolve_single(rev2, repo)
175 let rev2 = hg::revset::resolve_single(rev2, repo)
176 .map_err(|e| (e, rev2.as_str()))?;
176 .map_err(|e| (e, rev2.as_str()))?;
177 Ok(Some((rev1, rev2)))
177 Ok(Some((rev1, rev2)))
178 }
178 }
179
179
180 /// Pure data type allowing the caller to specify file states to display
180 /// Pure data type allowing the caller to specify file states to display
181 #[derive(Copy, Clone, Debug)]
181 #[derive(Copy, Clone, Debug)]
182 pub struct DisplayStates {
182 pub struct DisplayStates {
183 pub modified: bool,
183 pub modified: bool,
184 pub added: bool,
184 pub added: bool,
185 pub removed: bool,
185 pub removed: bool,
186 pub clean: bool,
186 pub clean: bool,
187 pub deleted: bool,
187 pub deleted: bool,
188 pub unknown: bool,
188 pub unknown: bool,
189 pub ignored: bool,
189 pub ignored: bool,
190 }
190 }
191
191
192 pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates {
192 pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates {
193 modified: true,
193 modified: true,
194 added: true,
194 added: true,
195 removed: true,
195 removed: true,
196 clean: false,
196 clean: false,
197 deleted: true,
197 deleted: true,
198 unknown: true,
198 unknown: true,
199 ignored: false,
199 ignored: false,
200 };
200 };
201
201
202 pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates {
202 pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates {
203 modified: true,
203 modified: true,
204 added: true,
204 added: true,
205 removed: true,
205 removed: true,
206 clean: true,
206 clean: true,
207 deleted: true,
207 deleted: true,
208 unknown: true,
208 unknown: true,
209 ignored: true,
209 ignored: true,
210 };
210 };
211
211
212 impl DisplayStates {
212 impl DisplayStates {
213 pub fn is_empty(&self) -> bool {
213 pub fn is_empty(&self) -> bool {
214 !(self.modified
214 !(self.modified
215 || self.added
215 || self.added
216 || self.removed
216 || self.removed
217 || self.clean
217 || self.clean
218 || self.deleted
218 || self.deleted
219 || self.unknown
219 || self.unknown
220 || self.ignored)
220 || self.ignored)
221 }
221 }
222 }
222 }
223
223
224 fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
224 fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
225 Ok(repo.dirstate_parents()?.is_merge())
225 Ok(repo.dirstate_parents()?.is_merge())
226 }
226 }
227
227
228 fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
228 fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
229 // These are all the known values for the [fname] argument of
229 // These are all the known values for the [fname] argument of
230 // the [addunfinished] function in [state.py]
230 // the [addunfinished] function in [state.py]
231 let known_state_files: &[&str] = &[
231 let known_state_files: &[&str] = &[
232 "bisect.state",
232 "bisect.state",
233 "graftstate",
233 "graftstate",
234 "histedit-state",
234 "histedit-state",
235 "rebasestate",
235 "rebasestate",
236 "shelvedstate",
236 "shelvedstate",
237 "transplant/journal",
237 "transplant/journal",
238 "updatestate",
238 "updatestate",
239 ];
239 ];
240 if has_unfinished_merge(repo)? {
240 if has_unfinished_merge(repo)? {
241 return Ok(true);
241 return Ok(true);
242 };
242 };
243 for f in known_state_files {
243 for f in known_state_files {
244 if repo.hg_vfs().join(f).exists() {
244 if repo.hg_vfs().join(f).exists() {
245 return Ok(true);
245 return Ok(true);
246 }
246 }
247 }
247 }
248 Ok(false)
248 Ok(false)
249 }
249 }
250
250
251 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
251 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
252 // TODO: lift these limitations
252 // TODO: lift these limitations
253 if invocation
253 if invocation
254 .config
254 .config
255 .get(b"commands", b"status.terse")
255 .get(b"commands", b"status.terse")
256 .is_some()
256 .is_some()
257 {
257 {
258 return Err(CommandError::unsupported(
258 return Err(CommandError::unsupported(
259 "status.terse is not yet supported with rhg status",
259 "status.terse is not yet supported with rhg status",
260 ));
260 ));
261 }
261 }
262
262
263 let ui = invocation.ui;
263 let ui = invocation.ui;
264 let config = invocation.config;
264 let config = invocation.config;
265 let args = invocation.subcommand_args;
265 let args = invocation.subcommand_args;
266
266
267 let revs = args.get_many::<String>("rev");
267 let revs = args.get_many::<String>("rev");
268 let print0 = args.get_flag("print0");
268 let print0 = args.get_flag("print0");
269 let verbose = args.get_flag("verbose")
269 let verbose = args.get_flag("verbose")
270 || config.get_bool(b"ui", b"verbose")?
270 || config.get_bool(b"ui", b"verbose")?
271 || config.get_bool(b"commands", b"status.verbose")?;
271 || config.get_bool(b"commands", b"status.verbose")?;
272 let verbose = verbose && !print0;
272 let verbose = verbose && !print0;
273
273
274 let all = args.get_flag("all");
274 let all = args.get_flag("all");
275 let display_states = if all {
275 let display_states = if all {
276 // TODO when implementing `--quiet`: it excludes clean files
276 // TODO when implementing `--quiet`: it excludes clean files
277 // from `--all`
277 // from `--all`
278 ALL_DISPLAY_STATES
278 ALL_DISPLAY_STATES
279 } else {
279 } else {
280 let requested = DisplayStates {
280 let requested = DisplayStates {
281 modified: args.get_flag("modified"),
281 modified: args.get_flag("modified"),
282 added: args.get_flag("added"),
282 added: args.get_flag("added"),
283 removed: args.get_flag("removed"),
283 removed: args.get_flag("removed"),
284 clean: args.get_flag("clean"),
284 clean: args.get_flag("clean"),
285 deleted: args.get_flag("deleted"),
285 deleted: args.get_flag("deleted"),
286 unknown: args.get_flag("unknown"),
286 unknown: args.get_flag("unknown"),
287 ignored: args.get_flag("ignored"),
287 ignored: args.get_flag("ignored"),
288 };
288 };
289 if requested.is_empty() {
289 if requested.is_empty() {
290 DEFAULT_DISPLAY_STATES
290 DEFAULT_DISPLAY_STATES
291 } else {
291 } else {
292 requested
292 requested
293 }
293 }
294 };
294 };
295 let no_status = args.get_flag("no-status");
295 let no_status = args.get_flag("no-status");
296 let list_copies = all
296 let list_copies = all
297 || args.get_flag("copies")
297 || args.get_flag("copies")
298 || config.get_bool(b"ui", b"statuscopies")?;
298 || config.get_bool(b"ui", b"statuscopies")?;
299
299
300 let repo = invocation.repo?;
300 let repo = invocation.repo?;
301 let revpair = parse_revpair(repo, revs.map(|i| i.cloned().collect()))?;
301 let revpair = parse_revpair(repo, revs.map(|i| i.cloned().collect()))?;
302
302
303 if verbose && has_unfinished_state(repo)? {
303 if verbose && has_unfinished_state(repo)? {
304 return Err(CommandError::unsupported(
304 return Err(CommandError::unsupported(
305 "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
305 "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
306 ));
306 ));
307 }
307 }
308
308
309 let mut dmap = repo.dirstate_map_mut()?;
309 let mut dmap = repo.dirstate_map_mut()?;
310
310
311 let check_exec = hg::checkexec::check_exec(repo.working_directory_path());
311 let check_exec = hg::checkexec::check_exec(repo.working_directory_path());
312
312
313 let options = StatusOptions {
313 let options = StatusOptions {
314 check_exec,
314 check_exec,
315 list_clean: display_states.clean,
315 list_clean: display_states.clean,
316 list_unknown: display_states.unknown,
316 list_unknown: display_states.unknown,
317 list_ignored: display_states.ignored,
317 list_ignored: display_states.ignored,
318 list_copies,
318 list_copies,
319 collect_traversed_dirs: false,
319 collect_traversed_dirs: false,
320 };
320 };
321
321
322 type StatusResult<'a> =
322 type StatusResult<'a> =
323 Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
323 Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
324
324
325 let relative_status = config
325 let relative_status = config
326 .get_option(b"commands", b"status.relative")?
326 .get_option(b"commands", b"status.relative")?
327 .expect("commands.status.relative should have a default value");
327 .expect("commands.status.relative should have a default value");
328
328
329 let relativize_paths = relative_status || {
329 let relativize_paths = relative_status || {
330 // See in Python code with `getuipathfn` usage in `commands.py`.
330 // See in Python code with `getuipathfn` usage in `commands.py`.
331 let legacy_relative_behavior = args.contains_id("file");
331 let legacy_relative_behavior = args.contains_id("file");
332 match relative_paths(invocation.config)? {
332 match relative_paths(invocation.config)? {
333 RelativePaths::Legacy => legacy_relative_behavior,
333 RelativePaths::Legacy => legacy_relative_behavior,
334 RelativePaths::Bool(v) => v,
334 RelativePaths::Bool(v) => v,
335 }
335 }
336 };
336 };
337
337
338 let mut output = DisplayStatusPaths {
338 let mut output = DisplayStatusPaths {
339 ui,
339 ui,
340 no_status,
340 no_status,
341 relativize: if relativize_paths {
341 relativize: if relativize_paths {
342 Some(RelativizePaths::new(repo)?)
342 Some(RelativizePaths::new(repo)?)
343 } else {
343 } else {
344 None
344 None
345 },
345 },
346 print0,
346 print0,
347 };
347 };
348
348
349 let after_status = |res: StatusResult| -> Result<_, CommandError> {
349 let after_status = |res: StatusResult| -> Result<_, CommandError> {
350 let (mut ds_status, pattern_warnings) = res?;
350 let (mut ds_status, pattern_warnings) = res?;
351 for warning in pattern_warnings {
351 for warning in pattern_warnings {
352 ui.write_stderr(&format_pattern_file_warning(&warning, repo))?;
352 ui.write_stderr(&format_pattern_file_warning(&warning, repo))?;
353 }
353 }
354
354
355 for (path, error) in take(&mut ds_status.bad) {
355 for (path, error) in take(&mut ds_status.bad) {
356 let error = match error {
356 let error = match error {
357 hg::BadMatch::OsError(code) => {
357 hg::BadMatch::OsError(code) => {
358 std::io::Error::from_raw_os_error(code).to_string()
358 std::io::Error::from_raw_os_error(code).to_string()
359 }
359 }
360 hg::BadMatch::BadType(ty) => {
360 hg::BadMatch::BadType(ty) => {
361 format!("unsupported file type (type is {})", ty)
361 format!("unsupported file type (type is {})", ty)
362 }
362 }
363 };
363 };
364 ui.write_stderr(&format_bytes!(
364 ui.write_stderr(&format_bytes!(
365 b"{}: {}\n",
365 b"{}: {}\n",
366 path.as_bytes(),
366 path.as_bytes(),
367 error.as_bytes()
367 error.as_bytes()
368 ))?
368 ))?
369 }
369 }
370 if !ds_status.unsure.is_empty() {
370 if !ds_status.unsure.is_empty() {
371 info!(
371 info!(
372 "Files to be rechecked by retrieval from filelog: {:?}",
372 "Files to be rechecked by retrieval from filelog: {:?}",
373 ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>()
373 ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>()
374 );
374 );
375 }
375 }
376 let mut fixup = Vec::new();
376 let mut fixup = Vec::new();
377 if !ds_status.unsure.is_empty()
377 if !ds_status.unsure.is_empty()
378 && (display_states.modified || display_states.clean)
378 && (display_states.modified || display_states.clean)
379 {
379 {
380 let p1 = repo.dirstate_parents()?.p1;
380 let p1 = repo.dirstate_parents()?.p1;
381 let manifest = repo.manifest_for_node(p1).map_err(|e| {
381 let manifest = repo.manifest_for_node(p1).map_err(|e| {
382 CommandError::from((e, &*format!("{:x}", p1.short())))
382 CommandError::from((e, &*format!("{:x}", p1.short())))
383 })?;
383 })?;
384 let working_directory_vfs = repo.working_directory_vfs();
384 let working_directory_vfs = repo.working_directory_vfs();
385 let store_vfs = repo.store_vfs();
385 let store_vfs = repo.store_vfs();
386 let revlog_open_options =
386 let revlog_open_options =
387 repo.default_revlog_options(RevlogType::Manifestlog)?;
387 repo.default_revlog_options(RevlogType::Manifestlog)?;
388 let res: Vec<_> = take(&mut ds_status.unsure)
388 let res: Vec<_> = take(&mut ds_status.unsure)
389 .into_par_iter()
389 .into_par_iter()
390 .map(|to_check| {
390 .map(|to_check| {
391 // The compiler seems to get a bit confused with complex
391 // The compiler seems to get a bit confused with complex
392 // inference when using a parallel iterator + map
392 // inference when using a parallel iterator + map
393 // + map_err + collect, so let's just inline some of the
393 // + map_err + collect, so let's just inline some of the
394 // logic.
394 // logic.
395 match unsure_is_modified(
395 match unsure_is_modified(
396 working_directory_vfs,
396 &working_directory_vfs,
397 store_vfs,
397 &store_vfs,
398 check_exec,
398 check_exec,
399 &manifest,
399 &manifest,
400 &to_check.path,
400 &to_check.path,
401 revlog_open_options,
401 revlog_open_options,
402 ) {
402 ) {
403 Err(HgError::IoError { .. }) => {
403 Err(HgError::IoError { .. }) => {
404 // IO errors most likely stem from the file being
404 // IO errors most likely stem from the file being
405 // deleted even though we know it's in the
405 // deleted even though we know it's in the
406 // dirstate.
406 // dirstate.
407 Ok((to_check, UnsureOutcome::Deleted))
407 Ok((to_check, UnsureOutcome::Deleted))
408 }
408 }
409 Ok(outcome) => Ok((to_check, outcome)),
409 Ok(outcome) => Ok((to_check, outcome)),
410 Err(e) => Err(e),
410 Err(e) => Err(e),
411 }
411 }
412 })
412 })
413 .collect::<Result<_, _>>()?;
413 .collect::<Result<_, _>>()?;
414 for (status_path, outcome) in res.into_iter() {
414 for (status_path, outcome) in res.into_iter() {
415 match outcome {
415 match outcome {
416 UnsureOutcome::Clean => {
416 UnsureOutcome::Clean => {
417 if display_states.clean {
417 if display_states.clean {
418 ds_status.clean.push(status_path.clone());
418 ds_status.clean.push(status_path.clone());
419 }
419 }
420 fixup.push(status_path.path.into_owned())
420 fixup.push(status_path.path.into_owned())
421 }
421 }
422 UnsureOutcome::Modified => {
422 UnsureOutcome::Modified => {
423 if display_states.modified {
423 if display_states.modified {
424 ds_status.modified.push(status_path);
424 ds_status.modified.push(status_path);
425 }
425 }
426 }
426 }
427 UnsureOutcome::Deleted => {
427 UnsureOutcome::Deleted => {
428 if display_states.deleted {
428 if display_states.deleted {
429 ds_status.deleted.push(status_path);
429 ds_status.deleted.push(status_path);
430 }
430 }
431 }
431 }
432 }
432 }
433 }
433 }
434 }
434 }
435
435
436 let dirstate_write_needed = ds_status.dirty;
436 let dirstate_write_needed = ds_status.dirty;
437 let filesystem_time_at_status_start =
437 let filesystem_time_at_status_start =
438 ds_status.filesystem_time_at_status_start;
438 ds_status.filesystem_time_at_status_start;
439
439
440 output.output(display_states, ds_status)?;
440 output.output(display_states, ds_status)?;
441
441
442 Ok((
442 Ok((
443 fixup,
443 fixup,
444 dirstate_write_needed,
444 dirstate_write_needed,
445 filesystem_time_at_status_start,
445 filesystem_time_at_status_start,
446 ))
446 ))
447 };
447 };
448 let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
448 let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
449
449
450 if let Some((rev1, rev2)) = revpair {
450 if let Some((rev1, rev2)) = revpair {
451 let mut ds_status = DirstateStatus::default();
451 let mut ds_status = DirstateStatus::default();
452 if list_copies {
452 if list_copies {
453 return Err(CommandError::unsupported(
453 return Err(CommandError::unsupported(
454 "status --rev --rev with copy information is not implemented yet",
454 "status --rev --rev with copy information is not implemented yet",
455 ));
455 ));
456 }
456 }
457
457
458 let stat = hg::operations::status_rev_rev_no_copies(
458 let stat = hg::operations::status_rev_rev_no_copies(
459 repo,
459 repo,
460 rev1,
460 rev1,
461 rev2,
461 rev2,
462 narrow_matcher,
462 narrow_matcher,
463 )?;
463 )?;
464 for entry in stat.iter() {
464 for entry in stat.iter() {
465 let (path, status) = entry?;
465 let (path, status) = entry?;
466 let path = StatusPath {
466 let path = StatusPath {
467 path: Cow::Borrowed(path),
467 path: Cow::Borrowed(path),
468 copy_source: None,
468 copy_source: None,
469 };
469 };
470 match status {
470 match status {
471 hg::operations::DiffStatus::Removed => {
471 hg::operations::DiffStatus::Removed => {
472 if display_states.removed {
472 if display_states.removed {
473 ds_status.removed.push(path)
473 ds_status.removed.push(path)
474 }
474 }
475 }
475 }
476 hg::operations::DiffStatus::Added => {
476 hg::operations::DiffStatus::Added => {
477 if display_states.added {
477 if display_states.added {
478 ds_status.added.push(path)
478 ds_status.added.push(path)
479 }
479 }
480 }
480 }
481 hg::operations::DiffStatus::Modified => {
481 hg::operations::DiffStatus::Modified => {
482 if display_states.modified {
482 if display_states.modified {
483 ds_status.modified.push(path)
483 ds_status.modified.push(path)
484 }
484 }
485 }
485 }
486 hg::operations::DiffStatus::Matching => {
486 hg::operations::DiffStatus::Matching => {
487 if display_states.clean {
487 if display_states.clean {
488 ds_status.clean.push(path)
488 ds_status.clean.push(path)
489 }
489 }
490 }
490 }
491 }
491 }
492 }
492 }
493 output.output(display_states, ds_status)?;
493 output.output(display_states, ds_status)?;
494 return Ok(());
494 return Ok(());
495 }
495 }
496
496
497 let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
497 let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
498 let matcher = match (repo.has_narrow(), repo.has_sparse()) {
498 let matcher = match (repo.has_narrow(), repo.has_sparse()) {
499 (true, true) => {
499 (true, true) => {
500 Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
500 Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
501 }
501 }
502 (true, false) => narrow_matcher,
502 (true, false) => narrow_matcher,
503 (false, true) => sparse_matcher,
503 (false, true) => sparse_matcher,
504 (false, false) => Box::new(AlwaysMatcher),
504 (false, false) => Box::new(AlwaysMatcher),
505 };
505 };
506 let matcher = match args.get_many::<std::ffi::OsString>("file") {
506 let matcher = match args.get_many::<std::ffi::OsString>("file") {
507 None => matcher,
507 None => matcher,
508 Some(files) => {
508 Some(files) => {
509 let patterns: Vec<Vec<u8>> = files
509 let patterns: Vec<Vec<u8>> = files
510 .filter(|s| !s.is_empty())
510 .filter(|s| !s.is_empty())
511 .map(get_bytes_from_os_str)
511 .map(get_bytes_from_os_str)
512 .collect();
512 .collect();
513 for file in &patterns {
513 for file in &patterns {
514 if file.starts_with(b"set:") {
514 if file.starts_with(b"set:") {
515 return Err(CommandError::unsupported("fileset"));
515 return Err(CommandError::unsupported("fileset"));
516 }
516 }
517 }
517 }
518 let cwd = hg::utils::current_dir()?;
518 let cwd = hg::utils::current_dir()?;
519 let root = repo.working_directory_path();
519 let root = repo.working_directory_path();
520 let ignore_patterns = parse_pattern_args(patterns, &cwd, root)?;
520 let ignore_patterns = parse_pattern_args(patterns, &cwd, root)?;
521 let files_matcher =
521 let files_matcher =
522 hg::matchers::PatternMatcher::new(ignore_patterns)?;
522 hg::matchers::PatternMatcher::new(ignore_patterns)?;
523 Box::new(IntersectionMatcher::new(
523 Box::new(IntersectionMatcher::new(
524 Box::new(files_matcher),
524 Box::new(files_matcher),
525 matcher,
525 matcher,
526 ))
526 ))
527 }
527 }
528 };
528 };
529
529
530 print_narrow_sparse_warnings(
530 print_narrow_sparse_warnings(
531 &narrow_warnings,
531 &narrow_warnings,
532 &sparse_warnings,
532 &sparse_warnings,
533 ui,
533 ui,
534 repo,
534 repo,
535 )?;
535 )?;
536 let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
536 let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
537 dmap.with_status(
537 dmap.with_status(
538 matcher.as_ref(),
538 matcher.as_ref(),
539 repo.working_directory_path().to_owned(),
539 repo.working_directory_path().to_owned(),
540 ignore_files(repo, config),
540 ignore_files(repo, config),
541 options,
541 options,
542 after_status,
542 after_status,
543 )?;
543 )?;
544
544
545 // Development config option to test write races
545 // Development config option to test write races
546 if let Err(e) =
546 if let Err(e) =
547 debug_wait_for_file(config, "status.pre-dirstate-write-file")
547 debug_wait_for_file(config, "status.pre-dirstate-write-file")
548 {
548 {
549 ui.write_stderr(e.as_bytes()).ok();
549 ui.write_stderr(e.as_bytes()).ok();
550 }
550 }
551
551
552 if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
552 if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
553 && !dirstate_write_needed
553 && !dirstate_write_needed
554 {
554 {
555 // Nothing to update
555 // Nothing to update
556 return Ok(());
556 return Ok(());
557 }
557 }
558
558
559 // Update the dirstate on disk if we can
559 // Update the dirstate on disk if we can
560 let with_lock_result =
560 let with_lock_result =
561 repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
561 repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
562 if let Some(mtime_boundary) = filesystem_time_at_status_start {
562 if let Some(mtime_boundary) = filesystem_time_at_status_start {
563 for hg_path in fixup {
563 for hg_path in fixup {
564 use std::os::unix::fs::MetadataExt;
564 use std::os::unix::fs::MetadataExt;
565 let fs_path = hg_path_to_path_buf(&hg_path)
565 let fs_path = hg_path_to_path_buf(&hg_path)
566 .expect("HgPath conversion");
566 .expect("HgPath conversion");
567 // Specifically do not reuse `fs_metadata` from
567 // Specifically do not reuse `fs_metadata` from
568 // `unsure_is_modified`, which was needed before reading
568 // `unsure_is_modified`, which was needed before reading
569 // contents. Here we access metadata again after reading
569 // contents. Here we access metadata again after reading
570 // content, in case it changed in the meantime.
570 // content, in case it changed in the meantime.
571 let metadata_res = repo
571 let metadata_res = repo
572 .working_directory_vfs()
572 .working_directory_vfs()
573 .symlink_metadata(&fs_path);
573 .symlink_metadata(&fs_path);
574 let fs_metadata = match metadata_res {
574 let fs_metadata = match metadata_res {
575 Ok(meta) => meta,
575 Ok(meta) => meta,
576 Err(err) => match err {
576 Err(err) => match err {
577 HgError::IoError { .. } => {
577 HgError::IoError { .. } => {
578 // The file has probably been deleted. In any
578 // The file has probably been deleted. In any
579 // case, it was in the dirstate before, so
579 // case, it was in the dirstate before, so
580 // let's ignore the error.
580 // let's ignore the error.
581 continue;
581 continue;
582 }
582 }
583 _ => return Err(err.into()),
583 _ => return Err(err.into()),
584 },
584 },
585 };
585 };
586 if let Some(mtime) =
586 if let Some(mtime) =
587 TruncatedTimestamp::for_reliable_mtime_of(
587 TruncatedTimestamp::for_reliable_mtime_of(
588 &fs_metadata,
588 &fs_metadata,
589 &mtime_boundary,
589 &mtime_boundary,
590 )
590 )
591 .when_reading_file(&fs_path)?
591 .when_reading_file(&fs_path)?
592 {
592 {
593 let mode = fs_metadata.mode();
593 let mode = fs_metadata.mode();
594 let size = fs_metadata.len();
594 let size = fs_metadata.len();
595 dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
595 dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
596 dirstate_write_needed = true
596 dirstate_write_needed = true
597 }
597 }
598 }
598 }
599 }
599 }
600 drop(dmap); // Avoid "already mutably borrowed" RefCell panics
600 drop(dmap); // Avoid "already mutably borrowed" RefCell panics
601 if dirstate_write_needed {
601 if dirstate_write_needed {
602 repo.write_dirstate()?
602 repo.write_dirstate()?
603 }
603 }
604 Ok(())
604 Ok(())
605 });
605 });
606 match with_lock_result {
606 match with_lock_result {
607 Ok(closure_result) => closure_result?,
607 Ok(closure_result) => closure_result?,
608 Err(LockError::AlreadyHeld) => {
608 Err(LockError::AlreadyHeld) => {
609 // Not updating the dirstate is not ideal but not critical:
609 // Not updating the dirstate is not ideal but not critical:
610 // don’t keep our caller waiting until some other Mercurial
610 // don’t keep our caller waiting until some other Mercurial
611 // process releases the lock.
611 // process releases the lock.
612 log::info!("not writing dirstate from `status`: lock is held")
612 log::info!("not writing dirstate from `status`: lock is held")
613 }
613 }
614 Err(LockError::Other(HgError::IoError { error, .. }))
614 Err(LockError::Other(HgError::IoError { error, .. }))
615 if error.kind() == io::ErrorKind::PermissionDenied
615 if error.kind() == io::ErrorKind::PermissionDenied
616 || match error.raw_os_error() {
616 || match error.raw_os_error() {
617 None => false,
617 None => false,
618 Some(errno) => libc::EROFS == errno,
618 Some(errno) => libc::EROFS == errno,
619 } =>
619 } =>
620 {
620 {
621 // `hg status` on a read-only repository is fine
621 // `hg status` on a read-only repository is fine
622 }
622 }
623 Err(LockError::Other(error)) => {
623 Err(LockError::Other(error)) => {
624 // Report other I/O errors
624 // Report other I/O errors
625 Err(error)?
625 Err(error)?
626 }
626 }
627 }
627 }
628 Ok(())
628 Ok(())
629 }
629 }
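For context on the write-back step above: a fixup entry is only recorded as clean in the dirstate when its mtime is unambiguously older than the moment the status scan started, so that a write racing with `hg status` cannot be cached as clean. Below is a minimal standalone sketch of that idea using only the standard library; the `reliable_mtime` helper and its one-second safety margin are illustrative assumptions, not the actual `TruncatedTimestamp::for_reliable_mtime_of` logic.

use std::fs;
use std::io;
use std::path::Path;
use std::time::{Duration, SystemTime};

/// Return Some(mtime) only when the file's mtime is comfortably older than
/// the time the scan started; otherwise treat it as ambiguous and do not
/// cache it. Illustrative helper only, not the rhg implementation.
fn reliable_mtime(
    path: &Path,
    scan_start: SystemTime,
) -> io::Result<Option<SystemTime>> {
    let metadata = fs::symlink_metadata(path)?;
    let mtime = metadata.modified()?;
    // One-second margin: with coarse filesystem timestamps, a write that
    // happened "during" the scan could otherwise look like it predates it.
    let boundary = scan_start - Duration::from_secs(1);
    Ok(if mtime < boundary { Some(mtime) } else { None })
}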
630
630
631 fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> {
631 fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> {
632 let mut ignore_files = Vec::new();
632 let mut ignore_files = Vec::new();
633 let repo_ignore = repo.working_directory_vfs().join(".hgignore");
633 let repo_ignore = repo.working_directory_vfs().join(".hgignore");
634 if repo_ignore.exists() {
634 if repo_ignore.exists() {
635 ignore_files.push(repo_ignore)
635 ignore_files.push(repo_ignore)
636 }
636 }
637 for (key, value) in config.iter_section(b"ui") {
637 for (key, value) in config.iter_section(b"ui") {
638 if key == b"ignore" || key.starts_with(b"ignore.") {
638 if key == b"ignore" || key.starts_with(b"ignore.") {
639 let path = get_path_from_bytes(value);
639 let path = get_path_from_bytes(value);
640 let path = shellexpand::path::full_with_context_no_errors(
640 let path = shellexpand::path::full_with_context_no_errors(
641 path,
641 path,
642 home::home_dir,
642 home::home_dir,
643 |s| std::env::var(s).ok(),
643 |s| std::env::var(s).ok(),
644 );
644 );
645 let joined = repo.working_directory_path().join(path);
645 let joined = repo.working_directory_path().join(path);
646 ignore_files.push(joined);
646 ignore_files.push(joined);
647 }
647 }
648 }
648 }
649 ignore_files
649 ignore_files
650 }
650 }
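As a usage note for `ignore_files` above: each `ui.ignore`/`ui.ignore.*` value is tilde- and environment-expanded before being joined to the working directory. The sketch below isolates just that expansion step, reusing the same `shellexpand` and `home` calls as the function; the helper name and argument shapes are illustrative, not part of rhg.

use std::path::{Path, PathBuf};

/// Expand "~" and "$VARS" in a configured ignore path, then anchor it at the
/// repository working directory, mirroring the loop in `ignore_files`.
fn expand_ignore_value(working_directory: &Path, value: &Path) -> PathBuf {
    let expanded = shellexpand::path::full_with_context_no_errors(
        value,
        home::home_dir,
        |s| std::env::var(s).ok(),
    );
    working_directory.join(expanded)
}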
651
651
652 struct DisplayStatusPaths<'a> {
652 struct DisplayStatusPaths<'a> {
653 ui: &'a Ui,
653 ui: &'a Ui,
654 no_status: bool,
654 no_status: bool,
655 relativize: Option<RelativizePaths>,
655 relativize: Option<RelativizePaths>,
656 print0: bool,
656 print0: bool,
657 }
657 }
658
658
659 impl DisplayStatusPaths<'_> {
659 impl DisplayStatusPaths<'_> {
660 // Probably more elegant to use a Deref or Borrow trait rather than
660 // Probably more elegant to use a Deref or Borrow trait rather than
661 // hardcode HgPathBuf, but probably not really useful at this point
661 // hardcode HgPathBuf, but probably not really useful at this point
662 fn display(
662 fn display(
663 &self,
663 &self,
664 status_prefix: &[u8],
664 status_prefix: &[u8],
665 label: &'static str,
665 label: &'static str,
666 mut paths: Vec<StatusPath<'_>>,
666 mut paths: Vec<StatusPath<'_>>,
667 ) -> Result<(), CommandError> {
667 ) -> Result<(), CommandError> {
668 paths.sort_unstable();
668 paths.sort_unstable();
669 // TODO: get the stdout lock once for the whole loop
669 // TODO: get the stdout lock once for the whole loop
670 // instead of in each write
670 // instead of in each write
671 for StatusPath { path, copy_source } in paths {
671 for StatusPath { path, copy_source } in paths {
672 let relative_path;
672 let relative_path;
673 let relative_source;
673 let relative_source;
674 let (path, copy_source) = if let Some(relativize) =
674 let (path, copy_source) = if let Some(relativize) =
675 &self.relativize
675 &self.relativize
676 {
676 {
677 relative_path = relativize.relativize(&path);
677 relative_path = relativize.relativize(&path);
678 relative_source =
678 relative_source =
679 copy_source.as_ref().map(|s| relativize.relativize(s));
679 copy_source.as_ref().map(|s| relativize.relativize(s));
680 (&*relative_path, relative_source.as_deref())
680 (&*relative_path, relative_source.as_deref())
681 } else {
681 } else {
682 (path.as_bytes(), copy_source.as_ref().map(|s| s.as_bytes()))
682 (path.as_bytes(), copy_source.as_ref().map(|s| s.as_bytes()))
683 };
683 };
684 // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
684 // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
685 // in order to stream to stdout instead of allocating an
685 // in order to stream to stdout instead of allocating an
686 // intermediate `Vec<u8>`.
686 // intermediate `Vec<u8>`.
687 if !self.no_status {
687 if !self.no_status {
688 self.ui.write_stdout_labelled(status_prefix, label)?
688 self.ui.write_stdout_labelled(status_prefix, label)?
689 }
689 }
690 let linebreak = if self.print0 { b"\x00" } else { b"\n" };
690 let linebreak = if self.print0 { b"\x00" } else { b"\n" };
691 self.ui.write_stdout_labelled(
691 self.ui.write_stdout_labelled(
692 &format_bytes!(b"{}{}", path, linebreak),
692 &format_bytes!(b"{}{}", path, linebreak),
693 label,
693 label,
694 )?;
694 )?;
695 if let Some(source) = copy_source.filter(|_| !self.no_status) {
695 if let Some(source) = copy_source.filter(|_| !self.no_status) {
696 let label = "status.copied";
696 let label = "status.copied";
697 self.ui.write_stdout_labelled(
697 self.ui.write_stdout_labelled(
698 &format_bytes!(b" {}{}", source, linebreak),
698 &format_bytes!(b" {}{}", source, linebreak),
699 label,
699 label,
700 )?
700 )?
701 }
701 }
702 }
702 }
703 Ok(())
703 Ok(())
704 }
704 }
705
705
706 fn output(
706 fn output(
707 &mut self,
707 &mut self,
708 display_states: DisplayStates,
708 display_states: DisplayStates,
709 ds_status: DirstateStatus,
709 ds_status: DirstateStatus,
710 ) -> Result<(), CommandError> {
710 ) -> Result<(), CommandError> {
711 if display_states.modified {
711 if display_states.modified {
712 self.display(b"M ", "status.modified", ds_status.modified)?;
712 self.display(b"M ", "status.modified", ds_status.modified)?;
713 }
713 }
714 if display_states.added {
714 if display_states.added {
715 self.display(b"A ", "status.added", ds_status.added)?;
715 self.display(b"A ", "status.added", ds_status.added)?;
716 }
716 }
717 if display_states.removed {
717 if display_states.removed {
718 self.display(b"R ", "status.removed", ds_status.removed)?;
718 self.display(b"R ", "status.removed", ds_status.removed)?;
719 }
719 }
720 if display_states.deleted {
720 if display_states.deleted {
721 self.display(b"! ", "status.deleted", ds_status.deleted)?;
721 self.display(b"! ", "status.deleted", ds_status.deleted)?;
722 }
722 }
723 if display_states.unknown {
723 if display_states.unknown {
724 self.display(b"? ", "status.unknown", ds_status.unknown)?;
724 self.display(b"? ", "status.unknown", ds_status.unknown)?;
725 }
725 }
726 if display_states.ignored {
726 if display_states.ignored {
727 self.display(b"I ", "status.ignored", ds_status.ignored)?;
727 self.display(b"I ", "status.ignored", ds_status.ignored)?;
728 }
728 }
729 if display_states.clean {
729 if display_states.clean {
730 self.display(b"C ", "status.clean", ds_status.clean)?;
730 self.display(b"C ", "status.clean", ds_status.clean)?;
731 }
731 }
732 Ok(())
732 Ok(())
733 }
733 }
734 }
734 }
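A side note on the `print0` field used above: as with `hg status --print0`, each entry is terminated with a NUL byte instead of a newline so that unusual filenames stay unambiguous for consumers such as `xargs -0`. The following self-contained sketch shows only that framing choice; the `render_entry` helper is illustrative and not part of rhg, which streams through its `Ui` type instead.

/// Append one status entry to a byte buffer, NUL- or newline-terminated.
/// Illustrative helper only; rhg writes through `Ui::write_stdout_labelled`.
fn render_entry(
    out: &mut Vec<u8>,
    status_prefix: &[u8],
    path: &[u8],
    print0: bool,
) {
    out.extend_from_slice(status_prefix);
    out.extend_from_slice(path);
    out.push(if print0 { b'\0' } else { b'\n' });
}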
735
735
736 /// Outcome of the additional check for an ambiguous tracked file
736 /// Outcome of the additional check for an ambiguous tracked file
737 enum UnsureOutcome {
737 enum UnsureOutcome {
738 /// The file is actually clean
738 /// The file is actually clean
739 Clean,
739 Clean,
740 /// The file has been modified
740 /// The file has been modified
741 Modified,
741 Modified,
742 /// The file was deleted on disk (or became another type of fs entry)
742 /// The file was deleted on disk (or became another type of fs entry)
743 Deleted,
743 Deleted,
744 }
744 }
745
745
746 /// Check if a file is modified by comparing the repo store and the filesystem.
746 /// Check if a file is modified by comparing the repo store and the filesystem.
747 ///
747 ///
748 /// This is meant to be used for files that the dirstate cannot resolve,
748 /// This is meant to be used for files that the dirstate cannot resolve,
749 /// due to time resolution limits.
749 /// due to time resolution limits.
750 fn unsure_is_modified(
750 fn unsure_is_modified(
751 working_directory_vfs: hg::vfs::Vfs,
751 working_directory_vfs: &hg::vfs::VfsImpl,
752 store_vfs: hg::vfs::Vfs,
752 store_vfs: &hg::vfs::VfsImpl,
753 check_exec: bool,
753 check_exec: bool,
754 manifest: &Manifest,
754 manifest: &Manifest,
755 hg_path: &HgPath,
755 hg_path: &HgPath,
756 revlog_open_options: RevlogOpenOptions,
756 revlog_open_options: RevlogOpenOptions,
757 ) -> Result<UnsureOutcome, HgError> {
757 ) -> Result<UnsureOutcome, HgError> {
758 let vfs = working_directory_vfs;
758 let vfs = working_directory_vfs;
759 let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
759 let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
760 let fs_metadata = vfs.symlink_metadata(&fs_path)?;
760 let fs_metadata = vfs.symlink_metadata(&fs_path)?;
761 let is_symlink = fs_metadata.file_type().is_symlink();
761 let is_symlink = fs_metadata.file_type().is_symlink();
762
762
763 let entry = manifest
763 let entry = manifest
764 .find_by_path(hg_path)?
764 .find_by_path(hg_path)?
765 .expect("ambiguous file not in p1");
765 .expect("ambiguous file not in p1");
766
766
767 // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
767 // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
768 // dirstate
768 // dirstate
769 let fs_flags = if is_symlink {
769 let fs_flags = if is_symlink {
770 Some(b'l')
770 Some(b'l')
771 } else if check_exec && has_exec_bit(&fs_metadata) {
771 } else if check_exec && has_exec_bit(&fs_metadata) {
772 Some(b'x')
772 Some(b'x')
773 } else {
773 } else {
774 None
774 None
775 };
775 };
776
776
777 let entry_flags = if check_exec {
777 let entry_flags = if check_exec {
778 entry.flags
778 entry.flags
779 } else if entry.flags == Some(b'x') {
779 } else if entry.flags == Some(b'x') {
780 None
780 None
781 } else {
781 } else {
782 entry.flags
782 entry.flags
783 };
783 };
784
784
785 if entry_flags != fs_flags {
785 if entry_flags != fs_flags {
786 return Ok(UnsureOutcome::Modified);
786 return Ok(UnsureOutcome::Modified);
787 }
787 }
788 let filelog = hg::filelog::Filelog::open_vfs(
788 let filelog = hg::filelog::Filelog::open_vfs(
789 &store_vfs,
789 store_vfs,
790 hg_path,
790 hg_path,
791 revlog_open_options,
791 revlog_open_options,
792 )?;
792 )?;
793 let fs_len = fs_metadata.len();
793 let fs_len = fs_metadata.len();
794 let file_node = entry.node_id()?;
794 let file_node = entry.node_id()?;
795 let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
795 let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
796 HgError::corrupted(format!(
796 HgError::corrupted(format!(
797 "filelog {:?} missing node {:?} from manifest",
797 "filelog {:?} missing node {:?} from manifest",
798 hg_path, file_node
798 hg_path, file_node
799 ))
799 ))
800 })?;
800 })?;
801 if filelog_entry.file_data_len_not_equal_to(fs_len) {
801 if filelog_entry.file_data_len_not_equal_to(fs_len) {
802 // No need to read file contents:
802 // No need to read file contents:
803 // it cannot be equal if it has a different length.
803 // it cannot be equal if it has a different length.
804 return Ok(UnsureOutcome::Modified);
804 return Ok(UnsureOutcome::Modified);
805 }
805 }
806
806
807 let p1_filelog_data = filelog_entry.data()?;
807 let p1_filelog_data = filelog_entry.data()?;
808 let p1_contents = p1_filelog_data.file_data()?;
808 let p1_contents = p1_filelog_data.file_data()?;
809 if p1_contents.len() as u64 != fs_len {
809 if p1_contents.len() as u64 != fs_len {
810 // No need to read file contents:
810 // No need to read file contents:
811 // it cannot be equal if it has a different length.
811 // it cannot be equal if it has a different length.
812 return Ok(UnsureOutcome::Modified);
812 return Ok(UnsureOutcome::Modified);
813 }
813 }
814
814
815 let fs_contents = if is_symlink {
815 let fs_contents = if is_symlink {
816 get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
816 get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
817 } else {
817 } else {
818 vfs.read(fs_path)?
818 vfs.read(fs_path)?
819 };
819 };
820
820
821 Ok(if p1_contents != &*fs_contents {
821 Ok(if p1_contents != &*fs_contents {
822 UnsureOutcome::Modified
822 UnsureOutcome::Modified
823 } else {
823 } else {
824 UnsureOutcome::Clean
824 UnsureOutcome::Clean
825 })
825 })
826 }
826 }
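To recap the short-circuit order in `unsure_is_modified`: flags are compared first, then the stored and on-disk lengths, and the working-copy contents are only read as a last resort. The sketch below is a condensed, illustrative restatement of that structure with the types simplified to closures; the real function works on manifest entries, filelog revisions, and a Vfs, not plain byte buffers.

/// Illustrative restatement of the check order only, not the real signature.
fn is_modified_sketch(
    flags_match: bool,
    stored_len: u64,
    fs_len: u64,
    read_store: impl FnOnce() -> Vec<u8>,
    read_fs: impl FnOnce() -> Vec<u8>,
) -> bool {
    if !flags_match {
        return true; // symlink or exec bit changed: modified, nothing to read
    }
    if stored_len != fs_len {
        return true; // different sizes cannot hold equal contents
    }
    read_store() != read_fs() // only now pay for reading both contents
}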