dirstate-v2: Drop parent directory cache when removing a dirstate node...
Simon Sapin
r48141:9d58e54b default
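The change makes the recursive removal in `drop_file` report whether it just removed a child node from `nodes`, and clears the parent's cached directory mtime when it did. A minimal sketch of that invalidation step, written here as a hypothetical standalone helper (the patch itself inlines this check in `recur`, right after the recursive call returns):

// Illustrative helper only, not part of the patch: the same check is
// inlined in `recur` below. A `CachedDirectory` mtime records that the
// directory's listing matched the dirstate at that time; removing a child
// node makes that record stale, so it has to be dropped.
fn drop_cached_directory_mtime(node: &mut Node) {
    if let NodeData::CachedDirectory { .. } = &node.data {
        node.data = NodeData::None
    }
}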
@@ -1,1095 +1,1113 @@
1 use bytes_cast::BytesCast;
1 use bytes_cast::BytesCast;
2 use micro_timer::timed;
2 use micro_timer::timed;
3 use std::borrow::Cow;
3 use std::borrow::Cow;
4 use std::convert::TryInto;
4 use std::convert::TryInto;
5 use std::path::PathBuf;
5 use std::path::PathBuf;
6
6
7 use super::on_disk;
7 use super::on_disk;
8 use super::on_disk::DirstateV2ParseError;
8 use super::on_disk::DirstateV2ParseError;
9 use super::path_with_basename::WithBasename;
9 use super::path_with_basename::WithBasename;
10 use crate::dirstate::parsers::pack_entry;
10 use crate::dirstate::parsers::pack_entry;
11 use crate::dirstate::parsers::packed_entry_size;
11 use crate::dirstate::parsers::packed_entry_size;
12 use crate::dirstate::parsers::parse_dirstate_entries;
12 use crate::dirstate::parsers::parse_dirstate_entries;
13 use crate::dirstate::parsers::Timestamp;
13 use crate::dirstate::parsers::Timestamp;
14 use crate::matchers::Matcher;
14 use crate::matchers::Matcher;
15 use crate::utils::hg_path::{HgPath, HgPathBuf};
15 use crate::utils::hg_path::{HgPath, HgPathBuf};
16 use crate::CopyMapIter;
16 use crate::CopyMapIter;
17 use crate::DirstateEntry;
17 use crate::DirstateEntry;
18 use crate::DirstateError;
18 use crate::DirstateError;
19 use crate::DirstateParents;
19 use crate::DirstateParents;
20 use crate::DirstateStatus;
20 use crate::DirstateStatus;
21 use crate::EntryState;
21 use crate::EntryState;
22 use crate::FastHashMap;
22 use crate::FastHashMap;
23 use crate::PatternFileWarning;
23 use crate::PatternFileWarning;
24 use crate::StateMapIter;
24 use crate::StateMapIter;
25 use crate::StatusError;
25 use crate::StatusError;
26 use crate::StatusOptions;
26 use crate::StatusOptions;
27
27
28 pub struct DirstateMap<'on_disk> {
28 pub struct DirstateMap<'on_disk> {
29 /// Contents of the `.hg/dirstate` file
29 /// Contents of the `.hg/dirstate` file
30 pub(super) on_disk: &'on_disk [u8],
30 pub(super) on_disk: &'on_disk [u8],
31
31
32 pub(super) root: ChildNodes<'on_disk>,
32 pub(super) root: ChildNodes<'on_disk>,
33
33
34 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
34 /// Number of nodes anywhere in the tree that have `.entry.is_some()`.
35 pub(super) nodes_with_entry_count: u32,
35 pub(super) nodes_with_entry_count: u32,
36
36
37 /// Number of nodes anywhere in the tree that have
37 /// Number of nodes anywhere in the tree that have
38 /// `.copy_source.is_some()`.
38 /// `.copy_source.is_some()`.
39 pub(super) nodes_with_copy_source_count: u32,
39 pub(super) nodes_with_copy_source_count: u32,
40 }
40 }
41
41
42 /// Using a plain `HgPathBuf` of the full path from the repository root as a
42 /// Using a plain `HgPathBuf` of the full path from the repository root as a
43 /// map key would also work: all paths in a given map have the same parent
43 /// map key would also work: all paths in a given map have the same parent
44 /// path, so comparing full paths gives the same result as comparing base
44 /// path, so comparing full paths gives the same result as comparing base
45 /// names. However `HashMap` would waste time always re-hashing the same
45 /// names. However `HashMap` would waste time always re-hashing the same
46 /// string prefix.
46 /// string prefix.
47 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
47 pub(super) type NodeKey<'on_disk> = WithBasename<Cow<'on_disk, HgPath>>;
48
48
49 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
49 /// Similar to `&'tree Cow<'on_disk, HgPath>`, but can also be returned
50 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
50 /// for on-disk nodes that don’t actually have a `Cow` to borrow.
51 pub(super) enum BorrowedPath<'tree, 'on_disk> {
51 pub(super) enum BorrowedPath<'tree, 'on_disk> {
52 InMemory(&'tree HgPathBuf),
52 InMemory(&'tree HgPathBuf),
53 OnDisk(&'on_disk HgPath),
53 OnDisk(&'on_disk HgPath),
54 }
54 }
55
55
56 pub(super) enum ChildNodes<'on_disk> {
56 pub(super) enum ChildNodes<'on_disk> {
57 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
57 InMemory(FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
58 OnDisk(&'on_disk [on_disk::Node]),
58 OnDisk(&'on_disk [on_disk::Node]),
59 }
59 }
60
60
61 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
61 pub(super) enum ChildNodesRef<'tree, 'on_disk> {
62 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
62 InMemory(&'tree FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>),
63 OnDisk(&'on_disk [on_disk::Node]),
63 OnDisk(&'on_disk [on_disk::Node]),
64 }
64 }
65
65
66 pub(super) enum NodeRef<'tree, 'on_disk> {
66 pub(super) enum NodeRef<'tree, 'on_disk> {
67 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
67 InMemory(&'tree NodeKey<'on_disk>, &'tree Node<'on_disk>),
68 OnDisk(&'on_disk on_disk::Node),
68 OnDisk(&'on_disk on_disk::Node),
69 }
69 }
70
70
71 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
71 impl<'tree, 'on_disk> BorrowedPath<'tree, 'on_disk> {
72 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
72 pub fn detach_from_tree(&self) -> Cow<'on_disk, HgPath> {
73 match *self {
73 match *self {
74 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
74 BorrowedPath::InMemory(in_memory) => Cow::Owned(in_memory.clone()),
75 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
75 BorrowedPath::OnDisk(on_disk) => Cow::Borrowed(on_disk),
76 }
76 }
77 }
77 }
78 }
78 }
79
79
80 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
80 impl<'tree, 'on_disk> std::ops::Deref for BorrowedPath<'tree, 'on_disk> {
81 type Target = HgPath;
81 type Target = HgPath;
82
82
83 fn deref(&self) -> &HgPath {
83 fn deref(&self) -> &HgPath {
84 match *self {
84 match *self {
85 BorrowedPath::InMemory(in_memory) => in_memory,
85 BorrowedPath::InMemory(in_memory) => in_memory,
86 BorrowedPath::OnDisk(on_disk) => on_disk,
86 BorrowedPath::OnDisk(on_disk) => on_disk,
87 }
87 }
88 }
88 }
89 }
89 }
90
90
91 impl Default for ChildNodes<'_> {
91 impl Default for ChildNodes<'_> {
92 fn default() -> Self {
92 fn default() -> Self {
93 ChildNodes::InMemory(Default::default())
93 ChildNodes::InMemory(Default::default())
94 }
94 }
95 }
95 }
96
96
97 impl<'on_disk> ChildNodes<'on_disk> {
97 impl<'on_disk> ChildNodes<'on_disk> {
98 pub(super) fn as_ref<'tree>(
98 pub(super) fn as_ref<'tree>(
99 &'tree self,
99 &'tree self,
100 ) -> ChildNodesRef<'tree, 'on_disk> {
100 ) -> ChildNodesRef<'tree, 'on_disk> {
101 match self {
101 match self {
102 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
102 ChildNodes::InMemory(nodes) => ChildNodesRef::InMemory(nodes),
103 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
103 ChildNodes::OnDisk(nodes) => ChildNodesRef::OnDisk(nodes),
104 }
104 }
105 }
105 }
106
106
107 pub(super) fn is_empty(&self) -> bool {
107 pub(super) fn is_empty(&self) -> bool {
108 match self {
108 match self {
109 ChildNodes::InMemory(nodes) => nodes.is_empty(),
109 ChildNodes::InMemory(nodes) => nodes.is_empty(),
110 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
110 ChildNodes::OnDisk(nodes) => nodes.is_empty(),
111 }
111 }
112 }
112 }
113
113
114 pub(super) fn make_mut(
114 pub(super) fn make_mut(
115 &mut self,
115 &mut self,
116 on_disk: &'on_disk [u8],
116 on_disk: &'on_disk [u8],
117 ) -> Result<
117 ) -> Result<
118 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
118 &mut FastHashMap<NodeKey<'on_disk>, Node<'on_disk>>,
119 DirstateV2ParseError,
119 DirstateV2ParseError,
120 > {
120 > {
121 match self {
121 match self {
122 ChildNodes::InMemory(nodes) => Ok(nodes),
122 ChildNodes::InMemory(nodes) => Ok(nodes),
123 ChildNodes::OnDisk(nodes) => {
123 ChildNodes::OnDisk(nodes) => {
124 let nodes = nodes
124 let nodes = nodes
125 .iter()
125 .iter()
126 .map(|node| {
126 .map(|node| {
127 Ok((
127 Ok((
128 node.path(on_disk)?,
128 node.path(on_disk)?,
129 node.to_in_memory_node(on_disk)?,
129 node.to_in_memory_node(on_disk)?,
130 ))
130 ))
131 })
131 })
132 .collect::<Result<_, _>>()?;
132 .collect::<Result<_, _>>()?;
133 *self = ChildNodes::InMemory(nodes);
133 *self = ChildNodes::InMemory(nodes);
134 match self {
134 match self {
135 ChildNodes::InMemory(nodes) => Ok(nodes),
135 ChildNodes::InMemory(nodes) => Ok(nodes),
136 ChildNodes::OnDisk(_) => unreachable!(),
136 ChildNodes::OnDisk(_) => unreachable!(),
137 }
137 }
138 }
138 }
139 }
139 }
140 }
140 }
141 }
141 }
142
142
143 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
143 impl<'tree, 'on_disk> ChildNodesRef<'tree, 'on_disk> {
144 pub(super) fn get(
144 pub(super) fn get(
145 &self,
145 &self,
146 base_name: &HgPath,
146 base_name: &HgPath,
147 on_disk: &'on_disk [u8],
147 on_disk: &'on_disk [u8],
148 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
148 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
149 match self {
149 match self {
150 ChildNodesRef::InMemory(nodes) => Ok(nodes
150 ChildNodesRef::InMemory(nodes) => Ok(nodes
151 .get_key_value(base_name)
151 .get_key_value(base_name)
152 .map(|(k, v)| NodeRef::InMemory(k, v))),
152 .map(|(k, v)| NodeRef::InMemory(k, v))),
153 ChildNodesRef::OnDisk(nodes) => {
153 ChildNodesRef::OnDisk(nodes) => {
154 let mut parse_result = Ok(());
154 let mut parse_result = Ok(());
155 let search_result = nodes.binary_search_by(|node| {
155 let search_result = nodes.binary_search_by(|node| {
156 match node.base_name(on_disk) {
156 match node.base_name(on_disk) {
157 Ok(node_base_name) => node_base_name.cmp(base_name),
157 Ok(node_base_name) => node_base_name.cmp(base_name),
158 Err(e) => {
158 Err(e) => {
159 parse_result = Err(e);
159 parse_result = Err(e);
160 // Dummy comparison result, `search_result` won’t
160 // Dummy comparison result, `search_result` won’t
161 // be used since `parse_result` is an error
161 // be used since `parse_result` is an error
162 std::cmp::Ordering::Equal
162 std::cmp::Ordering::Equal
163 }
163 }
164 }
164 }
165 });
165 });
166 parse_result.map(|()| {
166 parse_result.map(|()| {
167 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
167 search_result.ok().map(|i| NodeRef::OnDisk(&nodes[i]))
168 })
168 })
169 }
169 }
170 }
170 }
171 }
171 }
172
172
173 /// Iterate in undefined order
173 /// Iterate in undefined order
174 pub(super) fn iter(
174 pub(super) fn iter(
175 &self,
175 &self,
176 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
176 ) -> impl Iterator<Item = NodeRef<'tree, 'on_disk>> {
177 match self {
177 match self {
178 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
178 ChildNodesRef::InMemory(nodes) => itertools::Either::Left(
179 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
179 nodes.iter().map(|(k, v)| NodeRef::InMemory(k, v)),
180 ),
180 ),
181 ChildNodesRef::OnDisk(nodes) => {
181 ChildNodesRef::OnDisk(nodes) => {
182 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
182 itertools::Either::Right(nodes.iter().map(NodeRef::OnDisk))
183 }
183 }
184 }
184 }
185 }
185 }
186
186
187 /// Iterate in parallel in undefined order
187 /// Iterate in parallel in undefined order
188 pub(super) fn par_iter(
188 pub(super) fn par_iter(
189 &self,
189 &self,
190 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
190 ) -> impl rayon::iter::ParallelIterator<Item = NodeRef<'tree, 'on_disk>>
191 {
191 {
192 use rayon::prelude::*;
192 use rayon::prelude::*;
193 match self {
193 match self {
194 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
194 ChildNodesRef::InMemory(nodes) => rayon::iter::Either::Left(
195 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
195 nodes.par_iter().map(|(k, v)| NodeRef::InMemory(k, v)),
196 ),
196 ),
197 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
197 ChildNodesRef::OnDisk(nodes) => rayon::iter::Either::Right(
198 nodes.par_iter().map(NodeRef::OnDisk),
198 nodes.par_iter().map(NodeRef::OnDisk),
199 ),
199 ),
200 }
200 }
201 }
201 }
202
202
203 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
203 pub(super) fn sorted(&self) -> Vec<NodeRef<'tree, 'on_disk>> {
204 match self {
204 match self {
205 ChildNodesRef::InMemory(nodes) => {
205 ChildNodesRef::InMemory(nodes) => {
206 let mut vec: Vec<_> = nodes
206 let mut vec: Vec<_> = nodes
207 .iter()
207 .iter()
208 .map(|(k, v)| NodeRef::InMemory(k, v))
208 .map(|(k, v)| NodeRef::InMemory(k, v))
209 .collect();
209 .collect();
210 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
210 fn sort_key<'a>(node: &'a NodeRef) -> &'a HgPath {
211 match node {
211 match node {
212 NodeRef::InMemory(path, _node) => path.base_name(),
212 NodeRef::InMemory(path, _node) => path.base_name(),
213 NodeRef::OnDisk(_) => unreachable!(),
213 NodeRef::OnDisk(_) => unreachable!(),
214 }
214 }
215 }
215 }
216 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
216 // `sort_unstable_by_key` doesn’t allow keys borrowing from the
217 // value: https://github.com/rust-lang/rust/issues/34162
217 // value: https://github.com/rust-lang/rust/issues/34162
218 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
218 vec.sort_unstable_by(|a, b| sort_key(a).cmp(sort_key(b)));
219 vec
219 vec
220 }
220 }
221 ChildNodesRef::OnDisk(nodes) => {
221 ChildNodesRef::OnDisk(nodes) => {
222 // Nodes on disk are already sorted
222 // Nodes on disk are already sorted
223 nodes.iter().map(NodeRef::OnDisk).collect()
223 nodes.iter().map(NodeRef::OnDisk).collect()
224 }
224 }
225 }
225 }
226 }
226 }
227 }
227 }
228
228
229 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
229 impl<'tree, 'on_disk> NodeRef<'tree, 'on_disk> {
230 pub(super) fn full_path(
230 pub(super) fn full_path(
231 &self,
231 &self,
232 on_disk: &'on_disk [u8],
232 on_disk: &'on_disk [u8],
233 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
233 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
234 match self {
234 match self {
235 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
235 NodeRef::InMemory(path, _node) => Ok(path.full_path()),
236 NodeRef::OnDisk(node) => node.full_path(on_disk),
236 NodeRef::OnDisk(node) => node.full_path(on_disk),
237 }
237 }
238 }
238 }
239
239
240 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
240 /// Returns a `BorrowedPath`, which can be turned into a `Cow<'on_disk,
241 /// HgPath>` detached from `'tree`
241 /// HgPath>` detached from `'tree`
242 pub(super) fn full_path_borrowed(
242 pub(super) fn full_path_borrowed(
243 &self,
243 &self,
244 on_disk: &'on_disk [u8],
244 on_disk: &'on_disk [u8],
245 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
245 ) -> Result<BorrowedPath<'tree, 'on_disk>, DirstateV2ParseError> {
246 match self {
246 match self {
247 NodeRef::InMemory(path, _node) => match path.full_path() {
247 NodeRef::InMemory(path, _node) => match path.full_path() {
248 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
248 Cow::Borrowed(on_disk) => Ok(BorrowedPath::OnDisk(on_disk)),
249 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
249 Cow::Owned(in_memory) => Ok(BorrowedPath::InMemory(in_memory)),
250 },
250 },
251 NodeRef::OnDisk(node) => {
251 NodeRef::OnDisk(node) => {
252 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
252 Ok(BorrowedPath::OnDisk(node.full_path(on_disk)?))
253 }
253 }
254 }
254 }
255 }
255 }
256
256
257 pub(super) fn base_name(
257 pub(super) fn base_name(
258 &self,
258 &self,
259 on_disk: &'on_disk [u8],
259 on_disk: &'on_disk [u8],
260 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
260 ) -> Result<&'tree HgPath, DirstateV2ParseError> {
261 match self {
261 match self {
262 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
262 NodeRef::InMemory(path, _node) => Ok(path.base_name()),
263 NodeRef::OnDisk(node) => node.base_name(on_disk),
263 NodeRef::OnDisk(node) => node.base_name(on_disk),
264 }
264 }
265 }
265 }
266
266
267 pub(super) fn children(
267 pub(super) fn children(
268 &self,
268 &self,
269 on_disk: &'on_disk [u8],
269 on_disk: &'on_disk [u8],
270 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
270 ) -> Result<ChildNodesRef<'tree, 'on_disk>, DirstateV2ParseError> {
271 match self {
271 match self {
272 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
272 NodeRef::InMemory(_path, node) => Ok(node.children.as_ref()),
273 NodeRef::OnDisk(node) => {
273 NodeRef::OnDisk(node) => {
274 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
274 Ok(ChildNodesRef::OnDisk(node.children(on_disk)?))
275 }
275 }
276 }
276 }
277 }
277 }
278
278
279 pub(super) fn has_copy_source(&self) -> bool {
279 pub(super) fn has_copy_source(&self) -> bool {
280 match self {
280 match self {
281 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
281 NodeRef::InMemory(_path, node) => node.copy_source.is_some(),
282 NodeRef::OnDisk(node) => node.has_copy_source(),
282 NodeRef::OnDisk(node) => node.has_copy_source(),
283 }
283 }
284 }
284 }
285
285
286 pub(super) fn copy_source(
286 pub(super) fn copy_source(
287 &self,
287 &self,
288 on_disk: &'on_disk [u8],
288 on_disk: &'on_disk [u8],
289 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
289 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
290 match self {
290 match self {
291 NodeRef::InMemory(_path, node) => {
291 NodeRef::InMemory(_path, node) => {
292 Ok(node.copy_source.as_ref().map(|s| &**s))
292 Ok(node.copy_source.as_ref().map(|s| &**s))
293 }
293 }
294 NodeRef::OnDisk(node) => node.copy_source(on_disk),
294 NodeRef::OnDisk(node) => node.copy_source(on_disk),
295 }
295 }
296 }
296 }
297
297
298 pub(super) fn entry(
298 pub(super) fn entry(
299 &self,
299 &self,
300 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
300 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
301 match self {
301 match self {
302 NodeRef::InMemory(_path, node) => {
302 NodeRef::InMemory(_path, node) => {
303 Ok(node.data.as_entry().copied())
303 Ok(node.data.as_entry().copied())
304 }
304 }
305 NodeRef::OnDisk(node) => node.entry(),
305 NodeRef::OnDisk(node) => node.entry(),
306 }
306 }
307 }
307 }
308
308
309 pub(super) fn state(
309 pub(super) fn state(
310 &self,
310 &self,
311 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
311 ) -> Result<Option<EntryState>, DirstateV2ParseError> {
312 match self {
312 match self {
313 NodeRef::InMemory(_path, node) => {
313 NodeRef::InMemory(_path, node) => {
314 Ok(node.data.as_entry().map(|entry| entry.state))
314 Ok(node.data.as_entry().map(|entry| entry.state))
315 }
315 }
316 NodeRef::OnDisk(node) => node.state(),
316 NodeRef::OnDisk(node) => node.state(),
317 }
317 }
318 }
318 }
319
319
320 pub(super) fn cached_directory_mtime(
320 pub(super) fn cached_directory_mtime(
321 &self,
321 &self,
322 ) -> Option<&'tree on_disk::Timestamp> {
322 ) -> Option<&'tree on_disk::Timestamp> {
323 match self {
323 match self {
324 NodeRef::InMemory(_path, node) => match &node.data {
324 NodeRef::InMemory(_path, node) => match &node.data {
325 NodeData::CachedDirectory { mtime } => Some(mtime),
325 NodeData::CachedDirectory { mtime } => Some(mtime),
326 _ => None,
326 _ => None,
327 },
327 },
328 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
328 NodeRef::OnDisk(node) => node.cached_directory_mtime(),
329 }
329 }
330 }
330 }
331
331
332 pub(super) fn tracked_descendants_count(&self) -> u32 {
332 pub(super) fn tracked_descendants_count(&self) -> u32 {
333 match self {
333 match self {
334 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
334 NodeRef::InMemory(_path, node) => node.tracked_descendants_count,
335 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
335 NodeRef::OnDisk(node) => node.tracked_descendants_count.get(),
336 }
336 }
337 }
337 }
338 }
338 }
339
339
340 /// Represents a file or a directory
340 /// Represents a file or a directory
341 #[derive(Default)]
341 #[derive(Default)]
342 pub(super) struct Node<'on_disk> {
342 pub(super) struct Node<'on_disk> {
343 pub(super) data: NodeData,
343 pub(super) data: NodeData,
344
344
345 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
345 pub(super) copy_source: Option<Cow<'on_disk, HgPath>>,
346
346
347 pub(super) children: ChildNodes<'on_disk>,
347 pub(super) children: ChildNodes<'on_disk>,
348
348
349 /// How many descendants of this node (not counting the node itself) are tracked files
349 /// How many descendants of this node (not counting the node itself) are tracked files
350 pub(super) tracked_descendants_count: u32,
350 pub(super) tracked_descendants_count: u32,
351 }
351 }
352
352
353 pub(super) enum NodeData {
353 pub(super) enum NodeData {
354 Entry(DirstateEntry),
354 Entry(DirstateEntry),
355 CachedDirectory { mtime: on_disk::Timestamp },
355 CachedDirectory { mtime: on_disk::Timestamp },
356 None,
356 None,
357 }
357 }
358
358
359 impl Default for NodeData {
359 impl Default for NodeData {
360 fn default() -> Self {
360 fn default() -> Self {
361 NodeData::None
361 NodeData::None
362 }
362 }
363 }
363 }
364
364
365 impl NodeData {
365 impl NodeData {
366 fn has_entry(&self) -> bool {
366 fn has_entry(&self) -> bool {
367 match self {
367 match self {
368 NodeData::Entry(_) => true,
368 NodeData::Entry(_) => true,
369 _ => false,
369 _ => false,
370 }
370 }
371 }
371 }
372
372
373 fn as_entry(&self) -> Option<&DirstateEntry> {
373 fn as_entry(&self) -> Option<&DirstateEntry> {
374 match self {
374 match self {
375 NodeData::Entry(entry) => Some(entry),
375 NodeData::Entry(entry) => Some(entry),
376 _ => None,
376 _ => None,
377 }
377 }
378 }
378 }
379 }
379 }
380
380
381 impl<'on_disk> DirstateMap<'on_disk> {
381 impl<'on_disk> DirstateMap<'on_disk> {
382 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
382 pub(super) fn empty(on_disk: &'on_disk [u8]) -> Self {
383 Self {
383 Self {
384 on_disk,
384 on_disk,
385 root: ChildNodes::default(),
385 root: ChildNodes::default(),
386 nodes_with_entry_count: 0,
386 nodes_with_entry_count: 0,
387 nodes_with_copy_source_count: 0,
387 nodes_with_copy_source_count: 0,
388 }
388 }
389 }
389 }
390
390
391 #[timed]
391 #[timed]
392 pub fn new_v2(
392 pub fn new_v2(
393 on_disk: &'on_disk [u8],
393 on_disk: &'on_disk [u8],
394 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
394 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
395 Ok(on_disk::read(on_disk)?)
395 Ok(on_disk::read(on_disk)?)
396 }
396 }
397
397
398 #[timed]
398 #[timed]
399 pub fn new_v1(
399 pub fn new_v1(
400 on_disk: &'on_disk [u8],
400 on_disk: &'on_disk [u8],
401 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
401 ) -> Result<(Self, Option<DirstateParents>), DirstateError> {
402 let mut map = Self::empty(on_disk);
402 let mut map = Self::empty(on_disk);
403 if map.on_disk.is_empty() {
403 if map.on_disk.is_empty() {
404 return Ok((map, None));
404 return Ok((map, None));
405 }
405 }
406
406
407 let parents = parse_dirstate_entries(
407 let parents = parse_dirstate_entries(
408 map.on_disk,
408 map.on_disk,
409 |path, entry, copy_source| {
409 |path, entry, copy_source| {
410 let tracked = entry.state.is_tracked();
410 let tracked = entry.state.is_tracked();
411 let node = Self::get_or_insert_node(
411 let node = Self::get_or_insert_node(
412 map.on_disk,
412 map.on_disk,
413 &mut map.root,
413 &mut map.root,
414 path,
414 path,
415 WithBasename::to_cow_borrowed,
415 WithBasename::to_cow_borrowed,
416 |ancestor| {
416 |ancestor| {
417 if tracked {
417 if tracked {
418 ancestor.tracked_descendants_count += 1
418 ancestor.tracked_descendants_count += 1
419 }
419 }
420 },
420 },
421 )?;
421 )?;
422 assert!(
422 assert!(
423 !node.data.has_entry(),
423 !node.data.has_entry(),
424 "duplicate dirstate entry in read"
424 "duplicate dirstate entry in read"
425 );
425 );
426 assert!(
426 assert!(
427 node.copy_source.is_none(),
427 node.copy_source.is_none(),
428 "duplicate dirstate entry in read"
428 "duplicate dirstate entry in read"
429 );
429 );
430 node.data = NodeData::Entry(*entry);
430 node.data = NodeData::Entry(*entry);
431 node.copy_source = copy_source.map(Cow::Borrowed);
431 node.copy_source = copy_source.map(Cow::Borrowed);
432 map.nodes_with_entry_count += 1;
432 map.nodes_with_entry_count += 1;
433 if copy_source.is_some() {
433 if copy_source.is_some() {
434 map.nodes_with_copy_source_count += 1
434 map.nodes_with_copy_source_count += 1
435 }
435 }
436 Ok(())
436 Ok(())
437 },
437 },
438 )?;
438 )?;
439 let parents = Some(parents.clone());
439 let parents = Some(parents.clone());
440
440
441 Ok((map, parents))
441 Ok((map, parents))
442 }
442 }
443
443
444 fn get_node<'tree>(
444 fn get_node<'tree>(
445 &'tree self,
445 &'tree self,
446 path: &HgPath,
446 path: &HgPath,
447 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
447 ) -> Result<Option<NodeRef<'tree, 'on_disk>>, DirstateV2ParseError> {
448 let mut children = self.root.as_ref();
448 let mut children = self.root.as_ref();
449 let mut components = path.components();
449 let mut components = path.components();
450 let mut component =
450 let mut component =
451 components.next().expect("expected at least one component");
451 components.next().expect("expected at least one component");
452 loop {
452 loop {
453 if let Some(child) = children.get(component, self.on_disk)? {
453 if let Some(child) = children.get(component, self.on_disk)? {
454 if let Some(next_component) = components.next() {
454 if let Some(next_component) = components.next() {
455 component = next_component;
455 component = next_component;
456 children = child.children(self.on_disk)?;
456 children = child.children(self.on_disk)?;
457 } else {
457 } else {
458 return Ok(Some(child));
458 return Ok(Some(child));
459 }
459 }
460 } else {
460 } else {
461 return Ok(None);
461 return Ok(None);
462 }
462 }
463 }
463 }
464 }
464 }
465
465
466 /// Returns a mutable reference to the node at `path` if it exists
466 /// Returns a mutable reference to the node at `path` if it exists
467 ///
467 ///
468 /// This takes `root` instead of `&mut self` so that callers can mutate
468 /// This takes `root` instead of `&mut self` so that callers can mutate
469 /// other fields while the returned borrow is still valid
469 /// other fields while the returned borrow is still valid
470 fn get_node_mut<'tree>(
470 fn get_node_mut<'tree>(
471 on_disk: &'on_disk [u8],
471 on_disk: &'on_disk [u8],
472 root: &'tree mut ChildNodes<'on_disk>,
472 root: &'tree mut ChildNodes<'on_disk>,
473 path: &HgPath,
473 path: &HgPath,
474 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
474 ) -> Result<Option<&'tree mut Node<'on_disk>>, DirstateV2ParseError> {
475 let mut children = root;
475 let mut children = root;
476 let mut components = path.components();
476 let mut components = path.components();
477 let mut component =
477 let mut component =
478 components.next().expect("expected at least one component");
478 components.next().expect("expected at least one component");
479 loop {
479 loop {
480 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
480 if let Some(child) = children.make_mut(on_disk)?.get_mut(component)
481 {
481 {
482 if let Some(next_component) = components.next() {
482 if let Some(next_component) = components.next() {
483 component = next_component;
483 component = next_component;
484 children = &mut child.children;
484 children = &mut child.children;
485 } else {
485 } else {
486 return Ok(Some(child));
486 return Ok(Some(child));
487 }
487 }
488 } else {
488 } else {
489 return Ok(None);
489 return Ok(None);
490 }
490 }
491 }
491 }
492 }
492 }
493
493
494 pub(super) fn get_or_insert_node<'tree, 'path>(
494 pub(super) fn get_or_insert_node<'tree, 'path>(
495 on_disk: &'on_disk [u8],
495 on_disk: &'on_disk [u8],
496 root: &'tree mut ChildNodes<'on_disk>,
496 root: &'tree mut ChildNodes<'on_disk>,
497 path: &'path HgPath,
497 path: &'path HgPath,
498 to_cow: impl Fn(
498 to_cow: impl Fn(
499 WithBasename<&'path HgPath>,
499 WithBasename<&'path HgPath>,
500 ) -> WithBasename<Cow<'on_disk, HgPath>>,
500 ) -> WithBasename<Cow<'on_disk, HgPath>>,
501 mut each_ancestor: impl FnMut(&mut Node),
501 mut each_ancestor: impl FnMut(&mut Node),
502 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
502 ) -> Result<&'tree mut Node<'on_disk>, DirstateV2ParseError> {
503 let mut child_nodes = root;
503 let mut child_nodes = root;
504 let mut inclusive_ancestor_paths =
504 let mut inclusive_ancestor_paths =
505 WithBasename::inclusive_ancestors_of(path);
505 WithBasename::inclusive_ancestors_of(path);
506 let mut ancestor_path = inclusive_ancestor_paths
506 let mut ancestor_path = inclusive_ancestor_paths
507 .next()
507 .next()
508 .expect("expected at least one inclusive ancestor");
508 .expect("expected at least one inclusive ancestor");
509 loop {
509 loop {
510 // TODO: can we avoid allocating an owned key in cases where the
510 // TODO: can we avoid allocating an owned key in cases where the
511 // map already contains that key, without introducing double
511 // map already contains that key, without introducing double
512 // lookup?
512 // lookup?
513 let child_node = child_nodes
513 let child_node = child_nodes
514 .make_mut(on_disk)?
514 .make_mut(on_disk)?
515 .entry(to_cow(ancestor_path))
515 .entry(to_cow(ancestor_path))
516 .or_default();
516 .or_default();
517 if let Some(next) = inclusive_ancestor_paths.next() {
517 if let Some(next) = inclusive_ancestor_paths.next() {
518 each_ancestor(child_node);
518 each_ancestor(child_node);
519 ancestor_path = next;
519 ancestor_path = next;
520 child_nodes = &mut child_node.children;
520 child_nodes = &mut child_node.children;
521 } else {
521 } else {
522 return Ok(child_node);
522 return Ok(child_node);
523 }
523 }
524 }
524 }
525 }
525 }
526
526
527 fn add_or_remove_file(
527 fn add_or_remove_file(
528 &mut self,
528 &mut self,
529 path: &HgPath,
529 path: &HgPath,
530 old_state: EntryState,
530 old_state: EntryState,
531 new_entry: DirstateEntry,
531 new_entry: DirstateEntry,
532 ) -> Result<(), DirstateV2ParseError> {
532 ) -> Result<(), DirstateV2ParseError> {
533 let tracked_count_increment =
533 let tracked_count_increment =
534 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
534 match (old_state.is_tracked(), new_entry.state.is_tracked()) {
535 (false, true) => 1,
535 (false, true) => 1,
536 (true, false) => -1,
536 (true, false) => -1,
537 _ => 0,
537 _ => 0,
538 };
538 };
539
539
540 let node = Self::get_or_insert_node(
540 let node = Self::get_or_insert_node(
541 self.on_disk,
541 self.on_disk,
542 &mut self.root,
542 &mut self.root,
543 path,
543 path,
544 WithBasename::to_cow_owned,
544 WithBasename::to_cow_owned,
545 |ancestor| {
545 |ancestor| {
546 // We can’t use `+= increment` because the counter is unsigned,
546 // We can’t use `+= increment` because the counter is unsigned,
547 // and we want debug builds to detect accidental underflow
547 // and we want debug builds to detect accidental underflow
548 // through zero
548 // through zero
549 match tracked_count_increment {
549 match tracked_count_increment {
550 1 => ancestor.tracked_descendants_count += 1,
550 1 => ancestor.tracked_descendants_count += 1,
551 -1 => ancestor.tracked_descendants_count -= 1,
551 -1 => ancestor.tracked_descendants_count -= 1,
552 _ => {}
552 _ => {}
553 }
553 }
554 },
554 },
555 )?;
555 )?;
556 if !node.data.has_entry() {
556 if !node.data.has_entry() {
557 self.nodes_with_entry_count += 1
557 self.nodes_with_entry_count += 1
558 }
558 }
559 node.data = NodeData::Entry(new_entry);
559 node.data = NodeData::Entry(new_entry);
560 Ok(())
560 Ok(())
561 }
561 }
562
562
563 fn iter_nodes<'tree>(
563 fn iter_nodes<'tree>(
564 &'tree self,
564 &'tree self,
565 ) -> impl Iterator<
565 ) -> impl Iterator<
566 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
566 Item = Result<NodeRef<'tree, 'on_disk>, DirstateV2ParseError>,
567 > + 'tree {
567 > + 'tree {
568 // Depth first tree traversal.
568 // Depth first tree traversal.
569 //
569 //
570 // If we could afford internal iteration and recursion,
570 // If we could afford internal iteration and recursion,
571 // this would look like:
571 // this would look like:
572 //
572 //
573 // ```
573 // ```
574 // fn traverse_children(
574 // fn traverse_children(
575 // children: &ChildNodes,
575 // children: &ChildNodes,
576 // each: &mut impl FnMut(&Node),
576 // each: &mut impl FnMut(&Node),
577 // ) {
577 // ) {
578 // for child in children.values() {
578 // for child in children.values() {
579 // traverse_children(&child.children, each);
579 // traverse_children(&child.children, each);
580 // each(child);
580 // each(child);
581 // }
581 // }
582 // }
582 // }
583 // ```
583 // ```
584 //
584 //
585 // However we want an external iterator and therefore can’t use the
585 // However we want an external iterator and therefore can’t use the
586 // call stack. Use an explicit stack instead:
586 // call stack. Use an explicit stack instead:
587 let mut stack = Vec::new();
587 let mut stack = Vec::new();
588 let mut iter = self.root.as_ref().iter();
588 let mut iter = self.root.as_ref().iter();
589 std::iter::from_fn(move || {
589 std::iter::from_fn(move || {
590 while let Some(child_node) = iter.next() {
590 while let Some(child_node) = iter.next() {
591 let children = match child_node.children(self.on_disk) {
591 let children = match child_node.children(self.on_disk) {
592 Ok(children) => children,
592 Ok(children) => children,
593 Err(error) => return Some(Err(error)),
593 Err(error) => return Some(Err(error)),
594 };
594 };
595 // Pseudo-recursion
595 // Pseudo-recursion
596 let new_iter = children.iter();
596 let new_iter = children.iter();
597 let old_iter = std::mem::replace(&mut iter, new_iter);
597 let old_iter = std::mem::replace(&mut iter, new_iter);
598 stack.push((child_node, old_iter));
598 stack.push((child_node, old_iter));
599 }
599 }
600 // Found the end of a `children.iter()` iterator.
600 // Found the end of a `children.iter()` iterator.
601 if let Some((child_node, next_iter)) = stack.pop() {
601 if let Some((child_node, next_iter)) = stack.pop() {
602 // "Return" from pseudo-recursion by restoring state from the
602 // "Return" from pseudo-recursion by restoring state from the
603 // explicit stack
603 // explicit stack
604 iter = next_iter;
604 iter = next_iter;
605
605
606 Some(Ok(child_node))
606 Some(Ok(child_node))
607 } else {
607 } else {
608 // Reached the bottom of the stack, we’re done
608 // Reached the bottom of the stack, we’re done
609 None
609 None
610 }
610 }
611 })
611 })
612 }
612 }
613
613
614 fn clear_known_ambiguous_mtimes(
614 fn clear_known_ambiguous_mtimes(
615 &mut self,
615 &mut self,
616 paths: &[impl AsRef<HgPath>],
616 paths: &[impl AsRef<HgPath>],
617 ) -> Result<(), DirstateV2ParseError> {
617 ) -> Result<(), DirstateV2ParseError> {
618 for path in paths {
618 for path in paths {
619 if let Some(node) = Self::get_node_mut(
619 if let Some(node) = Self::get_node_mut(
620 self.on_disk,
620 self.on_disk,
621 &mut self.root,
621 &mut self.root,
622 path.as_ref(),
622 path.as_ref(),
623 )? {
623 )? {
624 if let NodeData::Entry(entry) = &mut node.data {
624 if let NodeData::Entry(entry) = &mut node.data {
625 entry.clear_mtime();
625 entry.clear_mtime();
626 }
626 }
627 }
627 }
628 }
628 }
629 Ok(())
629 Ok(())
630 }
630 }
631
631
632 /// Return a fallible iterator of full paths of nodes that have an
632 /// Return a fallible iterator of full paths of nodes that have an
633 /// `entry` for which the given `predicate` returns true.
633 /// `entry` for which the given `predicate` returns true.
634 ///
634 ///
635 /// Fallibility means that each iterator item is a `Result`, which may
635 /// Fallibility means that each iterator item is a `Result`, which may
636 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
636 /// indicate a parse error of the on-disk dirstate-v2 format. Such errors
637 /// should only happen if Mercurial is buggy or a repository is corrupted.
637 /// should only happen if Mercurial is buggy or a repository is corrupted.
638 fn filter_full_paths<'tree>(
638 fn filter_full_paths<'tree>(
639 &'tree self,
639 &'tree self,
640 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
640 predicate: impl Fn(&DirstateEntry) -> bool + 'tree,
641 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
641 ) -> impl Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + 'tree
642 {
642 {
643 filter_map_results(self.iter_nodes(), move |node| {
643 filter_map_results(self.iter_nodes(), move |node| {
644 if let Some(entry) = node.entry()? {
644 if let Some(entry) = node.entry()? {
645 if predicate(&entry) {
645 if predicate(&entry) {
646 return Ok(Some(node.full_path(self.on_disk)?));
646 return Ok(Some(node.full_path(self.on_disk)?));
647 }
647 }
648 }
648 }
649 Ok(None)
649 Ok(None)
650 })
650 })
651 }
651 }
652 }
652 }
653
653
654 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
654 /// Like `Iterator::filter_map`, but over a fallible iterator of `Result`s.
655 ///
655 ///
656 /// The callback is only called for incoming `Ok` values. Errors are passed
656 /// The callback is only called for incoming `Ok` values. Errors are passed
657 /// through as-is. In order to let it use the `?` operator the callback is
657 /// through as-is. In order to let it use the `?` operator the callback is
658 /// expected to return a `Result` of `Option`, instead of an `Option` of
658 /// expected to return a `Result` of `Option`, instead of an `Option` of
659 /// `Result`.
659 /// `Result`.
660 fn filter_map_results<'a, I, F, A, B, E>(
660 fn filter_map_results<'a, I, F, A, B, E>(
661 iter: I,
661 iter: I,
662 f: F,
662 f: F,
663 ) -> impl Iterator<Item = Result<B, E>> + 'a
663 ) -> impl Iterator<Item = Result<B, E>> + 'a
664 where
664 where
665 I: Iterator<Item = Result<A, E>> + 'a,
665 I: Iterator<Item = Result<A, E>> + 'a,
666 F: Fn(A) -> Result<Option<B>, E> + 'a,
666 F: Fn(A) -> Result<Option<B>, E> + 'a,
667 {
667 {
668 iter.filter_map(move |result| match result {
668 iter.filter_map(move |result| match result {
669 Ok(node) => f(node).transpose(),
669 Ok(node) => f(node).transpose(),
670 Err(e) => Some(Err(e)),
670 Err(e) => Some(Err(e)),
671 })
671 })
672 }
672 }
673
673
674 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
674 impl<'on_disk> super::dispatch::DirstateMapMethods for DirstateMap<'on_disk> {
675 fn clear(&mut self) {
675 fn clear(&mut self) {
676 self.root = Default::default();
676 self.root = Default::default();
677 self.nodes_with_entry_count = 0;
677 self.nodes_with_entry_count = 0;
678 self.nodes_with_copy_source_count = 0;
678 self.nodes_with_copy_source_count = 0;
679 }
679 }
680
680
681 fn add_file(
681 fn add_file(
682 &mut self,
682 &mut self,
683 filename: &HgPath,
683 filename: &HgPath,
684 old_state: EntryState,
684 old_state: EntryState,
685 entry: DirstateEntry,
685 entry: DirstateEntry,
686 ) -> Result<(), DirstateError> {
686 ) -> Result<(), DirstateError> {
687 Ok(self.add_or_remove_file(filename, old_state, entry)?)
687 Ok(self.add_or_remove_file(filename, old_state, entry)?)
688 }
688 }
689
689
690 fn remove_file(
690 fn remove_file(
691 &mut self,
691 &mut self,
692 filename: &HgPath,
692 filename: &HgPath,
693 old_state: EntryState,
693 old_state: EntryState,
694 size: i32,
694 size: i32,
695 ) -> Result<(), DirstateError> {
695 ) -> Result<(), DirstateError> {
696 let entry = DirstateEntry {
696 let entry = DirstateEntry {
697 state: EntryState::Removed,
697 state: EntryState::Removed,
698 mode: 0,
698 mode: 0,
699 size,
699 size,
700 mtime: 0,
700 mtime: 0,
701 };
701 };
702 Ok(self.add_or_remove_file(filename, old_state, entry)?)
702 Ok(self.add_or_remove_file(filename, old_state, entry)?)
703 }
703 }
704
704
705 fn drop_file(
705 fn drop_file(
706 &mut self,
706 &mut self,
707 filename: &HgPath,
707 filename: &HgPath,
708 old_state: EntryState,
708 old_state: EntryState,
709 ) -> Result<bool, DirstateError> {
709 ) -> Result<bool, DirstateError> {
710 struct Dropped {
710 struct Dropped {
711 was_tracked: bool,
711 was_tracked: bool,
712 had_entry: bool,
712 had_entry: bool,
713 had_copy_source: bool,
713 had_copy_source: bool,
714 }
714 }
715
716 /// If this returns `Ok(Some((dropped, removed)))`, then
717 ///
718 /// * `dropped` is about the leaf node that was at `filename`
719 /// * `removed` is whether this particular level of recursion just
720 /// removed a node in `nodes`.
715 fn recur<'on_disk>(
721 fn recur<'on_disk>(
716 on_disk: &'on_disk [u8],
722 on_disk: &'on_disk [u8],
717 nodes: &mut ChildNodes<'on_disk>,
723 nodes: &mut ChildNodes<'on_disk>,
718 path: &HgPath,
724 path: &HgPath,
719 ) -> Result<Option<Dropped>, DirstateV2ParseError> {
725 ) -> Result<Option<(Dropped, bool)>, DirstateV2ParseError> {
720 let (first_path_component, rest_of_path) =
726 let (first_path_component, rest_of_path) =
721 path.split_first_component();
727 path.split_first_component();
722 let node = if let Some(node) =
728 let node = if let Some(node) =
723 nodes.make_mut(on_disk)?.get_mut(first_path_component)
729 nodes.make_mut(on_disk)?.get_mut(first_path_component)
724 {
730 {
725 node
731 node
726 } else {
732 } else {
727 return Ok(None);
733 return Ok(None);
728 };
734 };
729 let dropped;
735 let dropped;
730 if let Some(rest) = rest_of_path {
736 if let Some(rest) = rest_of_path {
731 if let Some(d) = recur(on_disk, &mut node.children, rest)? {
737 if let Some((d, removed)) =
738 recur(on_disk, &mut node.children, rest)?
739 {
732 dropped = d;
740 dropped = d;
733 if dropped.was_tracked {
741 if dropped.was_tracked {
734 node.tracked_descendants_count -= 1;
742 node.tracked_descendants_count -= 1;
735 }
743 }
744
745 // Directory caches must be invalidated when removing a
746 // child node
747 if removed {
748 if let NodeData::CachedDirectory { .. } = &node.data {
749 node.data = NodeData::None
750 }
751 }
736 } else {
752 } else {
737 return Ok(None);
753 return Ok(None);
738 }
754 }
739 } else {
755 } else {
740 let had_entry = node.data.has_entry();
756 let had_entry = node.data.has_entry();
741 if had_entry {
757 if had_entry {
742 node.data = NodeData::None
758 node.data = NodeData::None
743 }
759 }
744 dropped = Dropped {
760 dropped = Dropped {
745 was_tracked: node
761 was_tracked: node
746 .data
762 .data
747 .as_entry()
763 .as_entry()
748 .map_or(false, |entry| entry.state.is_tracked()),
764 .map_or(false, |entry| entry.state.is_tracked()),
749 had_entry,
765 had_entry,
750 had_copy_source: node.copy_source.take().is_some(),
766 had_copy_source: node.copy_source.take().is_some(),
751 };
767 };
752 }
768 }
753 // After recursion, for both leaf (rest_of_path is None) nodes and
769 // After recursion, for both leaf (rest_of_path is None) nodes and
754 // parent nodes, remove a node if it just became empty.
770 // parent nodes, remove a node if it just became empty.
755 if !node.data.has_entry()
771 let remove = !node.data.has_entry()
756 && node.copy_source.is_none()
772 && node.copy_source.is_none()
757 && node.children.is_empty()
773 && node.children.is_empty();
758 {
774 if remove {
759 nodes.make_mut(on_disk)?.remove(first_path_component);
775 nodes.make_mut(on_disk)?.remove(first_path_component);
760 }
776 }
761 Ok(Some(dropped))
777 Ok(Some((dropped, remove)))
762 }
778 }
763
779
764 if let Some(dropped) = recur(self.on_disk, &mut self.root, filename)? {
780 if let Some((dropped, _removed)) =
781 recur(self.on_disk, &mut self.root, filename)?
782 {
765 if dropped.had_entry {
783 if dropped.had_entry {
766 self.nodes_with_entry_count -= 1
784 self.nodes_with_entry_count -= 1
767 }
785 }
768 if dropped.had_copy_source {
786 if dropped.had_copy_source {
769 self.nodes_with_copy_source_count -= 1
787 self.nodes_with_copy_source_count -= 1
770 }
788 }
771 Ok(dropped.had_entry)
789 Ok(dropped.had_entry)
772 } else {
790 } else {
773 debug_assert!(!old_state.is_tracked());
791 debug_assert!(!old_state.is_tracked());
774 Ok(false)
792 Ok(false)
775 }
793 }
776 }
794 }
777
795
778 fn clear_ambiguous_times(
796 fn clear_ambiguous_times(
779 &mut self,
797 &mut self,
780 filenames: Vec<HgPathBuf>,
798 filenames: Vec<HgPathBuf>,
781 now: i32,
799 now: i32,
782 ) -> Result<(), DirstateV2ParseError> {
800 ) -> Result<(), DirstateV2ParseError> {
783 for filename in filenames {
801 for filename in filenames {
784 if let Some(node) =
802 if let Some(node) =
785 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
803 Self::get_node_mut(self.on_disk, &mut self.root, &filename)?
786 {
804 {
787 if let NodeData::Entry(entry) = &mut node.data {
805 if let NodeData::Entry(entry) = &mut node.data {
788 entry.clear_ambiguous_mtime(now);
806 entry.clear_ambiguous_mtime(now);
789 }
807 }
790 }
808 }
791 }
809 }
792 Ok(())
810 Ok(())
793 }
811 }
794
812
795 fn non_normal_entries_contains(
813 fn non_normal_entries_contains(
796 &mut self,
814 &mut self,
797 key: &HgPath,
815 key: &HgPath,
798 ) -> Result<bool, DirstateV2ParseError> {
816 ) -> Result<bool, DirstateV2ParseError> {
799 Ok(if let Some(node) = self.get_node(key)? {
817 Ok(if let Some(node) = self.get_node(key)? {
800 node.entry()?.map_or(false, |entry| entry.is_non_normal())
818 node.entry()?.map_or(false, |entry| entry.is_non_normal())
801 } else {
819 } else {
802 false
820 false
803 })
821 })
804 }
822 }
805
823
806 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
824 fn non_normal_entries_remove(&mut self, _key: &HgPath) {
807 // Do nothing, this `DirstateMap` does not have a separate "non normal
825 // Do nothing, this `DirstateMap` does not have a separate "non normal
808 // entries" set that need to be kept up to date
826 // entries" set that need to be kept up to date
809 }
827 }
810
828
811 fn non_normal_or_other_parent_paths(
829 fn non_normal_or_other_parent_paths(
812 &mut self,
830 &mut self,
813 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
831 ) -> Box<dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + '_>
814 {
832 {
815 Box::new(self.filter_full_paths(|entry| {
833 Box::new(self.filter_full_paths(|entry| {
816 entry.is_non_normal() || entry.is_from_other_parent()
834 entry.is_non_normal() || entry.is_from_other_parent()
817 }))
835 }))
818 }
836 }
819
837
820 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
838 fn set_non_normal_other_parent_entries(&mut self, _force: bool) {
821 // Do nothing, this `DirstateMap` does not have a separate "non normal
839 // Do nothing, this `DirstateMap` does not have a separate "non normal
822 // entries" and "from other parent" sets that need to be recomputed
840 // entries" and "from other parent" sets that need to be recomputed
823 }
841 }
824
842
825 fn iter_non_normal_paths(
843 fn iter_non_normal_paths(
826 &mut self,
844 &mut self,
827 ) -> Box<
845 ) -> Box<
828 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
846 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
829 > {
847 > {
830 self.iter_non_normal_paths_panic()
848 self.iter_non_normal_paths_panic()
831 }
849 }
832
850
833 fn iter_non_normal_paths_panic(
851 fn iter_non_normal_paths_panic(
834 &self,
852 &self,
835 ) -> Box<
853 ) -> Box<
836 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
854 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
837 > {
855 > {
838 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
856 Box::new(self.filter_full_paths(|entry| entry.is_non_normal()))
839 }
857 }
840
858
841 fn iter_other_parent_paths(
859 fn iter_other_parent_paths(
842 &mut self,
860 &mut self,
843 ) -> Box<
861 ) -> Box<
844 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
862 dyn Iterator<Item = Result<&HgPath, DirstateV2ParseError>> + Send + '_,
845 > {
863 > {
846 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
864 Box::new(self.filter_full_paths(|entry| entry.is_from_other_parent()))
847 }
865 }
848
866
849 fn has_tracked_dir(
867 fn has_tracked_dir(
850 &mut self,
868 &mut self,
851 directory: &HgPath,
869 directory: &HgPath,
852 ) -> Result<bool, DirstateError> {
870 ) -> Result<bool, DirstateError> {
853 if let Some(node) = self.get_node(directory)? {
871 if let Some(node) = self.get_node(directory)? {
854 // A node without a `DirstateEntry` was created to hold child
872 // A node without a `DirstateEntry` was created to hold child
855 // nodes, and is therefore a directory.
873 // nodes, and is therefore a directory.
856 let state = node.state()?;
874 let state = node.state()?;
857 Ok(state.is_none() && node.tracked_descendants_count() > 0)
875 Ok(state.is_none() && node.tracked_descendants_count() > 0)
858 } else {
876 } else {
859 Ok(false)
877 Ok(false)
860 }
878 }
861 }
879 }
862
880
863 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
881 fn has_dir(&mut self, directory: &HgPath) -> Result<bool, DirstateError> {
864 if let Some(node) = self.get_node(directory)? {
882 if let Some(node) = self.get_node(directory)? {
865 // A node without a `DirstateEntry` was created to hold child
883 // A node without a `DirstateEntry` was created to hold child
866 // nodes, and is therefore a directory.
884 // nodes, and is therefore a directory.
867 Ok(node.state()?.is_none())
885 Ok(node.state()?.is_none())
868 } else {
886 } else {
869 Ok(false)
887 Ok(false)
870 }
888 }
871 }
889 }
872
890
873 #[timed]
891 #[timed]
874 fn pack_v1(
892 fn pack_v1(
875 &mut self,
893 &mut self,
876 parents: DirstateParents,
894 parents: DirstateParents,
877 now: Timestamp,
895 now: Timestamp,
878 ) -> Result<Vec<u8>, DirstateError> {
896 ) -> Result<Vec<u8>, DirstateError> {
879 let now: i32 = now.0.try_into().expect("time overflow");
897 let now: i32 = now.0.try_into().expect("time overflow");
880 let mut ambiguous_mtimes = Vec::new();
898 let mut ambiguous_mtimes = Vec::new();
881 // Optimization (to be measured?): pre-compute size to avoid `Vec`
899 // Optimization (to be measured?): pre-compute size to avoid `Vec`
882 // reallocations
900 // reallocations
883 let mut size = parents.as_bytes().len();
901 let mut size = parents.as_bytes().len();
884 for node in self.iter_nodes() {
902 for node in self.iter_nodes() {
885 let node = node?;
903 let node = node?;
886 if let Some(entry) = node.entry()? {
904 if let Some(entry) = node.entry()? {
887 size += packed_entry_size(
905 size += packed_entry_size(
888 node.full_path(self.on_disk)?,
906 node.full_path(self.on_disk)?,
889 node.copy_source(self.on_disk)?,
907 node.copy_source(self.on_disk)?,
890 );
908 );
891 if entry.mtime_is_ambiguous(now) {
909 if entry.mtime_is_ambiguous(now) {
892 ambiguous_mtimes.push(
910 ambiguous_mtimes.push(
893 node.full_path_borrowed(self.on_disk)?
911 node.full_path_borrowed(self.on_disk)?
894 .detach_from_tree(),
912 .detach_from_tree(),
895 )
913 )
896 }
914 }
897 }
915 }
898 }
916 }
899 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
917 self.clear_known_ambiguous_mtimes(&ambiguous_mtimes)?;
900
918
901 let mut packed = Vec::with_capacity(size);
919 let mut packed = Vec::with_capacity(size);
902 packed.extend(parents.as_bytes());
920 packed.extend(parents.as_bytes());
903
921
904 for node in self.iter_nodes() {
922 for node in self.iter_nodes() {
905 let node = node?;
923 let node = node?;
906 if let Some(entry) = node.entry()? {
924 if let Some(entry) = node.entry()? {
907 pack_entry(
925 pack_entry(
908 node.full_path(self.on_disk)?,
926 node.full_path(self.on_disk)?,
909 &entry,
927 &entry,
910 node.copy_source(self.on_disk)?,
928 node.copy_source(self.on_disk)?,
911 &mut packed,
929 &mut packed,
912 );
930 );
913 }
931 }
914 }
932 }
915 Ok(packed)
933 Ok(packed)
916 }
934 }
917
935
918 #[timed]
936 #[timed]
919 fn pack_v2(
937 fn pack_v2(
920 &mut self,
938 &mut self,
921 parents: DirstateParents,
939 parents: DirstateParents,
922 now: Timestamp,
940 now: Timestamp,
923 ) -> Result<Vec<u8>, DirstateError> {
941 ) -> Result<Vec<u8>, DirstateError> {
924 // TODO: how do we want to handle this in 2038?
942 // TODO: how do we want to handle this in 2038?
925 let now: i32 = now.0.try_into().expect("time overflow");
943 let now: i32 = now.0.try_into().expect("time overflow");
926 let mut paths = Vec::new();
944 let mut paths = Vec::new();
927 for node in self.iter_nodes() {
945 for node in self.iter_nodes() {
928 let node = node?;
946 let node = node?;
929 if let Some(entry) = node.entry()? {
947 if let Some(entry) = node.entry()? {
930 if entry.mtime_is_ambiguous(now) {
948 if entry.mtime_is_ambiguous(now) {
931 paths.push(
949 paths.push(
932 node.full_path_borrowed(self.on_disk)?
950 node.full_path_borrowed(self.on_disk)?
933 .detach_from_tree(),
951 .detach_from_tree(),
934 )
952 )
935 }
953 }
936 }
954 }
937 }
955 }
938 // Borrow of `self` ends here since we collect cloned paths
956 // Borrow of `self` ends here since we collect cloned paths
939
957
940 self.clear_known_ambiguous_mtimes(&paths)?;
958 self.clear_known_ambiguous_mtimes(&paths)?;
941
959
942 on_disk::write(self, parents)
960 on_disk::write(self, parents)
943 }
961 }
944
962
945 fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
963 fn set_all_dirs(&mut self) -> Result<(), DirstateError> {
946 // Do nothing, this `DirstateMap` does not have a separate `all_dirs` that
964 // Do nothing, this `DirstateMap` does not have a separate `all_dirs` that
947 // needs to be recomputed
965 // needs to be recomputed
948 Ok(())
966 Ok(())
949 }
967 }
950
968
951 fn set_dirs(&mut self) -> Result<(), DirstateError> {
969 fn set_dirs(&mut self) -> Result<(), DirstateError> {
952 // Do nothing, this `DirstateMap` does not have a separate `dirs` that needs
970 // Do nothing, this `DirstateMap` does not have a separate `dirs` that needs
953 // to be recomputed
971 // to be recomputed
954 Ok(())
972 Ok(())
955 }
973 }
956
974
957 fn status<'a>(
975 fn status<'a>(
958 &'a mut self,
976 &'a mut self,
959 matcher: &'a (dyn Matcher + Sync),
977 matcher: &'a (dyn Matcher + Sync),
960 root_dir: PathBuf,
978 root_dir: PathBuf,
961 ignore_files: Vec<PathBuf>,
979 ignore_files: Vec<PathBuf>,
962 options: StatusOptions,
980 options: StatusOptions,
963 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
981 ) -> Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>
964 {
982 {
965 super::status::status(self, matcher, root_dir, ignore_files, options)
983 super::status::status(self, matcher, root_dir, ignore_files, options)
966 }
984 }
967
985
968 fn copy_map_len(&self) -> usize {
986 fn copy_map_len(&self) -> usize {
969 self.nodes_with_copy_source_count as usize
987 self.nodes_with_copy_source_count as usize
970 }
988 }
971
989
972 fn copy_map_iter(&self) -> CopyMapIter<'_> {
990 fn copy_map_iter(&self) -> CopyMapIter<'_> {
973 Box::new(filter_map_results(self.iter_nodes(), move |node| {
991 Box::new(filter_map_results(self.iter_nodes(), move |node| {
974 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
992 Ok(if let Some(source) = node.copy_source(self.on_disk)? {
975 Some((node.full_path(self.on_disk)?, source))
993 Some((node.full_path(self.on_disk)?, source))
976 } else {
994 } else {
977 None
995 None
978 })
996 })
979 }))
997 }))
980 }
998 }
981
999
982 fn copy_map_contains_key(
1000 fn copy_map_contains_key(
983 &self,
1001 &self,
984 key: &HgPath,
1002 key: &HgPath,
985 ) -> Result<bool, DirstateV2ParseError> {
1003 ) -> Result<bool, DirstateV2ParseError> {
986 Ok(if let Some(node) = self.get_node(key)? {
1004 Ok(if let Some(node) = self.get_node(key)? {
987 node.has_copy_source()
1005 node.has_copy_source()
988 } else {
1006 } else {
989 false
1007 false
990 })
1008 })
991 }
1009 }
992
1010
993 fn copy_map_get(
1011 fn copy_map_get(
994 &self,
1012 &self,
995 key: &HgPath,
1013 key: &HgPath,
996 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
1014 ) -> Result<Option<&HgPath>, DirstateV2ParseError> {
997 if let Some(node) = self.get_node(key)? {
1015 if let Some(node) = self.get_node(key)? {
998 if let Some(source) = node.copy_source(self.on_disk)? {
1016 if let Some(source) = node.copy_source(self.on_disk)? {
999 return Ok(Some(source));
1017 return Ok(Some(source));
1000 }
1018 }
1001 }
1019 }
1002 Ok(None)
1020 Ok(None)
1003 }
1021 }
1004
1022
1005 fn copy_map_remove(
1023 fn copy_map_remove(
1006 &mut self,
1024 &mut self,
1007 key: &HgPath,
1025 key: &HgPath,
1008 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1026 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1009 let count = &mut self.nodes_with_copy_source_count;
1027 let count = &mut self.nodes_with_copy_source_count;
1010 Ok(
1028 Ok(
1011 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1029 Self::get_node_mut(self.on_disk, &mut self.root, key)?.and_then(
1012 |node| {
1030 |node| {
1013 if node.copy_source.is_some() {
1031 if node.copy_source.is_some() {
1014 *count -= 1
1032 *count -= 1
1015 }
1033 }
1016 node.copy_source.take().map(Cow::into_owned)
1034 node.copy_source.take().map(Cow::into_owned)
1017 },
1035 },
1018 ),
1036 ),
1019 )
1037 )
1020 }
1038 }
1021
1039
1022 fn copy_map_insert(
1040 fn copy_map_insert(
1023 &mut self,
1041 &mut self,
1024 key: HgPathBuf,
1042 key: HgPathBuf,
1025 value: HgPathBuf,
1043 value: HgPathBuf,
1026 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1044 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1027 let node = Self::get_or_insert_node(
1045 let node = Self::get_or_insert_node(
1028 self.on_disk,
1046 self.on_disk,
1029 &mut self.root,
1047 &mut self.root,
1030 &key,
1048 &key,
1031 WithBasename::to_cow_owned,
1049 WithBasename::to_cow_owned,
1032 |_ancestor| {},
1050 |_ancestor| {},
1033 )?;
1051 )?;
1034 if node.copy_source.is_none() {
1052 if node.copy_source.is_none() {
1035 self.nodes_with_copy_source_count += 1
1053 self.nodes_with_copy_source_count += 1
1036 }
1054 }
1037 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1055 Ok(node.copy_source.replace(value.into()).map(Cow::into_owned))
1038 }
1056 }
1039
1057
1040 fn len(&self) -> usize {
1058 fn len(&self) -> usize {
1041 self.nodes_with_entry_count as usize
1059 self.nodes_with_entry_count as usize
1042 }
1060 }
1043
1061
1044 fn contains_key(
1062 fn contains_key(
1045 &self,
1063 &self,
1046 key: &HgPath,
1064 key: &HgPath,
1047 ) -> Result<bool, DirstateV2ParseError> {
1065 ) -> Result<bool, DirstateV2ParseError> {
1048 Ok(self.get(key)?.is_some())
1066 Ok(self.get(key)?.is_some())
1049 }
1067 }
1050
1068
1051 fn get(
1069 fn get(
1052 &self,
1070 &self,
1053 key: &HgPath,
1071 key: &HgPath,
1054 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1072 ) -> Result<Option<DirstateEntry>, DirstateV2ParseError> {
1055 Ok(if let Some(node) = self.get_node(key)? {
1073 Ok(if let Some(node) = self.get_node(key)? {
1056 node.entry()?
1074 node.entry()?
1057 } else {
1075 } else {
1058 None
1076 None
1059 })
1077 })
1060 }
1078 }
1061
1079
1062 fn iter(&self) -> StateMapIter<'_> {
1080 fn iter(&self) -> StateMapIter<'_> {
1063 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1081 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1064 Ok(if let Some(entry) = node.entry()? {
1082 Ok(if let Some(entry) = node.entry()? {
1065 Some((node.full_path(self.on_disk)?, entry))
1083 Some((node.full_path(self.on_disk)?, entry))
1066 } else {
1084 } else {
1067 None
1085 None
1068 })
1086 })
1069 }))
1087 }))
1070 }
1088 }
1071
1089
1072 fn iter_directories(
1090 fn iter_directories(
1073 &self,
1091 &self,
1074 ) -> Box<
1092 ) -> Box<
1075 dyn Iterator<
1093 dyn Iterator<
1076 Item = Result<
1094 Item = Result<
1077 (&HgPath, Option<Timestamp>),
1095 (&HgPath, Option<Timestamp>),
1078 DirstateV2ParseError,
1096 DirstateV2ParseError,
1079 >,
1097 >,
1080 > + Send
1098 > + Send
1081 + '_,
1099 + '_,
1082 > {
1100 > {
1083 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1101 Box::new(filter_map_results(self.iter_nodes(), move |node| {
1084 Ok(if node.state()?.is_none() {
1102 Ok(if node.state()?.is_none() {
1085 Some((
1103 Some((
1086 node.full_path(self.on_disk)?,
1104 node.full_path(self.on_disk)?,
1087 node.cached_directory_mtime()
1105 node.cached_directory_mtime()
1088 .map(|mtime| Timestamp(mtime.seconds())),
1106 .map(|mtime| Timestamp(mtime.seconds())),
1089 ))
1107 ))
1090 } else {
1108 } else {
1091 None
1109 None
1092 })
1110 })
1093 }))
1111 }))
1094 }
1112 }
1095 }
1113 }
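
The copy-map methods in this hunk keep `nodes_with_copy_source_count` consistent by adjusting it only when a copy source actually appears (`copy_map_insert` on a node that had none) or disappears (`copy_map_remove` on a node that had one). Below is a minimal, self-contained sketch of that bookkeeping pattern; the `Node` and `CopyMap` types and the plain `HashMap` are simplified stand-ins for illustration, not the real tree-shaped, on-disk-backed structures:

use std::collections::HashMap;

// Hypothetical, simplified stand-ins for the real dirstate node types.
#[derive(Default)]
struct Node {
    copy_source: Option<String>,
}

#[derive(Default)]
struct CopyMap {
    nodes: HashMap<String, Node>,
    // Mirrors `nodes_with_copy_source_count`: how many nodes currently
    // have `copy_source.is_some()`.
    nodes_with_copy_source_count: u32,
}

impl CopyMap {
    fn insert(&mut self, key: &str, value: &str) -> Option<String> {
        let node = self.nodes.entry(key.to_owned()).or_default();
        // Only count a node the first time it gains a copy source.
        if node.copy_source.is_none() {
            self.nodes_with_copy_source_count += 1;
        }
        node.copy_source.replace(value.to_owned())
    }

    fn remove(&mut self, key: &str) -> Option<String> {
        let count = &mut self.nodes_with_copy_source_count;
        self.nodes.get_mut(key).and_then(|node| {
            // Only decrement when a copy source is actually dropped.
            if node.copy_source.is_some() {
                *count -= 1;
            }
            node.copy_source.take()
        })
    }

    fn len(&self) -> usize {
        self.nodes_with_copy_source_count as usize
    }
}

fn main() {
    let mut copies = CopyMap::default();
    let _ = copies.insert("copied", "modified");
    assert_eq!(copies.len(), 1);
    // Re-inserting for the same node must not double-count it.
    let _ = copies.insert("copied", "other");
    assert_eq!(copies.len(), 1);
    let _ = copies.remove("copied");
    assert_eq!(copies.len(), 0);
    println!("copy-source counter stays consistent: {}", copies.len());
}

This is the same invariant the real code maintains so that `copy_map_len` can answer in O(1) without walking the tree.
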
@@ -1,960 +1,974 b''
1 #testcases dirstate-v1 dirstate-v1-tree dirstate-v2
1 #testcases dirstate-v1 dirstate-v1-tree dirstate-v2
2
2
3 #if no-rust
3 #if no-rust
4 $ hg init repo0 --config format.exp-dirstate-v2=1
4 $ hg init repo0 --config format.exp-dirstate-v2=1
5 abort: dirstate v2 format requested by config but not supported (requires Rust extensions)
5 abort: dirstate v2 format requested by config but not supported (requires Rust extensions)
6 [255]
6 [255]
7 #endif
7 #endif
8
8
9 #if dirstate-v1-tree
9 #if dirstate-v1-tree
10 #require rust
10 #require rust
11 $ echo '[experimental]' >> $HGRCPATH
11 $ echo '[experimental]' >> $HGRCPATH
12 $ echo 'dirstate-tree.in-memory=1' >> $HGRCPATH
12 $ echo 'dirstate-tree.in-memory=1' >> $HGRCPATH
13 #endif
13 #endif
14
14
15 #if dirstate-v2
15 #if dirstate-v2
16 #require rust
16 #require rust
17 $ echo '[format]' >> $HGRCPATH
17 $ echo '[format]' >> $HGRCPATH
18 $ echo 'exp-dirstate-v2=1' >> $HGRCPATH
18 $ echo 'exp-dirstate-v2=1' >> $HGRCPATH
19 #endif
19 #endif
20
20
21 $ hg init repo1
21 $ hg init repo1
22 $ cd repo1
22 $ cd repo1
23 $ mkdir a b a/1 b/1 b/2
23 $ mkdir a b a/1 b/1 b/2
24 $ touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2
24 $ touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2
25
25
26 hg status in repo root:
26 hg status in repo root:
27
27
28 $ hg status
28 $ hg status
29 ? a/1/in_a_1
29 ? a/1/in_a_1
30 ? a/in_a
30 ? a/in_a
31 ? b/1/in_b_1
31 ? b/1/in_b_1
32 ? b/2/in_b_2
32 ? b/2/in_b_2
33 ? b/in_b
33 ? b/in_b
34 ? in_root
34 ? in_root
35
35
36 hg status . in repo root:
36 hg status . in repo root:
37
37
38 $ hg status .
38 $ hg status .
39 ? a/1/in_a_1
39 ? a/1/in_a_1
40 ? a/in_a
40 ? a/in_a
41 ? b/1/in_b_1
41 ? b/1/in_b_1
42 ? b/2/in_b_2
42 ? b/2/in_b_2
43 ? b/in_b
43 ? b/in_b
44 ? in_root
44 ? in_root
45
45
46 $ hg status --cwd a
46 $ hg status --cwd a
47 ? a/1/in_a_1
47 ? a/1/in_a_1
48 ? a/in_a
48 ? a/in_a
49 ? b/1/in_b_1
49 ? b/1/in_b_1
50 ? b/2/in_b_2
50 ? b/2/in_b_2
51 ? b/in_b
51 ? b/in_b
52 ? in_root
52 ? in_root
53 $ hg status --cwd a .
53 $ hg status --cwd a .
54 ? 1/in_a_1
54 ? 1/in_a_1
55 ? in_a
55 ? in_a
56 $ hg status --cwd a ..
56 $ hg status --cwd a ..
57 ? 1/in_a_1
57 ? 1/in_a_1
58 ? in_a
58 ? in_a
59 ? ../b/1/in_b_1
59 ? ../b/1/in_b_1
60 ? ../b/2/in_b_2
60 ? ../b/2/in_b_2
61 ? ../b/in_b
61 ? ../b/in_b
62 ? ../in_root
62 ? ../in_root
63
63
64 $ hg status --cwd b
64 $ hg status --cwd b
65 ? a/1/in_a_1
65 ? a/1/in_a_1
66 ? a/in_a
66 ? a/in_a
67 ? b/1/in_b_1
67 ? b/1/in_b_1
68 ? b/2/in_b_2
68 ? b/2/in_b_2
69 ? b/in_b
69 ? b/in_b
70 ? in_root
70 ? in_root
71 $ hg status --cwd b .
71 $ hg status --cwd b .
72 ? 1/in_b_1
72 ? 1/in_b_1
73 ? 2/in_b_2
73 ? 2/in_b_2
74 ? in_b
74 ? in_b
75 $ hg status --cwd b ..
75 $ hg status --cwd b ..
76 ? ../a/1/in_a_1
76 ? ../a/1/in_a_1
77 ? ../a/in_a
77 ? ../a/in_a
78 ? 1/in_b_1
78 ? 1/in_b_1
79 ? 2/in_b_2
79 ? 2/in_b_2
80 ? in_b
80 ? in_b
81 ? ../in_root
81 ? ../in_root
82
82
83 $ hg status --cwd a/1
83 $ hg status --cwd a/1
84 ? a/1/in_a_1
84 ? a/1/in_a_1
85 ? a/in_a
85 ? a/in_a
86 ? b/1/in_b_1
86 ? b/1/in_b_1
87 ? b/2/in_b_2
87 ? b/2/in_b_2
88 ? b/in_b
88 ? b/in_b
89 ? in_root
89 ? in_root
90 $ hg status --cwd a/1 .
90 $ hg status --cwd a/1 .
91 ? in_a_1
91 ? in_a_1
92 $ hg status --cwd a/1 ..
92 $ hg status --cwd a/1 ..
93 ? in_a_1
93 ? in_a_1
94 ? ../in_a
94 ? ../in_a
95
95
96 $ hg status --cwd b/1
96 $ hg status --cwd b/1
97 ? a/1/in_a_1
97 ? a/1/in_a_1
98 ? a/in_a
98 ? a/in_a
99 ? b/1/in_b_1
99 ? b/1/in_b_1
100 ? b/2/in_b_2
100 ? b/2/in_b_2
101 ? b/in_b
101 ? b/in_b
102 ? in_root
102 ? in_root
103 $ hg status --cwd b/1 .
103 $ hg status --cwd b/1 .
104 ? in_b_1
104 ? in_b_1
105 $ hg status --cwd b/1 ..
105 $ hg status --cwd b/1 ..
106 ? in_b_1
106 ? in_b_1
107 ? ../2/in_b_2
107 ? ../2/in_b_2
108 ? ../in_b
108 ? ../in_b
109
109
110 $ hg status --cwd b/2
110 $ hg status --cwd b/2
111 ? a/1/in_a_1
111 ? a/1/in_a_1
112 ? a/in_a
112 ? a/in_a
113 ? b/1/in_b_1
113 ? b/1/in_b_1
114 ? b/2/in_b_2
114 ? b/2/in_b_2
115 ? b/in_b
115 ? b/in_b
116 ? in_root
116 ? in_root
117 $ hg status --cwd b/2 .
117 $ hg status --cwd b/2 .
118 ? in_b_2
118 ? in_b_2
119 $ hg status --cwd b/2 ..
119 $ hg status --cwd b/2 ..
120 ? ../1/in_b_1
120 ? ../1/in_b_1
121 ? in_b_2
121 ? in_b_2
122 ? ../in_b
122 ? ../in_b
123
123
124 combining patterns with a root and patterns without a root works
124 combining patterns with a root and patterns without a root works
125
125
126 $ hg st a/in_a re:.*b$
126 $ hg st a/in_a re:.*b$
127 ? a/in_a
127 ? a/in_a
128 ? b/in_b
128 ? b/in_b
129
129
130 tweaking defaults works
130 tweaking defaults works
131 $ hg status --cwd a --config ui.tweakdefaults=yes
131 $ hg status --cwd a --config ui.tweakdefaults=yes
132 ? 1/in_a_1
132 ? 1/in_a_1
133 ? in_a
133 ? in_a
134 ? ../b/1/in_b_1
134 ? ../b/1/in_b_1
135 ? ../b/2/in_b_2
135 ? ../b/2/in_b_2
136 ? ../b/in_b
136 ? ../b/in_b
137 ? ../in_root
137 ? ../in_root
138 $ HGPLAIN=1 hg status --cwd a --config ui.tweakdefaults=yes
138 $ HGPLAIN=1 hg status --cwd a --config ui.tweakdefaults=yes
139 ? a/1/in_a_1 (glob)
139 ? a/1/in_a_1 (glob)
140 ? a/in_a (glob)
140 ? a/in_a (glob)
141 ? b/1/in_b_1 (glob)
141 ? b/1/in_b_1 (glob)
142 ? b/2/in_b_2 (glob)
142 ? b/2/in_b_2 (glob)
143 ? b/in_b (glob)
143 ? b/in_b (glob)
144 ? in_root
144 ? in_root
145 $ HGPLAINEXCEPT=tweakdefaults hg status --cwd a --config ui.tweakdefaults=yes
145 $ HGPLAINEXCEPT=tweakdefaults hg status --cwd a --config ui.tweakdefaults=yes
146 ? 1/in_a_1
146 ? 1/in_a_1
147 ? in_a
147 ? in_a
148 ? ../b/1/in_b_1
148 ? ../b/1/in_b_1
149 ? ../b/2/in_b_2
149 ? ../b/2/in_b_2
150 ? ../b/in_b
150 ? ../b/in_b
151 ? ../in_root (glob)
151 ? ../in_root (glob)
152
152
153 relative paths can be requested
153 relative paths can be requested
154
154
155 $ hg status --cwd a --config ui.relative-paths=yes
155 $ hg status --cwd a --config ui.relative-paths=yes
156 ? 1/in_a_1
156 ? 1/in_a_1
157 ? in_a
157 ? in_a
158 ? ../b/1/in_b_1
158 ? ../b/1/in_b_1
159 ? ../b/2/in_b_2
159 ? ../b/2/in_b_2
160 ? ../b/in_b
160 ? ../b/in_b
161 ? ../in_root
161 ? ../in_root
162
162
163 $ hg status --cwd a . --config ui.relative-paths=legacy
163 $ hg status --cwd a . --config ui.relative-paths=legacy
164 ? 1/in_a_1
164 ? 1/in_a_1
165 ? in_a
165 ? in_a
166 $ hg status --cwd a . --config ui.relative-paths=no
166 $ hg status --cwd a . --config ui.relative-paths=no
167 ? a/1/in_a_1
167 ? a/1/in_a_1
168 ? a/in_a
168 ? a/in_a
169
169
170 commands.status.relative overrides ui.relative-paths
170 commands.status.relative overrides ui.relative-paths
171
171
172 $ cat >> $HGRCPATH <<EOF
172 $ cat >> $HGRCPATH <<EOF
173 > [ui]
173 > [ui]
174 > relative-paths = False
174 > relative-paths = False
175 > [commands]
175 > [commands]
176 > status.relative = True
176 > status.relative = True
177 > EOF
177 > EOF
178 $ hg status --cwd a
178 $ hg status --cwd a
179 ? 1/in_a_1
179 ? 1/in_a_1
180 ? in_a
180 ? in_a
181 ? ../b/1/in_b_1
181 ? ../b/1/in_b_1
182 ? ../b/2/in_b_2
182 ? ../b/2/in_b_2
183 ? ../b/in_b
183 ? ../b/in_b
184 ? ../in_root
184 ? ../in_root
185 $ HGPLAIN=1 hg status --cwd a
185 $ HGPLAIN=1 hg status --cwd a
186 ? a/1/in_a_1 (glob)
186 ? a/1/in_a_1 (glob)
187 ? a/in_a (glob)
187 ? a/in_a (glob)
188 ? b/1/in_b_1 (glob)
188 ? b/1/in_b_1 (glob)
189 ? b/2/in_b_2 (glob)
189 ? b/2/in_b_2 (glob)
190 ? b/in_b (glob)
190 ? b/in_b (glob)
191 ? in_root
191 ? in_root
192
192
193 if relative paths are explicitly off, tweakdefaults doesn't change it
193 if relative paths are explicitly off, tweakdefaults doesn't change it
194 $ cat >> $HGRCPATH <<EOF
194 $ cat >> $HGRCPATH <<EOF
195 > [commands]
195 > [commands]
196 > status.relative = False
196 > status.relative = False
197 > EOF
197 > EOF
198 $ hg status --cwd a --config ui.tweakdefaults=yes
198 $ hg status --cwd a --config ui.tweakdefaults=yes
199 ? a/1/in_a_1
199 ? a/1/in_a_1
200 ? a/in_a
200 ? a/in_a
201 ? b/1/in_b_1
201 ? b/1/in_b_1
202 ? b/2/in_b_2
202 ? b/2/in_b_2
203 ? b/in_b
203 ? b/in_b
204 ? in_root
204 ? in_root
205
205
206 $ cd ..
206 $ cd ..
207
207
208 $ hg init repo2
208 $ hg init repo2
209 $ cd repo2
209 $ cd repo2
210 $ touch modified removed deleted ignored
210 $ touch modified removed deleted ignored
211 $ echo "^ignored$" > .hgignore
211 $ echo "^ignored$" > .hgignore
212 $ hg ci -A -m 'initial checkin'
212 $ hg ci -A -m 'initial checkin'
213 adding .hgignore
213 adding .hgignore
214 adding deleted
214 adding deleted
215 adding modified
215 adding modified
216 adding removed
216 adding removed
217 $ touch modified added unknown ignored
217 $ touch modified added unknown ignored
218 $ hg add added
218 $ hg add added
219 $ hg remove removed
219 $ hg remove removed
220 $ rm deleted
220 $ rm deleted
221
221
222 hg status:
222 hg status:
223
223
224 $ hg status
224 $ hg status
225 A added
225 A added
226 R removed
226 R removed
227 ! deleted
227 ! deleted
228 ? unknown
228 ? unknown
229
229
230 hg status modified added removed deleted unknown never-existed ignored:
230 hg status modified added removed deleted unknown never-existed ignored:
231
231
232 $ hg status modified added removed deleted unknown never-existed ignored
232 $ hg status modified added removed deleted unknown never-existed ignored
233 never-existed: * (glob)
233 never-existed: * (glob)
234 A added
234 A added
235 R removed
235 R removed
236 ! deleted
236 ! deleted
237 ? unknown
237 ? unknown
238
238
239 $ hg copy modified copied
239 $ hg copy modified copied
240
240
241 hg status -C:
241 hg status -C:
242
242
243 $ hg status -C
243 $ hg status -C
244 A added
244 A added
245 A copied
245 A copied
246 modified
246 modified
247 R removed
247 R removed
248 ! deleted
248 ! deleted
249 ? unknown
249 ? unknown
250
250
251 hg status -A:
251 hg status -A:
252
252
253 $ hg status -A
253 $ hg status -A
254 A added
254 A added
255 A copied
255 A copied
256 modified
256 modified
257 R removed
257 R removed
258 ! deleted
258 ! deleted
259 ? unknown
259 ? unknown
260 I ignored
260 I ignored
261 C .hgignore
261 C .hgignore
262 C modified
262 C modified
263
263
264 $ hg status -A -T '{status} {path} {node|shortest}\n'
264 $ hg status -A -T '{status} {path} {node|shortest}\n'
265 A added ffff
265 A added ffff
266 A copied ffff
266 A copied ffff
267 R removed ffff
267 R removed ffff
268 ! deleted ffff
268 ! deleted ffff
269 ? unknown ffff
269 ? unknown ffff
270 I ignored ffff
270 I ignored ffff
271 C .hgignore ffff
271 C .hgignore ffff
272 C modified ffff
272 C modified ffff
273
273
274 $ hg status -A -Tjson
274 $ hg status -A -Tjson
275 [
275 [
276 {
276 {
277 "itemtype": "file",
277 "itemtype": "file",
278 "path": "added",
278 "path": "added",
279 "status": "A"
279 "status": "A"
280 },
280 },
281 {
281 {
282 "itemtype": "file",
282 "itemtype": "file",
283 "path": "copied",
283 "path": "copied",
284 "source": "modified",
284 "source": "modified",
285 "status": "A"
285 "status": "A"
286 },
286 },
287 {
287 {
288 "itemtype": "file",
288 "itemtype": "file",
289 "path": "removed",
289 "path": "removed",
290 "status": "R"
290 "status": "R"
291 },
291 },
292 {
292 {
293 "itemtype": "file",
293 "itemtype": "file",
294 "path": "deleted",
294 "path": "deleted",
295 "status": "!"
295 "status": "!"
296 },
296 },
297 {
297 {
298 "itemtype": "file",
298 "itemtype": "file",
299 "path": "unknown",
299 "path": "unknown",
300 "status": "?"
300 "status": "?"
301 },
301 },
302 {
302 {
303 "itemtype": "file",
303 "itemtype": "file",
304 "path": "ignored",
304 "path": "ignored",
305 "status": "I"
305 "status": "I"
306 },
306 },
307 {
307 {
308 "itemtype": "file",
308 "itemtype": "file",
309 "path": ".hgignore",
309 "path": ".hgignore",
310 "status": "C"
310 "status": "C"
311 },
311 },
312 {
312 {
313 "itemtype": "file",
313 "itemtype": "file",
314 "path": "modified",
314 "path": "modified",
315 "status": "C"
315 "status": "C"
316 }
316 }
317 ]
317 ]
318
318
319 $ hg status -A -Tpickle > pickle
319 $ hg status -A -Tpickle > pickle
320 >>> from __future__ import print_function
320 >>> from __future__ import print_function
321 >>> from mercurial import util
321 >>> from mercurial import util
322 >>> pickle = util.pickle
322 >>> pickle = util.pickle
323 >>> data = sorted((x[b'status'].decode(), x[b'path'].decode()) for x in pickle.load(open("pickle", r"rb")))
323 >>> data = sorted((x[b'status'].decode(), x[b'path'].decode()) for x in pickle.load(open("pickle", r"rb")))
324 >>> for s, p in data: print("%s %s" % (s, p))
324 >>> for s, p in data: print("%s %s" % (s, p))
325 ! deleted
325 ! deleted
326 ? pickle
326 ? pickle
327 ? unknown
327 ? unknown
328 A added
328 A added
329 A copied
329 A copied
330 C .hgignore
330 C .hgignore
331 C modified
331 C modified
332 I ignored
332 I ignored
333 R removed
333 R removed
334 $ rm pickle
334 $ rm pickle
335
335
336 $ echo "^ignoreddir$" > .hgignore
336 $ echo "^ignoreddir$" > .hgignore
337 $ mkdir ignoreddir
337 $ mkdir ignoreddir
338 $ touch ignoreddir/file
338 $ touch ignoreddir/file
339
339
340 Test templater support:
340 Test templater support:
341
341
342 $ hg status -AT "[{status}]\t{if(source, '{source} -> ')}{path}\n"
342 $ hg status -AT "[{status}]\t{if(source, '{source} -> ')}{path}\n"
343 [M] .hgignore
343 [M] .hgignore
344 [A] added
344 [A] added
345 [A] modified -> copied
345 [A] modified -> copied
346 [R] removed
346 [R] removed
347 [!] deleted
347 [!] deleted
348 [?] ignored
348 [?] ignored
349 [?] unknown
349 [?] unknown
350 [I] ignoreddir/file
350 [I] ignoreddir/file
351 [C] modified
351 [C] modified
352 $ hg status -AT default
352 $ hg status -AT default
353 M .hgignore
353 M .hgignore
354 A added
354 A added
355 A copied
355 A copied
356 modified
356 modified
357 R removed
357 R removed
358 ! deleted
358 ! deleted
359 ? ignored
359 ? ignored
360 ? unknown
360 ? unknown
361 I ignoreddir/file
361 I ignoreddir/file
362 C modified
362 C modified
363 $ hg status -T compact
363 $ hg status -T compact
364 abort: "status" not in template map
364 abort: "status" not in template map
365 [255]
365 [255]
366
366
367 hg status ignoreddir/file:
367 hg status ignoreddir/file:
368
368
369 $ hg status ignoreddir/file
369 $ hg status ignoreddir/file
370
370
371 hg status -i ignoreddir/file:
371 hg status -i ignoreddir/file:
372
372
373 $ hg status -i ignoreddir/file
373 $ hg status -i ignoreddir/file
374 I ignoreddir/file
374 I ignoreddir/file
375 $ cd ..
375 $ cd ..
376
376
377 Check 'status -q' and some combinations
377 Check 'status -q' and some combinations
378
378
379 $ hg init repo3
379 $ hg init repo3
380 $ cd repo3
380 $ cd repo3
381 $ touch modified removed deleted ignored
381 $ touch modified removed deleted ignored
382 $ echo "^ignored$" > .hgignore
382 $ echo "^ignored$" > .hgignore
383 $ hg commit -A -m 'initial checkin'
383 $ hg commit -A -m 'initial checkin'
384 adding .hgignore
384 adding .hgignore
385 adding deleted
385 adding deleted
386 adding modified
386 adding modified
387 adding removed
387 adding removed
388 $ touch added unknown ignored
388 $ touch added unknown ignored
389 $ hg add added
389 $ hg add added
390 $ echo "test" >> modified
390 $ echo "test" >> modified
391 $ hg remove removed
391 $ hg remove removed
392 $ rm deleted
392 $ rm deleted
393 $ hg copy modified copied
393 $ hg copy modified copied
394
394
395 Specify the working directory revision explicitly; that should be the same as
395 Specify the working directory revision explicitly; that should be the same as
396 "hg status"
396 "hg status"
397
397
398 $ hg status --change "wdir()"
398 $ hg status --change "wdir()"
399 M modified
399 M modified
400 A added
400 A added
401 A copied
401 A copied
402 R removed
402 R removed
403 ! deleted
403 ! deleted
404 ? unknown
404 ? unknown
405
405
406 Run status with 2 different flags.
406 Run status with 2 different flags.
407 Check if result is the same or different.
407 Check if result is the same or different.
408 If result is not as expected, raise error
408 If result is not as expected, raise error
409
409
410 $ assert() {
410 $ assert() {
411 > hg status $1 > ../a
411 > hg status $1 > ../a
412 > hg status $2 > ../b
412 > hg status $2 > ../b
413 > if diff ../a ../b > /dev/null; then
413 > if diff ../a ../b > /dev/null; then
414 > out=0
414 > out=0
415 > else
415 > else
416 > out=1
416 > out=1
417 > fi
417 > fi
418 > if [ $3 -eq 0 ]; then
418 > if [ $3 -eq 0 ]; then
419 > df="same"
419 > df="same"
420 > else
420 > else
421 > df="different"
421 > df="different"
422 > fi
422 > fi
423 > if [ $out -ne $3 ]; then
423 > if [ $out -ne $3 ]; then
424 > echo "Error on $1 and $2, should be $df."
424 > echo "Error on $1 and $2, should be $df."
425 > fi
425 > fi
426 > }
426 > }
427
427
428 Assert flag1 flag2 [0-same | 1-different]
428 Assert flag1 flag2 [0-same | 1-different]
429
429
430 $ assert "-q" "-mard" 0
430 $ assert "-q" "-mard" 0
431 $ assert "-A" "-marduicC" 0
431 $ assert "-A" "-marduicC" 0
432 $ assert "-qA" "-mardcC" 0
432 $ assert "-qA" "-mardcC" 0
433 $ assert "-qAui" "-A" 0
433 $ assert "-qAui" "-A" 0
434 $ assert "-qAu" "-marducC" 0
434 $ assert "-qAu" "-marducC" 0
435 $ assert "-qAi" "-mardicC" 0
435 $ assert "-qAi" "-mardicC" 0
436 $ assert "-qu" "-u" 0
436 $ assert "-qu" "-u" 0
437 $ assert "-q" "-u" 1
437 $ assert "-q" "-u" 1
438 $ assert "-m" "-a" 1
438 $ assert "-m" "-a" 1
439 $ assert "-r" "-d" 1
439 $ assert "-r" "-d" 1
440 $ cd ..
440 $ cd ..
441
441
442 $ hg init repo4
442 $ hg init repo4
443 $ cd repo4
443 $ cd repo4
444 $ touch modified removed deleted
444 $ touch modified removed deleted
445 $ hg ci -q -A -m 'initial checkin'
445 $ hg ci -q -A -m 'initial checkin'
446 $ touch added unknown
446 $ touch added unknown
447 $ hg add added
447 $ hg add added
448 $ hg remove removed
448 $ hg remove removed
449 $ rm deleted
449 $ rm deleted
450 $ echo x > modified
450 $ echo x > modified
451 $ hg copy modified copied
451 $ hg copy modified copied
452 $ hg ci -m 'test checkin' -d "1000001 0"
452 $ hg ci -m 'test checkin' -d "1000001 0"
453 $ rm *
453 $ rm *
454 $ touch unrelated
454 $ touch unrelated
455 $ hg ci -q -A -m 'unrelated checkin' -d "1000002 0"
455 $ hg ci -q -A -m 'unrelated checkin' -d "1000002 0"
456
456
457 hg status --change 1:
457 hg status --change 1:
458
458
459 $ hg status --change 1
459 $ hg status --change 1
460 M modified
460 M modified
461 A added
461 A added
462 A copied
462 A copied
463 R removed
463 R removed
464
464
465 hg status --change 1 unrelated:
465 hg status --change 1 unrelated:
466
466
467 $ hg status --change 1 unrelated
467 $ hg status --change 1 unrelated
468
468
469 hg status -C --change 1 added modified copied removed deleted:
469 hg status -C --change 1 added modified copied removed deleted:
470
470
471 $ hg status -C --change 1 added modified copied removed deleted
471 $ hg status -C --change 1 added modified copied removed deleted
472 M modified
472 M modified
473 A added
473 A added
474 A copied
474 A copied
475 modified
475 modified
476 R removed
476 R removed
477
477
478 hg status -A --change 1 and revset:
478 hg status -A --change 1 and revset:
479
479
480 $ hg status -A --change '1|1'
480 $ hg status -A --change '1|1'
481 M modified
481 M modified
482 A added
482 A added
483 A copied
483 A copied
484 modified
484 modified
485 R removed
485 R removed
486 C deleted
486 C deleted
487
487
488 $ cd ..
488 $ cd ..
489
489
490 hg status with --rev and reverted changes:
490 hg status with --rev and reverted changes:
491
491
492 $ hg init reverted-changes-repo
492 $ hg init reverted-changes-repo
493 $ cd reverted-changes-repo
493 $ cd reverted-changes-repo
494 $ echo a > file
494 $ echo a > file
495 $ hg add file
495 $ hg add file
496 $ hg ci -m a
496 $ hg ci -m a
497 $ echo b > file
497 $ echo b > file
498 $ hg ci -m b
498 $ hg ci -m b
499
499
500 reverted file should appear clean
500 reverted file should appear clean
501
501
502 $ hg revert -r 0 .
502 $ hg revert -r 0 .
503 reverting file
503 reverting file
504 $ hg status -A --rev 0
504 $ hg status -A --rev 0
505 C file
505 C file
506
506
507 #if execbit
507 #if execbit
508 reverted file with changed flag should appear modified
508 reverted file with changed flag should appear modified
509
509
510 $ chmod +x file
510 $ chmod +x file
511 $ hg status -A --rev 0
511 $ hg status -A --rev 0
512 M file
512 M file
513
513
514 $ hg revert -r 0 .
514 $ hg revert -r 0 .
515 reverting file
515 reverting file
516
516
517 reverted and committed file with changed flag should appear modified
517 reverted and committed file with changed flag should appear modified
518
518
519 $ hg co -C .
519 $ hg co -C .
520 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
520 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
521 $ chmod +x file
521 $ chmod +x file
522 $ hg ci -m 'change flag'
522 $ hg ci -m 'change flag'
523 $ hg status -A --rev 1 --rev 2
523 $ hg status -A --rev 1 --rev 2
524 M file
524 M file
525 $ hg diff -r 1 -r 2
525 $ hg diff -r 1 -r 2
526
526
527 #endif
527 #endif
528
528
529 $ cd ..
529 $ cd ..
530
530
531 hg status of binary file starting with '\1\n', a separator for metadata:
531 hg status of binary file starting with '\1\n', a separator for metadata:
532
532
533 $ hg init repo5
533 $ hg init repo5
534 $ cd repo5
534 $ cd repo5
535 >>> open("010a", r"wb").write(b"\1\nfoo") and None
535 >>> open("010a", r"wb").write(b"\1\nfoo") and None
536 $ hg ci -q -A -m 'initial checkin'
536 $ hg ci -q -A -m 'initial checkin'
537 $ hg status -A
537 $ hg status -A
538 C 010a
538 C 010a
539
539
540 >>> open("010a", r"wb").write(b"\1\nbar") and None
540 >>> open("010a", r"wb").write(b"\1\nbar") and None
541 $ hg status -A
541 $ hg status -A
542 M 010a
542 M 010a
543 $ hg ci -q -m 'modify 010a'
543 $ hg ci -q -m 'modify 010a'
544 $ hg status -A --rev 0:1
544 $ hg status -A --rev 0:1
545 M 010a
545 M 010a
546
546
547 $ touch empty
547 $ touch empty
548 $ hg ci -q -A -m 'add another file'
548 $ hg ci -q -A -m 'add another file'
549 $ hg status -A --rev 1:2 010a
549 $ hg status -A --rev 1:2 010a
550 C 010a
550 C 010a
551
551
552 $ cd ..
552 $ cd ..
553
553
554 test "hg status" with "directory pattern" which matches against files
554 test "hg status" with "directory pattern" which matches against files
555 only known on target revision.
555 only known on target revision.
556
556
557 $ hg init repo6
557 $ hg init repo6
558 $ cd repo6
558 $ cd repo6
559
559
560 $ echo a > a.txt
560 $ echo a > a.txt
561 $ hg add a.txt
561 $ hg add a.txt
562 $ hg commit -m '#0'
562 $ hg commit -m '#0'
563 $ mkdir -p 1/2/3/4/5
563 $ mkdir -p 1/2/3/4/5
564 $ echo b > 1/2/3/4/5/b.txt
564 $ echo b > 1/2/3/4/5/b.txt
565 $ hg add 1/2/3/4/5/b.txt
565 $ hg add 1/2/3/4/5/b.txt
566 $ hg commit -m '#1'
566 $ hg commit -m '#1'
567
567
568 $ hg update -C 0 > /dev/null
568 $ hg update -C 0 > /dev/null
569 $ hg status -A
569 $ hg status -A
570 C a.txt
570 C a.txt
571
571
572 the directory matching the specified pattern should be removed,
572 the directory matching the specified pattern should be removed,
573 because its existence prevents 'dirstate.walk()' from showing a
573 because its existence prevents 'dirstate.walk()' from showing a
574 warning message about such a pattern.
574 warning message about such a pattern.
575
575
576 $ test ! -d 1
576 $ test ! -d 1
577 $ hg status -A --rev 1 1/2/3/4/5/b.txt
577 $ hg status -A --rev 1 1/2/3/4/5/b.txt
578 R 1/2/3/4/5/b.txt
578 R 1/2/3/4/5/b.txt
579 $ hg status -A --rev 1 1/2/3/4/5
579 $ hg status -A --rev 1 1/2/3/4/5
580 R 1/2/3/4/5/b.txt
580 R 1/2/3/4/5/b.txt
581 $ hg status -A --rev 1 1/2/3
581 $ hg status -A --rev 1 1/2/3
582 R 1/2/3/4/5/b.txt
582 R 1/2/3/4/5/b.txt
583 $ hg status -A --rev 1 1
583 $ hg status -A --rev 1 1
584 R 1/2/3/4/5/b.txt
584 R 1/2/3/4/5/b.txt
585
585
586 $ hg status --config ui.formatdebug=True --rev 1 1
586 $ hg status --config ui.formatdebug=True --rev 1 1
587 status = [
587 status = [
588 {
588 {
589 'itemtype': 'file',
589 'itemtype': 'file',
590 'path': '1/2/3/4/5/b.txt',
590 'path': '1/2/3/4/5/b.txt',
591 'status': 'R'
591 'status': 'R'
592 },
592 },
593 ]
593 ]
594
594
595 #if windows
595 #if windows
596 $ hg --config ui.slash=false status -A --rev 1 1
596 $ hg --config ui.slash=false status -A --rev 1 1
597 R 1\2\3\4\5\b.txt
597 R 1\2\3\4\5\b.txt
598 #endif
598 #endif
599
599
600 $ cd ..
600 $ cd ..
601
601
602 Status after move overwriting a file (issue4458)
602 Status after move overwriting a file (issue4458)
603 =================================================
603 =================================================
604
604
605
605
606 $ hg init issue4458
606 $ hg init issue4458
607 $ cd issue4458
607 $ cd issue4458
608 $ echo a > a
608 $ echo a > a
609 $ echo b > b
609 $ echo b > b
610 $ hg commit -Am base
610 $ hg commit -Am base
611 adding a
611 adding a
612 adding b
612 adding b
613
613
614
614
615 with --force
615 with --force
616
616
617 $ hg mv b --force a
617 $ hg mv b --force a
618 $ hg st --copies
618 $ hg st --copies
619 M a
619 M a
620 b
620 b
621 R b
621 R b
622 $ hg revert --all
622 $ hg revert --all
623 reverting a
623 reverting a
624 undeleting b
624 undeleting b
625 $ rm *.orig
625 $ rm *.orig
626
626
627 without force
627 without force
628
628
629 $ hg rm a
629 $ hg rm a
630 $ hg st --copies
630 $ hg st --copies
631 R a
631 R a
632 $ hg mv b a
632 $ hg mv b a
633 $ hg st --copies
633 $ hg st --copies
634 M a
634 M a
635 b
635 b
636 R b
636 R b
637
637
638 using ui.statuscopies setting
638 using ui.statuscopies setting
639 $ hg st --config ui.statuscopies=true
639 $ hg st --config ui.statuscopies=true
640 M a
640 M a
641 b
641 b
642 R b
642 R b
643 $ hg st --config ui.statuscopies=false
643 $ hg st --config ui.statuscopies=false
644 M a
644 M a
645 R b
645 R b
646 $ hg st --config ui.tweakdefaults=yes
646 $ hg st --config ui.tweakdefaults=yes
647 M a
647 M a
648 b
648 b
649 R b
649 R b
650
650
651 using log status template (issue5155)
651 using log status template (issue5155)
652 $ hg log -Tstatus -r 'wdir()' -C
652 $ hg log -Tstatus -r 'wdir()' -C
653 changeset: 2147483647:ffffffffffff
653 changeset: 2147483647:ffffffffffff
654 parent: 0:8c55c58b4c0e
654 parent: 0:8c55c58b4c0e
655 user: test
655 user: test
656 date: * (glob)
656 date: * (glob)
657 files:
657 files:
658 M a
658 M a
659 b
659 b
660 R b
660 R b
661
661
662 $ hg log -GTstatus -r 'wdir()' -C
662 $ hg log -GTstatus -r 'wdir()' -C
663 o changeset: 2147483647:ffffffffffff
663 o changeset: 2147483647:ffffffffffff
664 | parent: 0:8c55c58b4c0e
664 | parent: 0:8c55c58b4c0e
665 ~ user: test
665 ~ user: test
666 date: * (glob)
666 date: * (glob)
667 files:
667 files:
668 M a
668 M a
669 b
669 b
670 R b
670 R b
671
671
672
672
673 Another "bug" to highlight: the revision status does not report the copy information.
673 Another "bug" to highlight: the revision status does not report the copy information.
674 This is buggy behavior.
674 This is buggy behavior.
675
675
676 $ hg commit -m 'blah'
676 $ hg commit -m 'blah'
677 $ hg st --copies --change .
677 $ hg st --copies --change .
678 M a
678 M a
679 R b
679 R b
680
680
681 using log status template, the copy information is displayed correctly.
681 using log status template, the copy information is displayed correctly.
682 $ hg log -Tstatus -r. -C
682 $ hg log -Tstatus -r. -C
683 changeset: 1:6685fde43d21
683 changeset: 1:6685fde43d21
684 tag: tip
684 tag: tip
685 user: test
685 user: test
686 date: * (glob)
686 date: * (glob)
687 summary: blah
687 summary: blah
688 files:
688 files:
689 M a
689 M a
690 b
690 b
691 R b
691 R b
692
692
693
693
694 $ cd ..
694 $ cd ..
695
695
696 Make sure .hg doesn't show up even as a symlink
696 Make sure .hg doesn't show up even as a symlink
697
697
698 $ hg init repo0
698 $ hg init repo0
699 $ mkdir symlink-repo0
699 $ mkdir symlink-repo0
700 $ cd symlink-repo0
700 $ cd symlink-repo0
701 $ ln -s ../repo0/.hg
701 $ ln -s ../repo0/.hg
702 $ hg status
702 $ hg status
703
703
704 If the size hasn't changed but the mtime has, status needs to read the contents
704 If the size hasn't changed but the mtime has, status needs to read the contents
705 of the file to check whether it has changed
705 of the file to check whether it has changed
706
706
707 $ echo 1 > a
707 $ echo 1 > a
708 $ echo 1 > b
708 $ echo 1 > b
709 $ touch -t 200102030000 a b
709 $ touch -t 200102030000 a b
710 $ hg commit -Aqm '#0'
710 $ hg commit -Aqm '#0'
711 $ echo 2 > a
711 $ echo 2 > a
712 $ touch -t 200102040000 a b
712 $ touch -t 200102040000 a b
713 $ hg status
713 $ hg status
714 M a
714 M a
715
715
716 Asking specifically for the status of a deleted/removed file
716 Asking specifically for the status of a deleted/removed file
717
717
718 $ rm a
718 $ rm a
719 $ rm b
719 $ rm b
720 $ hg status a
720 $ hg status a
721 ! a
721 ! a
722 $ hg rm a
722 $ hg rm a
723 $ hg rm b
723 $ hg rm b
724 $ hg status a
724 $ hg status a
725 R a
725 R a
726 $ hg commit -qm '#1'
726 $ hg commit -qm '#1'
727 $ hg status a
727 $ hg status a
728 a: $ENOENT$
728 a: $ENOENT$
729
729
730 Check using the include flag with a pattern when status does not need to traverse
730 Check using the include flag with a pattern when status does not need to traverse
731 the working directory (issue6483)
731 the working directory (issue6483)
732
732
733 $ cd ..
733 $ cd ..
734 $ hg init issue6483
734 $ hg init issue6483
735 $ cd issue6483
735 $ cd issue6483
736 $ touch a.py b.rs
736 $ touch a.py b.rs
737 $ hg add a.py b.rs
737 $ hg add a.py b.rs
738 $ hg st -aI "*.py"
738 $ hg st -aI "*.py"
739 A a.py
739 A a.py
740
740
741 Also check exclude pattern
741 Also check exclude pattern
742
742
743 $ hg st -aX "*.rs"
743 $ hg st -aX "*.rs"
744 A a.py
744 A a.py
745
745
746 issue6335
746 issue6335
747 When a directory containing a tracked file gets symlinked, as of 5.8
747 When a directory containing a tracked file gets symlinked, as of 5.8
748 `hg st` only gives the correct answer about clean (or deleted) files
748 `hg st` only gives the correct answer about clean (or deleted) files
749 if also listing unknowns.
749 if also listing unknowns.
750 The tree-based dirstate and status algorithm fixes this:
750 The tree-based dirstate and status algorithm fixes this:
751
751
752 #if symlink no-dirstate-v1
752 #if symlink no-dirstate-v1
753
753
754 $ cd ..
754 $ cd ..
755 $ hg init issue6335
755 $ hg init issue6335
756 $ cd issue6335
756 $ cd issue6335
757 $ mkdir foo
757 $ mkdir foo
758 $ touch foo/a
758 $ touch foo/a
759 $ hg ci -Ama
759 $ hg ci -Ama
760 adding foo/a
760 adding foo/a
761 $ mv foo bar
761 $ mv foo bar
762 $ ln -s bar foo
762 $ ln -s bar foo
763 $ hg status
763 $ hg status
764 ! foo/a
764 ! foo/a
765 ? bar/a
765 ? bar/a
766 ? foo
766 ? foo
767
767
768 $ hg status -c # incorrect output with `dirstate-v1`
768 $ hg status -c # incorrect output with `dirstate-v1`
769 $ hg status -cu
769 $ hg status -cu
770 ? bar/a
770 ? bar/a
771 ? foo
771 ? foo
772 $ hg status -d # incorrect output with `dirstate-v1`
772 $ hg status -d # incorrect output with `dirstate-v1`
773 ! foo/a
773 ! foo/a
774 $ hg status -du
774 $ hg status -du
775 ! foo/a
775 ! foo/a
776 ? bar/a
776 ? bar/a
777 ? foo
777 ? foo
778
778
779 #endif
779 #endif
780
780
781
781
782 Create a repo with files in each possible status
782 Create a repo with files in each possible status
783
783
784 $ cd ..
784 $ cd ..
785 $ hg init repo7
785 $ hg init repo7
786 $ cd repo7
786 $ cd repo7
787 $ mkdir subdir
787 $ mkdir subdir
788 $ touch clean modified deleted removed
788 $ touch clean modified deleted removed
789 $ touch subdir/clean subdir/modified subdir/deleted subdir/removed
789 $ touch subdir/clean subdir/modified subdir/deleted subdir/removed
790 $ echo ignored > .hgignore
790 $ echo ignored > .hgignore
791 $ hg ci -Aqm '#0'
791 $ hg ci -Aqm '#0'
792 $ echo 1 > modified
792 $ echo 1 > modified
793 $ echo 1 > subdir/modified
793 $ echo 1 > subdir/modified
794 $ rm deleted
794 $ rm deleted
795 $ rm subdir/deleted
795 $ rm subdir/deleted
796 $ hg rm removed
796 $ hg rm removed
797 $ hg rm subdir/removed
797 $ hg rm subdir/removed
798 $ touch unknown ignored
798 $ touch unknown ignored
799 $ touch subdir/unknown subdir/ignored
799 $ touch subdir/unknown subdir/ignored
800
800
801 Check the output
801 Check the output
802
802
803 $ hg status
803 $ hg status
804 M modified
804 M modified
805 M subdir/modified
805 M subdir/modified
806 R removed
806 R removed
807 R subdir/removed
807 R subdir/removed
808 ! deleted
808 ! deleted
809 ! subdir/deleted
809 ! subdir/deleted
810 ? subdir/unknown
810 ? subdir/unknown
811 ? unknown
811 ? unknown
812
812
813 $ hg status -mard
813 $ hg status -mard
814 M modified
814 M modified
815 M subdir/modified
815 M subdir/modified
816 R removed
816 R removed
817 R subdir/removed
817 R subdir/removed
818 ! deleted
818 ! deleted
819 ! subdir/deleted
819 ! subdir/deleted
820
820
821 $ hg status -A
821 $ hg status -A
822 M modified
822 M modified
823 M subdir/modified
823 M subdir/modified
824 R removed
824 R removed
825 R subdir/removed
825 R subdir/removed
826 ! deleted
826 ! deleted
827 ! subdir/deleted
827 ! subdir/deleted
828 ? subdir/unknown
828 ? subdir/unknown
829 ? unknown
829 ? unknown
830 I ignored
830 I ignored
831 I subdir/ignored
831 I subdir/ignored
832 C .hgignore
832 C .hgignore
833 C clean
833 C clean
834 C subdir/clean
834 C subdir/clean
835
835
836 Note: `hg status some-name` creates a patternmatcher which is not supported
836 Note: `hg status some-name` creates a patternmatcher which is not supported
837 yet by the Rust implementation of status, but includematcher is supported.
837 yet by the Rust implementation of status, but includematcher is supported.
838 --include is used below for that reason
838 --include is used below for that reason
839
839
840 #if unix-permissions
840 #if unix-permissions
841
841
842 Not having permission to read a directory that contains tracked files makes
842 Not having permission to read a directory that contains tracked files makes
843 status emit a warning then behave as if the directory was empty or removed
843 status emit a warning then behave as if the directory was empty or removed
844 entirely:
844 entirely:
845
845
846 $ chmod 0 subdir
846 $ chmod 0 subdir
847 $ hg status --include subdir
847 $ hg status --include subdir
848 subdir: Permission denied
848 subdir: Permission denied
849 R subdir/removed
849 R subdir/removed
850 ! subdir/clean
850 ! subdir/clean
851 ! subdir/deleted
851 ! subdir/deleted
852 ! subdir/modified
852 ! subdir/modified
853 $ chmod 755 subdir
853 $ chmod 755 subdir
854
854
855 #endif
855 #endif
856
856
857 Remove a directory that contains tracked files
857 Remove a directory that contains tracked files
858
858
859 $ rm -r subdir
859 $ rm -r subdir
860 $ hg status --include subdir
860 $ hg status --include subdir
861 R subdir/removed
861 R subdir/removed
862 ! subdir/clean
862 ! subdir/clean
863 ! subdir/deleted
863 ! subdir/deleted
864 ! subdir/modified
864 ! subdir/modified
865
865
866 and replace it with a file
866 and replace it with a file
867
867
868 $ touch subdir
868 $ touch subdir
869 $ hg status --include subdir
869 $ hg status --include subdir
870 R subdir/removed
870 R subdir/removed
871 ! subdir/clean
871 ! subdir/clean
872 ! subdir/deleted
872 ! subdir/deleted
873 ! subdir/modified
873 ! subdir/modified
874 ? subdir
874 ? subdir
875
875
876 Replace a deleted or removed file with a directory
876 Replace a deleted or removed file with a directory
877
877
878 $ mkdir deleted removed
878 $ mkdir deleted removed
879 $ touch deleted/1 removed/1
879 $ touch deleted/1 removed/1
880 $ hg status --include deleted --include removed
880 $ hg status --include deleted --include removed
881 R removed
881 R removed
882 ! deleted
882 ! deleted
883 ? deleted/1
883 ? deleted/1
884 ? removed/1
884 ? removed/1
885 $ hg add removed/1
885 $ hg add removed/1
886 $ hg status --include deleted --include removed
886 $ hg status --include deleted --include removed
887 A removed/1
887 A removed/1
888 R removed
888 R removed
889 ! deleted
889 ! deleted
890 ? deleted/1
890 ? deleted/1
891
891
892 Deeply nested files in an ignored directory are still listed on request
892 Deeply nested files in an ignored directory are still listed on request
893
893
894 $ echo ignored-dir >> .hgignore
894 $ echo ignored-dir >> .hgignore
895 $ mkdir ignored-dir
895 $ mkdir ignored-dir
896 $ mkdir ignored-dir/subdir
896 $ mkdir ignored-dir/subdir
897 $ touch ignored-dir/subdir/1
897 $ touch ignored-dir/subdir/1
898 $ hg status --ignored
898 $ hg status --ignored
899 I ignored
899 I ignored
900 I ignored-dir/subdir/1
900 I ignored-dir/subdir/1
901
901
902 Check that using the include flag while listing ignored files composes correctly (issue6514)
902 Check that using the include flag while listing ignored files composes correctly (issue6514)
903
903
904 $ cd ..
904 $ cd ..
905 $ hg init issue6514
905 $ hg init issue6514
906 $ cd issue6514
906 $ cd issue6514
907 $ mkdir ignored-folder
907 $ mkdir ignored-folder
908 $ touch A.hs B.hs C.hs ignored-folder/other.txt ignored-folder/ctest.hs
908 $ touch A.hs B.hs C.hs ignored-folder/other.txt ignored-folder/ctest.hs
909 $ cat >.hgignore <<EOF
909 $ cat >.hgignore <<EOF
910 > A.hs
910 > A.hs
911 > B.hs
911 > B.hs
912 > ignored-folder/
912 > ignored-folder/
913 > EOF
913 > EOF
914 $ hg st -i -I 're:.*\.hs$'
914 $ hg st -i -I 're:.*\.hs$'
915 I A.hs
915 I A.hs
916 I B.hs
916 I B.hs
917 I ignored-folder/ctest.hs
917 I ignored-folder/ctest.hs
918
918
919 #if dirstate-v2
919 #if dirstate-v2
920
920
921 Check read_dir caching
921 Check read_dir caching
922
922
923 $ cd ..
923 $ cd ..
924 $ hg init repo8
924 $ hg init repo8
925 $ cd repo8
925 $ cd repo8
926 $ mkdir subdir
926 $ mkdir subdir
927 $ touch subdir/a subdir/b
927 $ touch subdir/a subdir/b
928 $ hg ci -Aqm '#0'
928 $ hg ci -Aqm '#0'
929
929
930 The cached mtime is initially unset
930 The cached mtime is initially unset
931
931
932 $ hg debugdirstate --dirs --no-dates | grep '^d'
932 $ hg debugdirstate --dirs --no-dates | grep '^d'
933 d 0 0 unset subdir
933 d 0 0 unset subdir
934
934
935 It is still not set when there are unknown files
935 It is still not set when there are unknown files
936
936
937 $ touch subdir/unknown
937 $ touch subdir/unknown
938 $ hg status
938 $ hg status
939 ? subdir/unknown
939 ? subdir/unknown
940 $ hg debugdirstate --dirs --no-dates | grep '^d'
940 $ hg debugdirstate --dirs --no-dates | grep '^d'
941 d 0 0 unset subdir
941 d 0 0 unset subdir
942
942
943 Now the directory is eligible for caching, so its mtime is saved in the dirstate
943 Now the directory is eligible for caching, so its mtime is saved in the dirstate
944
944
945 $ rm subdir/unknown
945 $ rm subdir/unknown
946 $ hg status
946 $ hg status
947 $ hg debugdirstate --dirs --no-dates | grep '^d'
947 $ hg debugdirstate --dirs --no-dates | grep '^d'
948 d 0 0 set subdir
948 d 0 0 set subdir
949
949
950 This time the command should be ever so slightly faster since it does not need `read_dir("subdir")`
950 This time the command should be ever so slightly faster since it does not need `read_dir("subdir")`
951
951
952 $ hg status
952 $ hg status
953
953
954 Creating a new file changes the directory's mtime, invalidating the cache
954 Creating a new file changes the directory's mtime, invalidating the cache
955
955
956 $ touch subdir/unknown
956 $ touch subdir/unknown
957 $ hg status
957 $ hg status
958 ? subdir/unknown
958 ? subdir/unknown
959
959
960 $ rm subdir/unknown
961 $ hg status
962
963 Removing a node from the dirstate resets the cache for its parent directory
964
965 $ hg forget subdir/a
966 $ hg debugdirstate --dirs --no-dates | grep '^d'
967 d 0 0 set subdir
968 $ hg ci -qm '#1'
969 $ hg debugdirstate --dirs --no-dates | grep '^d'
970 d 0 0 unset subdir
971 $ hg status
972 ? subdir/a
973
960 #endif
974 #endif
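
The new hunk at the end of this test exercises the change named in the commit title: after `hg forget subdir/a` and a commit, the `d 0 0 ...` line for `subdir` goes back to `unset`, and `hg status` reports `? subdir/a`. If the parent directory's cached `read_dir` mtime were kept, status could trust the stale cache and miss the now-untracked file. The following is a rough, self-contained sketch of that invalidation idea, using hypothetical simplified types rather than the actual `DirstateMap` implementation:

use std::collections::HashMap;

// Hypothetical, simplified model of a directory node; the real dirstate-v2
// tree stores much more (entries, on-disk offsets, etc.).
#[derive(Default)]
struct DirNode {
    // Files in this directory that the dirstate currently tracks.
    tracked: Vec<String>,
    // Cached mtime from a previous `read_dir` whose result status trusted.
    cached_read_dir_mtime: Option<u64>,
}

#[derive(Default)]
struct Tree {
    dirs: HashMap<String, DirNode>,
}

impl Tree {
    fn track(&mut self, dir: &str, file: &str) {
        self.dirs
            .entry(dir.to_owned())
            .or_default()
            .tracked
            .push(file.to_owned());
    }

    fn cache_read_dir(&mut self, dir: &str, mtime: u64) {
        if let Some(node) = self.dirs.get_mut(dir) {
            node.cached_read_dir_mtime = Some(mtime);
        }
    }

    // Removing a dirstate node must also drop the parent directory's cache:
    // the formerly-tracked file is now "unknown", so a later status run has
    // to call `read_dir` again even though the directory's mtime on disk may
    // not have changed.
    fn untrack(&mut self, dir: &str, file: &str) {
        if let Some(node) = self.dirs.get_mut(dir) {
            node.tracked.retain(|f| f != file);
            node.cached_read_dir_mtime = None; // drop the parent cache
        }
    }
}

fn main() {
    let mut tree = Tree::default();
    tree.track("subdir", "a");
    tree.track("subdir", "b");
    tree.cache_read_dir("subdir", 1_000);

    // Analogous to `hg forget subdir/a` followed by a commit in the test.
    tree.untrack("subdir", "a");
    assert!(tree.dirs["subdir"].cached_read_dir_mtime.is_none());
    println!("parent directory cache dropped after removing a node");
}
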