dirstate-v2: fix infinite loop in pure packer...
Raphaël Gomès
r49614:46d12f77 stable
@@ -1,414 +1,435 @@ mercurial/dirstateutils/v2.py
# v2.py - Pure-Python implementation of the dirstate-v2 file format
#
# Copyright Mercurial Contributors
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import struct

from ..thirdparty import attr
from .. import error, policy

parsers = policy.importmod('parsers')


# Must match the constant of the same name in
# `rust/hg-core/src/dirstate_tree/on_disk.rs`
TREE_METADATA_SIZE = 44
NODE_SIZE = 44


# Must match the `TreeMetadata` Rust struct in
# `rust/hg-core/src/dirstate_tree/on_disk.rs`. See doc-comments there.
#
# * 4 bytes: start offset of root nodes
# * 4 bytes: number of root nodes
# * 4 bytes: total number of nodes in the tree that have an entry
# * 4 bytes: total number of nodes in the tree that have a copy source
# * 4 bytes: number of bytes in the data file that are not used anymore
# * 4 bytes: unused
# * 20 bytes: SHA-1 hash of ignore patterns
TREE_METADATA = struct.Struct('>LLLLL4s20s')


# Must match the `Node` Rust struct in
# `rust/hg-core/src/dirstate_tree/on_disk.rs`. See doc-comments there.
#
# * 4 bytes: start offset of full path
# * 2 bytes: length of the full path
# * 2 bytes: length within the full path before its "base name"
# * 4 bytes: start offset of the copy source if any, or zero for no copy source
# * 2 bytes: length of the copy source if any, or unused
# * 4 bytes: start offset of child nodes
# * 4 bytes: number of child nodes
# * 4 bytes: number of descendant nodes that have an entry
# * 4 bytes: number of descendant nodes that have a "tracked" state
# * 2 bytes: flags
# * 4 bytes: expected size
# * 4 bytes: mtime seconds
# * 4 bytes: mtime nanoseconds
NODE = struct.Struct('>LHHLHLLLLHlll')


assert TREE_METADATA_SIZE == TREE_METADATA.size
assert NODE_SIZE == NODE.size

# match constant in mercurial/pure/parsers.py
DIRSTATE_V2_DIRECTORY = 1 << 5


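# --- Editorial illustration (not part of the Mercurial source) --------------
# A small sanity check of the two format strings above, assuming the standard
# struct sizes (L/l = 4 bytes, H = 2 bytes, '4s'/'20s' = 4/20 bytes). Both
# layouts must add up to the 44-byte sizes asserted above; the '>' prefix
# selects big-endian with no padding, so there are no alignment gaps.
def _illustrate_struct_layout():
    # TreeMetadata: five 4-byte counters, 4 unused bytes, a 20-byte hash
    assert struct.calcsize('>LLLLL4s20s') == 5 * 4 + 4 + 20 == TREE_METADATA_SIZE
    # Node: offsets/counters (L), lengths and flags (H), size and mtime (l)
    assert struct.calcsize('>LHHLHLLLLHlll') == NODE_SIZE

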
def parse_dirstate(map, copy_map, data, tree_metadata):
    """parse a full v2 dirstate from binary data into dictionaries:

    - map: a {path: entry} mapping that will be filled
    - copy_map: a {path: copy-source} mapping that will be filled
    - data: a binary blob containing v2 nodes data
    - tree_metadata: a binary blob of the top level node (from the docket)
    """
    (
        root_nodes_start,
        root_nodes_len,
        _nodes_with_entry_count,
        _nodes_with_copy_source_count,
        _unreachable_bytes,
        _unused,
        _ignore_patterns_hash,
    ) = TREE_METADATA.unpack(tree_metadata)
    parse_nodes(map, copy_map, data, root_nodes_start, root_nodes_len)


def parse_nodes(map, copy_map, data, start, len):
    """parse <len> nodes from <data> starting at offset <start>

    This is used by parse_dirstate to recursively fill `map` and `copy_map`.

    All directory-specific information (DIRECTORY, ALL_UNKNOWN_RECORDED,
    ALL_IGNORED_RECORDED) is ignored and does not need any processing.
    """
    for i in range(len):
        node_start = start + NODE_SIZE * i
        node_bytes = slice_with_len(data, node_start, NODE_SIZE)
        (
            path_start,
            path_len,
            _basename_start,
            copy_source_start,
            copy_source_len,
            children_start,
            children_count,
            _descendants_with_entry_count,
            _tracked_descendants_count,
            flags,
            size,
            mtime_s,
            mtime_ns,
        ) = NODE.unpack(node_bytes)

        # Parse child nodes of this node recursively
        parse_nodes(map, copy_map, data, children_start, children_count)

        item = parsers.DirstateItem.from_v2_data(flags, size, mtime_s, mtime_ns)
        if not item.any_tracked:
            continue
        path = slice_with_len(data, path_start, path_len)
        map[path] = item
        if copy_source_start:
            copy_map[path] = slice_with_len(
                data, copy_source_start, copy_source_len
            )


def slice_with_len(data, start, len):
    return data[start : start + len]


@attr.s
class Node(object):
    path = attr.ib()
    entry = attr.ib()
    parent = attr.ib(default=None)
    children_count = attr.ib(default=0)
    children_offset = attr.ib(default=0)
    descendants_with_entry = attr.ib(default=0)
    tracked_descendants = attr.ib(default=0)

    def pack(self, copy_map, paths_offset):
        path = self.path
        copy = copy_map.get(path)
        entry = self.entry

        path_start = paths_offset
        path_len = len(path)
        basename_start = path.rfind(b'/') + 1  # 0 if rfind returns -1
        if copy is not None:
            copy_source_start = paths_offset + len(path)
            copy_source_len = len(copy)
        else:
            copy_source_start = 0
            copy_source_len = 0
        if entry is not None:
            flags, size, mtime_s, mtime_ns = entry.v2_data()
        else:
            # There are no mtime-cached directories in the Python implementation
            flags = DIRSTATE_V2_DIRECTORY
            size = 0
            mtime_s = 0
            mtime_ns = 0
        return NODE.pack(
            path_start,
            path_len,
            basename_start,
            copy_source_start,
            copy_source_len,
            self.children_offset,
            self.children_count,
            self.descendants_with_entry,
            self.tracked_descendants,
            flags,
            size,
            mtime_s,
            mtime_ns,
        )


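# --- Editorial illustration (not part of the Mercurial source) --------------
# A minimal sketch of how a single directory node round-trips through the
# fixed-size NODE struct above: `Node.pack` serializes the in-memory
# attributes and `NODE.unpack` recovers the same field values. The function
# name and the sample values are only local to this example.
def _illustrate_node_roundtrip():
    node = Node(b"some/dir", None)
    node.children_count = 2
    packed = node.pack(copy_map={}, paths_offset=10)
    assert len(packed) == NODE_SIZE
    fields = NODE.unpack(packed)
    # the path starts at the given offset and is len(b"some/dir") bytes long
    assert fields[0] == 10 and fields[1] == len(b"some/dir")
    assert fields[6] == 2  # children_count
    # a node without an entry is packed as a plain directory
    assert fields[9] == DIRSTATE_V2_DIRECTORY

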
def pack_dirstate(map, copy_map, now):
    """
    Pack `map` and `copy_map` into the dirstate v2 binary format and return
    the bytearray.
    `now` is a timestamp of the current filesystem time used to detect race
    conditions in writing the dirstate to disk; see the inline comment.

    The on-disk format expects a tree-like structure where the leaves are
    written first (and sorted per-directory), going up levels until the root
    node and writing that one to the docket. See more details on the on-disk
    format in `mercurial/helptext/internals/dirstate-v2`.

    Since both `map` and `copy_map` are flat dicts we need to figure out the
    hierarchy. This algorithm does so without having to build the entire tree
    in-memory: it only keeps the minimum number of nodes around to satisfy the
    format.

    # Algorithm explanation

    This explanation does not cover the different counters for tracked
    descendants or the storing of copies, but that work is pretty simple once
    this algorithm is in place.

    ## Building a subtree

    First, sort `map`: this makes it so the leaves of the tree are contiguous
    per directory (i.e. a/b/c and a/b/d will be next to each other in the list),
    and enables us to use the ordering of folders to have a "cursor" of the
    current folder we're in without ever going twice in the same branch of the
    tree. The cursor is a node that remembers its parent and any information
    relevant to the format (see the `Node` class), building the relevant part
    of the tree lazily.
    Then, for each file in `map`, move the cursor into the tree to the
    corresponding folder of the file: for example, if the very first file
    is "a/b/c", we start from `Node[""]`, create `Node["a"]` which points to
    its parent `Node[""]`, then create `Node["a/b"]`, which points to its parent
    `Node["a"]`. These nodes are kept around in a stack.
    If the next file in `map` is in the same subtree ("a/b/d" or "a/b/e/f"), we
    add it to the stack and keep looping with the same logic of creating the
    tree nodes as needed. If however the next file in `map` is *not* in the same
    subtree ("a/other", if we're still in the "a/b" folder), then we know that
    the subtree we're in is complete.

    ## Writing the subtree

    We have the entire subtree in the stack, so we start writing it to disk
    folder by folder. The way we write a folder is to pop the stack into a list
    until the folder changes, then reverse this list of direct children (to
    satisfy the format requirement that children be sorted). This process
    repeats until we hit the "other" subtree.

    An example:
        a
        dir1/b
        dir1/c
        dir2/dir3/d
        dir2/dir3/e
        dir2/f

    Would have us:
        - add to the stack until "dir2/dir3/e"
        - realize that "dir2/f" is in a different subtree
        - pop "dir2/dir3/e", "dir2/dir3/d", reverse them so they're sorted and
          pack them since the next entry is "dir2/dir3"
        - go back up to "dir2"
        - add "dir2/f" to the stack
        - realize we're done with the map
        - pop "dir2/f", "dir2/dir3" from the stack, reverse and pack them
        - go up to the root node, do the same to write "a", "dir1" and "dir2" in
          that order

    ## Special case for the root node

    The root node is not serialized in the format, but its information is
    written to the docket. Again, see more details on the on-disk format in
    `mercurial/helptext/internals/dirstate-v2`.
    """
    data = bytearray()
    root_nodes_start = 0
    root_nodes_len = 0
    nodes_with_entry_count = 0
    nodes_with_copy_source_count = 0
    # Will always be 0 since this implementation always re-writes everything
    # to disk
    unreachable_bytes = 0
    unused = b'\x00' * 4
    # This is an optimization that's only useful for the Rust implementation
    ignore_patterns_hash = b'\x00' * 20

    if len(map) == 0:
        tree_metadata = TREE_METADATA.pack(
            root_nodes_start,
            root_nodes_len,
            nodes_with_entry_count,
            nodes_with_copy_source_count,
            unreachable_bytes,
            unused,
            ignore_patterns_hash,
        )
        return data, tree_metadata

    sorted_map = sorted(map.items(), key=lambda x: x[0])

    # Use a stack so that we only keep around the nodes we currently need,
    # instead of building the entire tree in memory
    stack = []
    current_node = Node(b"", None)
    stack.append(current_node)

    for index, (path, entry) in enumerate(sorted_map, 1):
        if entry.need_delay(now):
            # The file was last modified "simultaneously" with the current
            # write to dirstate (i.e. within the same second for file-
            # systems with a granularity of 1 sec). This commonly happens
            # for at least a couple of files on 'update'.
            # The user could change the file without changing its size
            # within the same second. Invalidate the file's mtime in
            # dirstate, forcing future 'status' calls to compare the
            # contents of the file if the size is the same. This prevents
            # mistakenly treating such files as clean.
            entry.set_possibly_dirty()
        nodes_with_entry_count += 1
        if path in copy_map:
            nodes_with_copy_source_count += 1
        current_folder = get_folder(path)
        current_node = move_to_correct_node_in_tree(
            current_folder, current_node, stack
        )

        current_node.children_count += 1
        # Entries from `map` are never `None`
        if entry.tracked:
            current_node.tracked_descendants += 1
        current_node.descendants_with_entry += 1
        stack.append(Node(path, entry, current_node))

        should_pack = True
        next_path = None
        if index < len(sorted_map):
            # Determine if the next entry is in the same sub-tree; if so,
            # don't pack yet
            next_path = sorted_map[index][0]
-            should_pack = not get_folder(next_path).startswith(current_folder)
+            should_pack = not is_ancestor(next_path, current_folder)
        if should_pack:
            pack_directory_children(current_node, copy_map, data, stack)
            while stack and current_node.path != b"":
                # Go up the tree and write until we reach the folder of the next
                # entry (if any, otherwise the root)
                parent = current_node.parent
-                in_parent_folder_of_next_entry = next_path is not None and (
-                    get_folder(next_path).startswith(get_folder(stack[-1].path))
+                in_ancestor_of_next_path = next_path is not None and (
+                    is_ancestor(next_path, get_folder(stack[-1].path))
                )
-                if parent is None or in_parent_folder_of_next_entry:
+                if parent is None or in_ancestor_of_next_path:
                    break
                pack_directory_children(parent, copy_map, data, stack)
                current_node = parent

    # Special case for the root node since we don't write it to disk, only its
    # children to the docket
    current_node = stack.pop()
    assert current_node.path == b"", current_node.path
    assert len(stack) == 0, len(stack)

    tree_metadata = TREE_METADATA.pack(
        current_node.children_offset,
        current_node.children_count,
        nodes_with_entry_count,
        nodes_with_copy_source_count,
        unreachable_bytes,
        unused,
        ignore_patterns_hash,
    )

    return data, tree_metadata


def get_folder(path):
    """
    Return the folder of the given path, or an empty string for root-level
    paths.
    """
    return path.rsplit(b'/', 1)[0] if b'/' in path else b''


+def is_ancestor(path, maybe_ancestor):
+    """Returns whether `maybe_ancestor` is an ancestor of `path`.
+
+    >>> is_ancestor(b"a", b"")
+    True
+    >>> is_ancestor(b"a/b/c", b"a/b/c")
+    False
+    >>> is_ancestor(b"hgext3rd/__init__.py", b"hgext")
+    False
+    >>> is_ancestor(b"hgext3rd/__init__.py", b"hgext3rd")
+    True
+    """
+    if maybe_ancestor == b"":
+        return True
+    if path <= maybe_ancestor:
+        return False
+    path_components = path.split(b"/")
+    ancestor_components = maybe_ancestor.split(b"/")
+    return all(c == o for c, o in zip(path_components, ancestor_components))


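# --- Editorial illustration (not part of the Mercurial source) --------------
# A small sketch of why `is_ancestor` replaces the previous `bytes.startswith`
# checks: a plain prefix test treats "hgext3rd" as if it lived inside "hgext",
# which confused both the should-pack decision above and the tree walk in
# `move_to_correct_node_in_tree` below, producing the infinite loop this
# changeset fixes (see the new test case). The component-wise comparison only
# matches whole path segments.
def _illustrate_ancestor_check():
    next_path = b"hgext3rd/__init__.py"
    current_folder = b"hgext"
    # the old prefix test claims we are still inside the "hgext" subtree
    assert get_folder(next_path).startswith(current_folder)
    # the fixed check knows "hgext3rd" is a sibling of "hgext"
    assert not is_ancestor(next_path, current_folder)

    # The same check drives the example from the pack_dirstate docstring:
    # packing is triggered exactly when the next entry leaves the current
    # folder's subtree (after "dir1/c" and after "dir2/dir3/e" here; the last
    # subtree is packed once the map is exhausted).
    paths = [
        b"a",
        b"dir1/b",
        b"dir1/c",
        b"dir2/dir3/d",
        b"dir2/dir3/e",
        b"dir2/f",
    ]
    packs = [
        not is_ancestor(nxt, get_folder(cur))
        for cur, nxt in zip(paths, paths[1:])
    ]
    assert packs == [False, False, True, False, True]

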
def move_to_correct_node_in_tree(target_folder, current_node, stack):
    """
    Move inside the dirstate node tree to the node corresponding to
    `target_folder`, creating the missing nodes along the way if needed.
    """
    while target_folder != current_node.path:
-        if target_folder.startswith(current_node.path):
+        if is_ancestor(target_folder, current_node.path):
            # We need to go down a folder
            prefix = target_folder[len(current_node.path) :].lstrip(b'/')
            subfolder_name = prefix.split(b'/', 1)[0]
            if current_node.path:
                subfolder_path = current_node.path + b'/' + subfolder_name
            else:
                subfolder_path = subfolder_name
            next_node = stack[-1]
            if next_node.path == target_folder:
                # This folder is now a file and only contains removed entries;
                # merge with the last node
                current_node = next_node
            else:
                current_node.children_count += 1
                current_node = Node(subfolder_path, None, current_node)
                stack.append(current_node)
        else:
            # We need to go up a folder
            current_node = current_node.parent
    return current_node


def pack_directory_children(node, copy_map, data, stack):
    """
    Write the binary representation of the direct sorted children of `node` to
    `data`
    """
    direct_children = []

    while stack[-1].path != b"" and get_folder(stack[-1].path) == node.path:
        direct_children.append(stack.pop())
    if not direct_children:
        raise error.ProgrammingError(b"no direct children for %r" % node.path)

    # Reverse the list of popped children to restore the sorted order
    direct_children.reverse()
    packed_children = bytearray()
    # Write the paths to `data`. Pack child nodes but don't write them yet
    for child in direct_children:
        packed = child.pack(copy_map=copy_map, paths_offset=len(data))
        packed_children.extend(packed)
        data.extend(child.path)
        data.extend(copy_map.get(child.path, b""))
        node.tracked_descendants += child.tracked_descendants
        node.descendants_with_entry += child.descendants_with_entry
    # Write the fixed-size child nodes all together
    node.children_offset = len(data)
    data.extend(packed_children)
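

# --- Editorial illustration (not part of the Mercurial source) --------------
# A minimal end-to-end sketch: packing an empty dirstate produces no node data
# and a tree metadata blob whose counters are all zero. This only exercises
# the trivial branch of `pack_dirstate`, but it shows the shape of the
# (data, tree_metadata) pair that ends up in the data file and the docket.
def _illustrate_empty_pack():
    data, tree_metadata = pack_dirstate({}, {}, now=0)
    assert data == bytearray()
    assert len(tree_metadata) == TREE_METADATA_SIZE
    fields = TREE_METADATA.unpack(tree_metadata)
    # root offset, root count, entry count, copy count, unreachable bytes
    assert fields[:5] == (0, 0, 0, 0, 0)
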
@@ -1,105 +1,123 @@
#testcases dirstate-v1 dirstate-v2

#if dirstate-v2
  $ cat >> $HGRCPATH << EOF
  > [format]
  > use-dirstate-v2=1
  > [storage]
  > dirstate-v2.slow-path=allow
  > EOF
#endif

------ Test dirstate._dirs refcounting

  $ hg init t
  $ cd t
  $ mkdir -p a/b/c/d
  $ touch a/b/c/d/x
  $ touch a/b/c/d/y
  $ touch a/b/c/d/z
  $ hg ci -Am m
  adding a/b/c/d/x
  adding a/b/c/d/y
  adding a/b/c/d/z
  $ hg mv a z
  moving a/b/c/d/x to z/b/c/d/x
  moving a/b/c/d/y to z/b/c/d/y
  moving a/b/c/d/z to z/b/c/d/z

Test name collisions

  $ rm z/b/c/d/x
  $ mkdir z/b/c/d/x
  $ touch z/b/c/d/x/y
  $ hg add z/b/c/d/x/y
  abort: file 'z/b/c/d/x' in dirstate clashes with 'z/b/c/d/x/y'
  [255]
  $ rm -rf z/b/c/d
  $ touch z/b/c/d
  $ hg add z/b/c/d
  abort: directory 'z/b/c/d' already in dirstate
  [255]

  $ cd ..

Issue1790: dirstate entry locked into unset if file mtime is set into
the future

Prepare test repo:

  $ hg init u
  $ cd u
  $ echo a > a
  $ hg add
  adding a
  $ hg ci -m1

Set mtime of a into the future:

  $ touch -t 203101011200 a

Status must not set a's entry to unset (issue1790):

  $ hg status
  $ hg debugstate
  n 644 2 2031-01-01 12:00:00 a

Test modulo storage/comparison of absurd dates:

#if no-aix
  $ touch -t 195001011200 a
  $ hg st
  $ hg debugstate
  n 644 2 2018-01-19 15:14:08 a
#endif

Verify that exceptions during a dirstate change leave the dirstate
coherent (issue4353)

  $ cat > ../dirstateexception.py <<EOF
  > from __future__ import absolute_import
  > from mercurial import (
  >     error,
  >     extensions,
  >     mergestate as mergestatemod,
  > )
  >
  > def wraprecordupdates(*args):
  >     raise error.Abort(b"simulated error while recording dirstateupdates")
  >
  > def reposetup(ui, repo):
  >     extensions.wrapfunction(mergestatemod, 'recordupdates',
  >                             wraprecordupdates)
  > EOF

  $ hg rm a
  $ hg commit -m 'rm a'
  $ echo "[extensions]" >> .hg/hgrc
  $ echo "dirstateex=../dirstateexception.py" >> .hg/hgrc
  $ hg up 0
  abort: simulated error while recording dirstateupdates
  [255]
  $ hg log -r . -T '{rev}\n'
  1
  $ hg status
  ? a

+#if dirstate-v2
+Check that folders that are prefixes of others do not throw the packer into an
+infinite loop.
+
+  $ cd ..
+  $ hg init infinite-loop
+  $ cd infinite-loop
+  $ mkdir hgext3rd hgext
+  $ touch hgext3rd/__init__.py hgext/zeroconf.py
+  $ hg commit -Aqm0
+
+  $ hg st -c
+  C hgext/zeroconf.py
+  C hgext3rd/__init__.py
+
+  $ cd ..
+#endif
@@ -1,177 +1,178 @@
# this is a hack to make sure no escape characters are inserted into the output

from __future__ import absolute_import
from __future__ import print_function

import doctest
import os
import re
import subprocess
import sys

ispy3 = sys.version_info[0] >= 3

if 'TERM' in os.environ:
    del os.environ['TERM']


class py3docchecker(doctest.OutputChecker):
    def check_output(self, want, got, optionflags):
        want2 = re.sub(r'''\bu(['"])(.*?)\1''', r'\1\2\1', want)  # py2: u''
        got2 = re.sub(r'''\bb(['"])(.*?)\1''', r'\1\2\1', got)  # py3: b''
        # py3: <exc.name>: b'<msg>' -> <name>: <msg>
        #      <exc.name>: <others> -> <name>: <others>
        got2 = re.sub(
            r'''^mercurial\.\w+\.(\w+): (['"])(.*?)\2''',
            r'\1: \3',
            got2,
            re.MULTILINE,
        )
        got2 = re.sub(r'^mercurial\.\w+\.(\w+): ', r'\1: ', got2, re.MULTILINE)
        return any(
            doctest.OutputChecker.check_output(self, w, g, optionflags)
            for w, g in [(want, got), (want2, got2)]
        )


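# --- Editorial illustration (not part of the Mercurial source) --------------
# A minimal sketch of what the checker above normalises: a doctest expectation
# written with a Python 2 style u'...' repr and actual Python 3 output using a
# b'...' repr both reduce to plain quotes, so the two sides can compare equal.
def _illustrate_normalization():
    want = "u'foo'"
    got = "b'foo'"
    want2 = re.sub(r'''\bu(['"])(.*?)\1''', r'\1\2\1', want)
    got2 = re.sub(r'''\bb(['"])(.*?)\1''', r'\1\2\1', got)
    assert want2 == got2 == "'foo'"

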
36
37 def testmod(name, optionflags=0, testtarget=None):
37 def testmod(name, optionflags=0, testtarget=None):
38 __import__(name)
38 __import__(name)
39 mod = sys.modules[name]
39 mod = sys.modules[name]
40 if testtarget is not None:
40 if testtarget is not None:
41 mod = getattr(mod, testtarget)
41 mod = getattr(mod, testtarget)
42
42
43 # minimal copy of doctest.testmod()
43 # minimal copy of doctest.testmod()
44 finder = doctest.DocTestFinder()
44 finder = doctest.DocTestFinder()
45 checker = None
45 checker = None
46 if ispy3:
46 if ispy3:
47 checker = py3docchecker()
47 checker = py3docchecker()
48 runner = doctest.DocTestRunner(checker=checker, optionflags=optionflags)
48 runner = doctest.DocTestRunner(checker=checker, optionflags=optionflags)
49 for test in finder.find(mod, name):
49 for test in finder.find(mod, name):
50 runner.run(test)
50 runner.run(test)
51 runner.summarize()
51 runner.summarize()
52
52
53
53
54 DONT_RUN = []
54 DONT_RUN = []
55
55
56 # Exceptions to the defaults for a given detected module. The value for each
56 # Exceptions to the defaults for a given detected module. The value for each
57 # module name is a list of dicts that specify the kwargs to pass to testmod.
57 # module name is a list of dicts that specify the kwargs to pass to testmod.
58 # testmod is called once per item in the list, so an empty list will cause the
58 # testmod is called once per item in the list, so an empty list will cause the
59 # module to not be tested.
59 # module to not be tested.
60 testmod_arg_overrides = {
60 testmod_arg_overrides = {
61 'i18n.check-translation': DONT_RUN, # may require extra installation
61 'i18n.check-translation': DONT_RUN, # may require extra installation
62 'mercurial.dagparser': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
62 'mercurial.dagparser': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
63 'mercurial.keepalive': DONT_RUN, # >>> is an example, not a doctest
63 'mercurial.keepalive': DONT_RUN, # >>> is an example, not a doctest
64 'mercurial.posix': DONT_RUN, # run by mercurial.platform
64 'mercurial.posix': DONT_RUN, # run by mercurial.platform
65 'mercurial.statprof': DONT_RUN, # >>> is an example, not a doctest
65 'mercurial.statprof': DONT_RUN, # >>> is an example, not a doctest
66 'mercurial.util': [{}, {'testtarget': 'platform'}], # run twice!
66 'mercurial.util': [{}, {'testtarget': 'platform'}], # run twice!
67 'mercurial.windows': DONT_RUN, # run by mercurial.platform
67 'mercurial.windows': DONT_RUN, # run by mercurial.platform
68 'tests.test-url': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
68 'tests.test-url': [{'optionflags': doctest.NORMALIZE_WHITESPACE}],
69 }
69 }
70
70
71 fileset = 'set:(**.py)'
71 fileset = 'set:(**.py)'
72
72
73 cwd = os.path.dirname(os.environ["TESTDIR"])
73 cwd = os.path.dirname(os.environ["TESTDIR"])
74
74
75 if not os.path.isdir(os.path.join(cwd, ".hg")):
75 if not os.path.isdir(os.path.join(cwd, ".hg")):
76 sys.exit(0)
76 sys.exit(0)
77
77
78 files = subprocess.check_output(
78 files = subprocess.check_output(
79 "hg files --print0 \"%s\"" % fileset,
79 "hg files --print0 \"%s\"" % fileset,
80 shell=True,
80 shell=True,
81 cwd=cwd,
81 cwd=cwd,
82 ).split(b'\0')
82 ).split(b'\0')
83
83
84 if sys.version_info[0] >= 3:
84 if sys.version_info[0] >= 3:
85 cwd = os.fsencode(cwd)
85 cwd = os.fsencode(cwd)
86
86
87 mods_tested = set()
87 mods_tested = set()
88 for f in files:
88 for f in files:
89 if not f:
89 if not f:
90 continue
90 continue
91
91
92 with open(os.path.join(cwd, f), "rb") as fh:
92 with open(os.path.join(cwd, f), "rb") as fh:
93 if not re.search(br'\n\s*>>>', fh.read()):
93 if not re.search(br'\n\s*>>>', fh.read()):
94 continue
94 continue
95
95
96 if ispy3:
96 if ispy3:
97 f = f.decode()
97 f = f.decode()
98
98
99 modname = f.replace('.py', '').replace('\\', '.').replace('/', '.')
99 modname = f.replace('.py', '').replace('\\', '.').replace('/', '.')
100
100
101 # Third-party modules aren't our responsibility to test, and the modules in
101 # Third-party modules aren't our responsibility to test, and the modules in
102 # contrib generally do not have doctests in a good state, plus they're hard
102 # contrib generally do not have doctests in a good state, plus they're hard
103 # to import if this test is running with py2, so we just skip both for now.
103 # to import if this test is running with py2, so we just skip both for now.
104 if modname.startswith('mercurial.thirdparty.') or modname.startswith(
104 if modname.startswith('mercurial.thirdparty.') or modname.startswith(
105 'contrib.'
105 'contrib.'
106 ):
106 ):
107 continue
107 continue
108
108
109 for kwargs in testmod_arg_overrides.get(modname, [{}]):
109 for kwargs in testmod_arg_overrides.get(modname, [{}]):
110 mods_tested.add((modname, '%r' % (kwargs,)))
110 mods_tested.add((modname, '%r' % (kwargs,)))
111 if modname.startswith('tests.'):
111 if modname.startswith('tests.'):
112 # On py2, we can't import from tests.foo, but it works on both py2
112 # On py2, we can't import from tests.foo, but it works on both py2
113 # and py3 with the way that PYTHONPATH is setup to import without
113 # and py3 with the way that PYTHONPATH is setup to import without
114 # the 'tests.' prefix, so we do that.
114 # the 'tests.' prefix, so we do that.
115 modname = modname[len('tests.') :]
115 modname = modname[len('tests.') :]
116
116
117 testmod(modname, **kwargs)
117 testmod(modname, **kwargs)
118
118
119 # Meta-test: let's make sure that we actually ran what we expected to, above.
119 # Meta-test: let's make sure that we actually ran what we expected to, above.
120 # Each item in the set is a 2-tuple of module name and stringified kwargs passed
120 # Each item in the set is a 2-tuple of module name and stringified kwargs passed
121 # to testmod.
121 # to testmod.
122 expected_mods_tested = set(
122 expected_mods_tested = set(
123 [
123 [
124 ('hgext.convert.convcmd', '{}'),
124 ('hgext.convert.convcmd', '{}'),
125 ('hgext.convert.cvsps', '{}'),
125 ('hgext.convert.cvsps', '{}'),
126 ('hgext.convert.filemap', '{}'),
126 ('hgext.convert.filemap', '{}'),
127 ('hgext.convert.p4', '{}'),
127 ('hgext.convert.p4', '{}'),
128 ('hgext.convert.subversion', '{}'),
128 ('hgext.convert.subversion', '{}'),
129 ('hgext.fix', '{}'),
129 ('hgext.fix', '{}'),
130 ('hgext.mq', '{}'),
130 ('hgext.mq', '{}'),
131 ('mercurial.changelog', '{}'),
131 ('mercurial.changelog', '{}'),
132 ('mercurial.cmdutil', '{}'),
132 ('mercurial.cmdutil', '{}'),
133 ('mercurial.color', '{}'),
133 ('mercurial.color', '{}'),
134 ('mercurial.dagparser', "{'optionflags': 4}"),
134 ('mercurial.dagparser', "{'optionflags': 4}"),
135 ('mercurial.dirstateutils.v2', '{}'),
135 ('mercurial.encoding', '{}'),
136 ('mercurial.encoding', '{}'),
136 ('mercurial.fancyopts', '{}'),
137 ('mercurial.fancyopts', '{}'),
137 ('mercurial.formatter', '{}'),
138 ('mercurial.formatter', '{}'),
138 ('mercurial.hg', '{}'),
139 ('mercurial.hg', '{}'),
139 ('mercurial.hgweb.hgwebdir_mod', '{}'),
140 ('mercurial.hgweb.hgwebdir_mod', '{}'),
140 ('mercurial.match', '{}'),
141 ('mercurial.match', '{}'),
141 ('mercurial.mdiff', '{}'),
142 ('mercurial.mdiff', '{}'),
142 ('mercurial.minirst', '{}'),
143 ('mercurial.minirst', '{}'),
143 ('mercurial.parser', '{}'),
144 ('mercurial.parser', '{}'),
144 ('mercurial.patch', '{}'),
145 ('mercurial.patch', '{}'),
145 ('mercurial.pathutil', '{}'),
146 ('mercurial.pathutil', '{}'),
146 ('mercurial.pycompat', '{}'),
147 ('mercurial.pycompat', '{}'),
147 ('mercurial.revlogutils.deltas', '{}'),
148 ('mercurial.revlogutils.deltas', '{}'),
148 ('mercurial.revset', '{}'),
149 ('mercurial.revset', '{}'),
149 ('mercurial.revsetlang', '{}'),
150 ('mercurial.revsetlang', '{}'),
150 ('mercurial.simplemerge', '{}'),
151 ('mercurial.simplemerge', '{}'),
151 ('mercurial.smartset', '{}'),
152 ('mercurial.smartset', '{}'),
152 ('mercurial.store', '{}'),
153 ('mercurial.store', '{}'),
153 ('mercurial.subrepo', '{}'),
154 ('mercurial.subrepo', '{}'),
154 ('mercurial.templater', '{}'),
155 ('mercurial.templater', '{}'),
155 ('mercurial.ui', '{}'),
156 ('mercurial.ui', '{}'),
156 ('mercurial.util', "{'testtarget': 'platform'}"),
157 ('mercurial.util', "{'testtarget': 'platform'}"),
157 ('mercurial.util', '{}'),
158 ('mercurial.util', '{}'),
158 ('mercurial.utils.dateutil', '{}'),
159 ('mercurial.utils.dateutil', '{}'),
159 ('mercurial.utils.stringutil', '{}'),
160 ('mercurial.utils.stringutil', '{}'),
160 ('mercurial.utils.urlutil', '{}'),
161 ('mercurial.utils.urlutil', '{}'),
161 ('tests.drawdag', '{}'),
162 ('tests.drawdag', '{}'),
162 ('tests.test-run-tests', '{}'),
163 ('tests.test-run-tests', '{}'),
163 ('tests.test-url', "{'optionflags': 4}"),
164 ('tests.test-url', "{'optionflags': 4}"),
164 ]
165 ]
165 )
166 )
166
167
167 unexpectedly_run = mods_tested.difference(expected_mods_tested)
168 unexpectedly_run = mods_tested.difference(expected_mods_tested)
168 not_run = expected_mods_tested.difference(mods_tested)
169 not_run = expected_mods_tested.difference(mods_tested)
169
170
170 if unexpectedly_run:
171 if unexpectedly_run:
171 print('Unexpectedly ran (probably need to add to list):')
172 print('Unexpectedly ran (probably need to add to list):')
172 for r in sorted(unexpectedly_run):
173 for r in sorted(unexpectedly_run):
173 print(' %r' % (r,))
174 print(' %r' % (r,))
174 if not_run:
175 if not_run:
175 print('Expected to run, but was not run (doctest removed?):')
176 print('Expected to run, but was not run (doctest removed?):')
176 for r in sorted(not_run):
177 for r in sorted(not_run):
177 print(' %r' % (r,))
178 print(' %r' % (r,))