##// END OF EJS Templates
dirstate: introduce a `set_clean` method on dirstate's map and items...
marmoute -
r48788:4e6f2723 default
parent child Browse files
Show More
@@ -1,1303 +1,1320 b''
1 /*
1 /*
2 parsers.c - efficient content parsing
2 parsers.c - efficient content parsing
3
3
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
5
5
6 This software may be used and distributed according to the terms of
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
7 the GNU General Public License, incorporated herein by reference.
8 */
8 */
9
9
10 #define PY_SSIZE_T_CLEAN
10 #define PY_SSIZE_T_CLEAN
11 #include <Python.h>
11 #include <Python.h>
12 #include <ctype.h>
12 #include <ctype.h>
13 #include <stddef.h>
13 #include <stddef.h>
14 #include <string.h>
14 #include <string.h>
15
15
16 #include "bitmanipulation.h"
16 #include "bitmanipulation.h"
17 #include "charencode.h"
17 #include "charencode.h"
18 #include "util.h"
18 #include "util.h"
19
19
20 #ifdef IS_PY3K
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 support is fully
22 * 3 to compile. We should remove this once Python 3 support is fully
23 * supported and proper types are used in the extensions themselves. */
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
27 #define PyInt_AsLong PyLong_AsLong
28 #endif
28 #endif
29
29
30 static const char *const versionerrortext = "Python minor version mismatch";
30 static const char *const versionerrortext = "Python minor version mismatch";
31
31
32 static const int dirstate_v1_from_p2 = -2;
32 static const int dirstate_v1_from_p2 = -2;
33 static const int dirstate_v1_nonnormal = -1;
33 static const int dirstate_v1_nonnormal = -1;
34 static const int ambiguous_time = -1;
34 static const int ambiguous_time = -1;
35
35
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
37 {
37 {
38 Py_ssize_t expected_size;
38 Py_ssize_t expected_size;
39
39
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
41 return NULL;
41 return NULL;
42 }
42 }
43
43
44 return _dict_new_presized(expected_size);
44 return _dict_new_presized(expected_size);
45 }
45 }
46
46
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
48 PyObject *kwds)
48 PyObject *kwds)
49 {
49 {
50 /* We do all the initialization here and not a tp_init function because
50 /* We do all the initialization here and not a tp_init function because
51 * dirstate_item is immutable. */
51 * dirstate_item is immutable. */
52 dirstateItemObject *t;
52 dirstateItemObject *t;
53 int wc_tracked;
53 int wc_tracked;
54 int p1_tracked;
54 int p1_tracked;
55 int p2_tracked;
55 int p2_tracked;
56 int merged;
56 int merged;
57 int clean_p1;
57 int clean_p1;
58 int clean_p2;
58 int clean_p2;
59 int possibly_dirty;
59 int possibly_dirty;
60 PyObject *parentfiledata;
60 PyObject *parentfiledata;
61 static char *keywords_name[] = {
61 static char *keywords_name[] = {
62 "wc_tracked", "p1_tracked", "p2_tracked",
62 "wc_tracked", "p1_tracked", "p2_tracked",
63 "merged", "clean_p1", "clean_p2",
63 "merged", "clean_p1", "clean_p2",
64 "possibly_dirty", "parentfiledata", NULL,
64 "possibly_dirty", "parentfiledata", NULL,
65 };
65 };
66 wc_tracked = 0;
66 wc_tracked = 0;
67 p1_tracked = 0;
67 p1_tracked = 0;
68 p2_tracked = 0;
68 p2_tracked = 0;
69 merged = 0;
69 merged = 0;
70 clean_p1 = 0;
70 clean_p1 = 0;
71 clean_p2 = 0;
71 clean_p2 = 0;
72 possibly_dirty = 0;
72 possibly_dirty = 0;
73 parentfiledata = Py_None;
73 parentfiledata = Py_None;
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
75 &wc_tracked, &p1_tracked, &p2_tracked,
75 &wc_tracked, &p1_tracked, &p2_tracked,
76 &merged, &clean_p1, &clean_p2,
76 &merged, &clean_p1, &clean_p2,
77 &possibly_dirty, &parentfiledata
77 &possibly_dirty, &parentfiledata
78
78
79 )) {
79 )) {
80 return NULL;
80 return NULL;
81 }
81 }
82 if (merged && (clean_p1 || clean_p2)) {
82 if (merged && (clean_p1 || clean_p2)) {
83 PyErr_SetString(PyExc_RuntimeError,
83 PyErr_SetString(PyExc_RuntimeError,
84 "`merged` argument incompatible with "
84 "`merged` argument incompatible with "
85 "`clean_p1`/`clean_p2`");
85 "`clean_p1`/`clean_p2`");
86 return NULL;
86 return NULL;
87 }
87 }
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
89 if (!t) {
89 if (!t) {
90 return NULL;
90 return NULL;
91 }
91 }
92
92
93 t->flags = 0;
93 t->flags = 0;
94 if (wc_tracked) {
94 if (wc_tracked) {
95 t->flags |= dirstate_flag_wc_tracked;
95 t->flags |= dirstate_flag_wc_tracked;
96 }
96 }
97 if (p1_tracked) {
97 if (p1_tracked) {
98 t->flags |= dirstate_flag_p1_tracked;
98 t->flags |= dirstate_flag_p1_tracked;
99 }
99 }
100 if (p2_tracked) {
100 if (p2_tracked) {
101 t->flags |= dirstate_flag_p2_tracked;
101 t->flags |= dirstate_flag_p2_tracked;
102 }
102 }
103 if (possibly_dirty) {
103 if (possibly_dirty) {
104 t->flags |= dirstate_flag_possibly_dirty;
104 t->flags |= dirstate_flag_possibly_dirty;
105 }
105 }
106 if (merged) {
106 if (merged) {
107 t->flags |= dirstate_flag_merged;
107 t->flags |= dirstate_flag_merged;
108 }
108 }
109 if (clean_p1) {
109 if (clean_p1) {
110 t->flags |= dirstate_flag_clean_p1;
110 t->flags |= dirstate_flag_clean_p1;
111 }
111 }
112 if (clean_p2) {
112 if (clean_p2) {
113 t->flags |= dirstate_flag_clean_p2;
113 t->flags |= dirstate_flag_clean_p2;
114 }
114 }
115 t->mode = 0;
115 t->mode = 0;
116 t->size = dirstate_v1_nonnormal;
116 t->size = dirstate_v1_nonnormal;
117 t->mtime = ambiguous_time;
117 t->mtime = ambiguous_time;
118 if (parentfiledata != Py_None) {
118 if (parentfiledata != Py_None) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
120 PyErr_SetString(
120 PyErr_SetString(
121 PyExc_TypeError,
121 PyExc_TypeError,
122 "parentfiledata should be a Tuple or None");
122 "parentfiledata should be a Tuple or None");
123 return NULL;
123 return NULL;
124 }
124 }
125 t->mode =
125 t->mode =
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
127 t->size =
127 t->size =
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
129 t->mtime =
129 t->mtime =
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
131 }
131 }
132 return (PyObject *)t;
132 return (PyObject *)t;
133 }
133 }
134
134
135 static void dirstate_item_dealloc(PyObject *o)
135 static void dirstate_item_dealloc(PyObject *o)
136 {
136 {
137 PyObject_Del(o);
137 PyObject_Del(o);
138 }
138 }
139
139
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
141 {
141 {
142 return (self->flags & dirstate_flag_wc_tracked);
142 return (self->flags & dirstate_flag_wc_tracked);
143 }
143 }
144
144
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
146 {
146 {
147 char mask = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
147 char mask = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
148 dirstate_flag_p2_tracked);
148 dirstate_flag_p2_tracked);
149 char target = dirstate_flag_wc_tracked;
149 char target = dirstate_flag_wc_tracked;
150 return (self->flags & mask) == target;
150 return (self->flags & mask) == target;
151 }
151 }
152
152
153 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
153 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
154 {
154 {
155 if (self->flags & dirstate_flag_wc_tracked) {
155 if (self->flags & dirstate_flag_wc_tracked) {
156 return false;
156 return false;
157 }
157 }
158 return (self->flags &
158 return (self->flags &
159 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
159 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
160 }
160 }
161
161
162 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
162 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
163 {
163 {
164 return ((self->flags & dirstate_flag_wc_tracked) &&
164 return ((self->flags & dirstate_flag_wc_tracked) &&
165 (self->flags & dirstate_flag_merged));
165 (self->flags & dirstate_flag_merged));
166 }
166 }
167
167
168 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
168 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
169 {
169 {
170 if (!dirstate_item_c_removed(self)) {
170 if (!dirstate_item_c_removed(self)) {
171 return false;
171 return false;
172 }
172 }
173 return (self->flags & dirstate_flag_merged);
173 return (self->flags & dirstate_flag_merged);
174 }
174 }
175
175
176 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
176 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
177 {
177 {
178 if (!dirstate_item_c_tracked(self)) {
178 if (!dirstate_item_c_tracked(self)) {
179 return false;
179 return false;
180 }
180 }
181 return (self->flags & dirstate_flag_clean_p2);
181 return (self->flags & dirstate_flag_clean_p2);
182 }
182 }
183
183
184 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
184 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
185 {
185 {
186 if (!dirstate_item_c_removed(self)) {
186 if (!dirstate_item_c_removed(self)) {
187 return false;
187 return false;
188 }
188 }
189 return (self->flags & dirstate_flag_clean_p2);
189 return (self->flags & dirstate_flag_clean_p2);
190 }
190 }
191
191
192 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
192 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
193 {
193 {
194 if (self->flags & dirstate_flag_rust_special) {
194 if (self->flags & dirstate_flag_rust_special) {
195 return ' ';
195 return ' ';
196 } else if (dirstate_item_c_removed(self)) {
196 } else if (dirstate_item_c_removed(self)) {
197 return 'r';
197 return 'r';
198 } else if (dirstate_item_c_merged(self)) {
198 } else if (dirstate_item_c_merged(self)) {
199 return 'm';
199 return 'm';
200 } else if (dirstate_item_c_added(self)) {
200 } else if (dirstate_item_c_added(self)) {
201 return 'a';
201 return 'a';
202 } else {
202 } else {
203 return 'n';
203 return 'n';
204 }
204 }
205 }
205 }
206
206
207 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
207 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
208 {
208 {
209 return self->mode;
209 return self->mode;
210 }
210 }
211
211
212 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
212 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
213 {
213 {
214 if (self->flags & dirstate_flag_rust_special) {
214 if (self->flags & dirstate_flag_rust_special) {
215 return self->size;
215 return self->size;
216 } else if (dirstate_item_c_merged_removed(self)) {
216 } else if (dirstate_item_c_merged_removed(self)) {
217 return dirstate_v1_nonnormal;
217 return dirstate_v1_nonnormal;
218 } else if (dirstate_item_c_from_p2_removed(self)) {
218 } else if (dirstate_item_c_from_p2_removed(self)) {
219 return dirstate_v1_from_p2;
219 return dirstate_v1_from_p2;
220 } else if (dirstate_item_c_removed(self)) {
220 } else if (dirstate_item_c_removed(self)) {
221 return 0;
221 return 0;
222 } else if (dirstate_item_c_merged(self)) {
222 } else if (dirstate_item_c_merged(self)) {
223 return dirstate_v1_from_p2;
223 return dirstate_v1_from_p2;
224 } else if (dirstate_item_c_added(self)) {
224 } else if (dirstate_item_c_added(self)) {
225 return dirstate_v1_nonnormal;
225 return dirstate_v1_nonnormal;
226 } else if (dirstate_item_c_from_p2(self)) {
226 } else if (dirstate_item_c_from_p2(self)) {
227 return dirstate_v1_from_p2;
227 return dirstate_v1_from_p2;
228 } else if (self->flags & dirstate_flag_possibly_dirty) {
228 } else if (self->flags & dirstate_flag_possibly_dirty) {
229 return self->size; /* NON NORMAL ? */
229 return self->size; /* NON NORMAL ? */
230 } else {
230 } else {
231 return self->size;
231 return self->size;
232 }
232 }
233 }
233 }
234
234
235 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
235 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
236 {
236 {
237 if (self->flags & dirstate_flag_rust_special) {
237 if (self->flags & dirstate_flag_rust_special) {
238 return self->mtime;
238 return self->mtime;
239 } else if (dirstate_item_c_removed(self)) {
239 } else if (dirstate_item_c_removed(self)) {
240 return 0;
240 return 0;
241 } else if (self->flags & dirstate_flag_possibly_dirty) {
241 } else if (self->flags & dirstate_flag_possibly_dirty) {
242 return ambiguous_time;
242 return ambiguous_time;
243 } else if (dirstate_item_c_merged(self)) {
243 } else if (dirstate_item_c_merged(self)) {
244 return ambiguous_time;
244 return ambiguous_time;
245 } else if (dirstate_item_c_added(self)) {
245 } else if (dirstate_item_c_added(self)) {
246 return ambiguous_time;
246 return ambiguous_time;
247 } else if (dirstate_item_c_from_p2(self)) {
247 } else if (dirstate_item_c_from_p2(self)) {
248 return ambiguous_time;
248 return ambiguous_time;
249 } else {
249 } else {
250 return self->mtime;
250 return self->mtime;
251 }
251 }
252 }
252 }
253
253
254 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
254 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
255 {
255 {
256 char state = dirstate_item_c_v1_state(self);
256 char state = dirstate_item_c_v1_state(self);
257 return PyBytes_FromStringAndSize(&state, 1);
257 return PyBytes_FromStringAndSize(&state, 1);
258 };
258 };
259
259
260 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
260 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
261 {
261 {
262 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
262 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
263 };
263 };
264
264
265 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
265 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
266 {
266 {
267 return PyInt_FromLong(dirstate_item_c_v1_size(self));
267 return PyInt_FromLong(dirstate_item_c_v1_size(self));
268 };
268 };
269
269
270 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
270 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
271 {
271 {
272 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
272 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
273 };
273 };
274
274
275 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
275 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
276 PyObject *value)
276 PyObject *value)
277 {
277 {
278 long now;
278 long now;
279 if (!pylong_to_long(value, &now)) {
279 if (!pylong_to_long(value, &now)) {
280 return NULL;
280 return NULL;
281 }
281 }
282 if (dirstate_item_c_v1_state(self) == 'n' &&
282 if (dirstate_item_c_v1_state(self) == 'n' &&
283 dirstate_item_c_v1_mtime(self) == now) {
283 dirstate_item_c_v1_mtime(self) == now) {
284 Py_RETURN_TRUE;
284 Py_RETURN_TRUE;
285 } else {
285 } else {
286 Py_RETURN_FALSE;
286 Py_RETURN_FALSE;
287 }
287 }
288 };
288 };
289
289
290 /* This will never change since it's bound to V1
290 /* This will never change since it's bound to V1
291 */
291 */
292 static inline dirstateItemObject *
292 static inline dirstateItemObject *
293 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
293 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
294 {
294 {
295 dirstateItemObject *t =
295 dirstateItemObject *t =
296 PyObject_New(dirstateItemObject, &dirstateItemType);
296 PyObject_New(dirstateItemObject, &dirstateItemType);
297 if (!t) {
297 if (!t) {
298 return NULL;
298 return NULL;
299 }
299 }
300
300
301 if (state == 'm') {
301 if (state == 'm') {
302 t->flags =
302 t->flags =
303 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
303 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
304 dirstate_flag_p2_tracked | dirstate_flag_merged);
304 dirstate_flag_p2_tracked | dirstate_flag_merged);
305 t->mode = 0;
305 t->mode = 0;
306 t->size = dirstate_v1_from_p2;
306 t->size = dirstate_v1_from_p2;
307 t->mtime = ambiguous_time;
307 t->mtime = ambiguous_time;
308 } else if (state == 'a') {
308 } else if (state == 'a') {
309 t->flags = dirstate_flag_wc_tracked;
309 t->flags = dirstate_flag_wc_tracked;
310 t->mode = 0;
310 t->mode = 0;
311 t->size = dirstate_v1_nonnormal;
311 t->size = dirstate_v1_nonnormal;
312 t->mtime = ambiguous_time;
312 t->mtime = ambiguous_time;
313 } else if (state == 'r') {
313 } else if (state == 'r') {
314 t->mode = 0;
314 t->mode = 0;
315 t->size = 0;
315 t->size = 0;
316 t->mtime = 0;
316 t->mtime = 0;
317 if (size == dirstate_v1_nonnormal) {
317 if (size == dirstate_v1_nonnormal) {
318 t->flags =
318 t->flags =
319 (dirstate_flag_p1_tracked |
319 (dirstate_flag_p1_tracked |
320 dirstate_flag_p2_tracked | dirstate_flag_merged);
320 dirstate_flag_p2_tracked | dirstate_flag_merged);
321 } else if (size == dirstate_v1_from_p2) {
321 } else if (size == dirstate_v1_from_p2) {
322 t->flags =
322 t->flags =
323 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
323 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
324 } else {
324 } else {
325 t->flags = dirstate_flag_p1_tracked;
325 t->flags = dirstate_flag_p1_tracked;
326 }
326 }
327 } else if (state == 'n') {
327 } else if (state == 'n') {
328 if (size == dirstate_v1_from_p2) {
328 if (size == dirstate_v1_from_p2) {
329 t->flags =
329 t->flags =
330 (dirstate_flag_wc_tracked |
330 (dirstate_flag_wc_tracked |
331 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
331 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
332 t->mode = 0;
332 t->mode = 0;
333 t->size = dirstate_v1_from_p2;
333 t->size = dirstate_v1_from_p2;
334 t->mtime = ambiguous_time;
334 t->mtime = ambiguous_time;
335 } else if (size == dirstate_v1_nonnormal) {
335 } else if (size == dirstate_v1_nonnormal) {
336 t->flags = (dirstate_flag_wc_tracked |
336 t->flags = (dirstate_flag_wc_tracked |
337 dirstate_flag_p1_tracked |
337 dirstate_flag_p1_tracked |
338 dirstate_flag_possibly_dirty);
338 dirstate_flag_possibly_dirty);
339 t->mode = 0;
339 t->mode = 0;
340 t->size = dirstate_v1_nonnormal;
340 t->size = dirstate_v1_nonnormal;
341 t->mtime = ambiguous_time;
341 t->mtime = ambiguous_time;
342 } else if (mtime == ambiguous_time) {
342 } else if (mtime == ambiguous_time) {
343 t->flags = (dirstate_flag_wc_tracked |
343 t->flags = (dirstate_flag_wc_tracked |
344 dirstate_flag_p1_tracked |
344 dirstate_flag_p1_tracked |
345 dirstate_flag_possibly_dirty);
345 dirstate_flag_possibly_dirty);
346 t->mode = mode;
346 t->mode = mode;
347 t->size = size;
347 t->size = size;
348 t->mtime = 0;
348 t->mtime = 0;
349 } else {
349 } else {
350 t->flags = (dirstate_flag_wc_tracked |
350 t->flags = (dirstate_flag_wc_tracked |
351 dirstate_flag_p1_tracked);
351 dirstate_flag_p1_tracked);
352 t->mode = mode;
352 t->mode = mode;
353 t->size = size;
353 t->size = size;
354 t->mtime = mtime;
354 t->mtime = mtime;
355 }
355 }
356 } else if (state == ' ') {
356 } else if (state == ' ') {
357 /* XXX Rust is using this special case, it should be clean up
357 /* XXX Rust is using this special case, it should be clean up
358 * later. */
358 * later. */
359 t->flags = dirstate_flag_rust_special;
359 t->flags = dirstate_flag_rust_special;
360 t->mode = mode;
360 t->mode = mode;
361 t->size = size;
361 t->size = size;
362 t->mtime = mtime;
362 t->mtime = mtime;
363 } else {
363 } else {
364 PyErr_Format(PyExc_RuntimeError,
364 PyErr_Format(PyExc_RuntimeError,
365 "unknown state: `%c` (%d, %d, %d)", state, mode,
365 "unknown state: `%c` (%d, %d, %d)", state, mode,
366 size, mtime, NULL);
366 size, mtime, NULL);
367 return NULL;
367 return NULL;
368 }
368 }
369
369
370 return t;
370 return t;
371 }
371 }
372
372
373 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
373 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
374 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
374 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
375 PyObject *args)
375 PyObject *args)
376 {
376 {
377 /* We do all the initialization here and not a tp_init function because
377 /* We do all the initialization here and not a tp_init function because
378 * dirstate_item is immutable. */
378 * dirstate_item is immutable. */
379 char state;
379 char state;
380 int size, mode, mtime;
380 int size, mode, mtime;
381 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
381 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
382 return NULL;
382 return NULL;
383 }
383 }
384 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
384 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
385 };
385 };
386
386
387 /* constructor to help legacy API to build a new "added" item
387 /* constructor to help legacy API to build a new "added" item
388
388
389 Should eventually be removed */
389 Should eventually be removed */
390 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
390 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
391 {
391 {
392 dirstateItemObject *t;
392 dirstateItemObject *t;
393 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
393 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
394 if (!t) {
394 if (!t) {
395 return NULL;
395 return NULL;
396 }
396 }
397 t->flags = dirstate_flag_wc_tracked;
397 t->flags = dirstate_flag_wc_tracked;
398 t->mode = 0;
398 t->mode = 0;
399 t->size = dirstate_v1_nonnormal;
399 t->size = dirstate_v1_nonnormal;
400 t->mtime = ambiguous_time;
400 t->mtime = ambiguous_time;
401 return (PyObject *)t;
401 return (PyObject *)t;
402 };
402 };
403
403
404 /* constructor to help legacy API to build a new "merged" item
404 /* constructor to help legacy API to build a new "merged" item
405
405
406 Should eventually be removed */
406 Should eventually be removed */
407 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
407 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
408 {
408 {
409 dirstateItemObject *t;
409 dirstateItemObject *t;
410 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
410 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
411 if (!t) {
411 if (!t) {
412 return NULL;
412 return NULL;
413 }
413 }
414 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
414 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
415 dirstate_flag_p2_tracked | dirstate_flag_merged);
415 dirstate_flag_p2_tracked | dirstate_flag_merged);
416 t->mode = 0;
416 t->mode = 0;
417 t->size = dirstate_v1_from_p2;
417 t->size = dirstate_v1_from_p2;
418 t->mtime = ambiguous_time;
418 t->mtime = ambiguous_time;
419 return (PyObject *)t;
419 return (PyObject *)t;
420 };
420 };
421
421
422 /* constructor to help legacy API to build a new "from_p2" item
422 /* constructor to help legacy API to build a new "from_p2" item
423
423
424 Should eventually be removed */
424 Should eventually be removed */
425 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
425 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
426 {
426 {
427 /* We do all the initialization here and not a tp_init function because
427 /* We do all the initialization here and not a tp_init function because
428 * dirstate_item is immutable. */
428 * dirstate_item is immutable. */
429 dirstateItemObject *t;
429 dirstateItemObject *t;
430 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
430 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
431 if (!t) {
431 if (!t) {
432 return NULL;
432 return NULL;
433 }
433 }
434 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
434 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
435 dirstate_flag_clean_p2);
435 dirstate_flag_clean_p2);
436 t->mode = 0;
436 t->mode = 0;
437 t->size = dirstate_v1_from_p2;
437 t->size = dirstate_v1_from_p2;
438 t->mtime = ambiguous_time;
438 t->mtime = ambiguous_time;
439 return (PyObject *)t;
439 return (PyObject *)t;
440 };
440 };
441
441
442 /* constructor to help legacy API to build a new "possibly" item
442 /* constructor to help legacy API to build a new "possibly" item
443
443
444 Should eventually be removed */
444 Should eventually be removed */
445 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
445 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
446 {
446 {
447 /* We do all the initialization here and not a tp_init function because
447 /* We do all the initialization here and not a tp_init function because
448 * dirstate_item is immutable. */
448 * dirstate_item is immutable. */
449 dirstateItemObject *t;
449 dirstateItemObject *t;
450 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
450 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
451 if (!t) {
451 if (!t) {
452 return NULL;
452 return NULL;
453 }
453 }
454 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
454 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
455 dirstate_flag_possibly_dirty);
455 dirstate_flag_possibly_dirty);
456 t->mode = 0;
456 t->mode = 0;
457 t->size = dirstate_v1_nonnormal;
457 t->size = dirstate_v1_nonnormal;
458 t->mtime = ambiguous_time;
458 t->mtime = ambiguous_time;
459 return (PyObject *)t;
459 return (PyObject *)t;
460 };
460 };
461
461
462 /* constructor to help legacy API to build a new "normal" item
462 /* constructor to help legacy API to build a new "normal" item
463
463
464 Should eventually be removed */
464 Should eventually be removed */
465 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
465 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
466 {
466 {
467 /* We do all the initialization here and not a tp_init function because
467 /* We do all the initialization here and not a tp_init function because
468 * dirstate_item is immutable. */
468 * dirstate_item is immutable. */
469 dirstateItemObject *t;
469 dirstateItemObject *t;
470 int size, mode, mtime;
470 int size, mode, mtime;
471 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
471 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
472 return NULL;
472 return NULL;
473 }
473 }
474
474
475 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
475 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
476 if (!t) {
476 if (!t) {
477 return NULL;
477 return NULL;
478 }
478 }
479 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
479 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
480 t->mode = mode;
480 t->mode = mode;
481 t->size = size;
481 t->size = size;
482 t->mtime = mtime;
482 t->mtime = mtime;
483 return (PyObject *)t;
483 return (PyObject *)t;
484 };
484 };
485
485
486 /* This means the next status call will have to actually check its content
486 /* This means the next status call will have to actually check its content
487 to make sure it is correct. */
487 to make sure it is correct. */
488 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
488 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
489 {
489 {
490 if (self->flags |= dirstate_flag_possibly_dirty) {
490 if (self->flags |= dirstate_flag_possibly_dirty) {
491 Py_RETURN_NONE;
491 Py_RETURN_NONE;
492 }
492 }
493 }
493 }
494
494
495 /* See docstring of the python implementation for details */
496 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
497 PyObject *args)
498 {
499 int size, mode, mtime;
500 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
501 return NULL;
502 }
503 self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked;
504 self->mode = mode;
505 self->size = size;
506 self->mtime = mtime;
507 Py_RETURN_NONE;
508 }
509
495 static PyObject *dirstate_item_set_untracked(dirstateItemObject *self)
510 static PyObject *dirstate_item_set_untracked(dirstateItemObject *self)
496 {
511 {
497 self->flags &= ~dirstate_flag_wc_tracked;
512 self->flags &= ~dirstate_flag_wc_tracked;
498 self->mode = 0;
513 self->mode = 0;
499 self->mtime = 0;
514 self->mtime = 0;
500 self->size = 0;
515 self->size = 0;
501 Py_RETURN_NONE;
516 Py_RETURN_NONE;
502 }
517 }
503
518
504 static PyMethodDef dirstate_item_methods[] = {
519 static PyMethodDef dirstate_item_methods[] = {
505 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
520 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
506 "return a \"state\" suitable for v1 serialization"},
521 "return a \"state\" suitable for v1 serialization"},
507 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
522 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
508 "return a \"mode\" suitable for v1 serialization"},
523 "return a \"mode\" suitable for v1 serialization"},
509 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
524 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
510 "return a \"size\" suitable for v1 serialization"},
525 "return a \"size\" suitable for v1 serialization"},
511 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
526 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
512 "return a \"mtime\" suitable for v1 serialization"},
527 "return a \"mtime\" suitable for v1 serialization"},
513 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
528 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
514 "True if the stored mtime would be ambiguous with the current time"},
529 "True if the stored mtime would be ambiguous with the current time"},
515 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
530 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
516 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
531 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
517 {"new_added", (PyCFunction)dirstate_item_new_added,
532 {"new_added", (PyCFunction)dirstate_item_new_added,
518 METH_NOARGS | METH_CLASS,
533 METH_NOARGS | METH_CLASS,
519 "constructor to help legacy API to build a new \"added\" item"},
534 "constructor to help legacy API to build a new \"added\" item"},
520 {"new_merged", (PyCFunction)dirstate_item_new_merged,
535 {"new_merged", (PyCFunction)dirstate_item_new_merged,
521 METH_NOARGS | METH_CLASS,
536 METH_NOARGS | METH_CLASS,
522 "constructor to help legacy API to build a new \"merged\" item"},
537 "constructor to help legacy API to build a new \"merged\" item"},
523 {"new_from_p2", (PyCFunction)dirstate_item_new_from_p2,
538 {"new_from_p2", (PyCFunction)dirstate_item_new_from_p2,
524 METH_NOARGS | METH_CLASS,
539 METH_NOARGS | METH_CLASS,
525 "constructor to help legacy API to build a new \"from_p2\" item"},
540 "constructor to help legacy API to build a new \"from_p2\" item"},
526 {"new_possibly_dirty", (PyCFunction)dirstate_item_new_possibly_dirty,
541 {"new_possibly_dirty", (PyCFunction)dirstate_item_new_possibly_dirty,
527 METH_NOARGS | METH_CLASS,
542 METH_NOARGS | METH_CLASS,
528 "constructor to help legacy API to build a new \"possibly_dirty\" item"},
543 "constructor to help legacy API to build a new \"possibly_dirty\" item"},
529 {"new_normal", (PyCFunction)dirstate_item_new_normal,
544 {"new_normal", (PyCFunction)dirstate_item_new_normal,
530 METH_VARARGS | METH_CLASS,
545 METH_VARARGS | METH_CLASS,
531 "constructor to help legacy API to build a new \"normal\" item"},
546 "constructor to help legacy API to build a new \"normal\" item"},
532 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
547 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
533 METH_NOARGS, "mark a file as \"possibly dirty\""},
548 METH_NOARGS, "mark a file as \"possibly dirty\""},
549 {"set_clean", (PyCFunction)dirstate_item_set_clean, METH_VARARGS,
550 "mark a file as \"clean\""},
534 {"set_untracked", (PyCFunction)dirstate_item_set_untracked, METH_NOARGS,
551 {"set_untracked", (PyCFunction)dirstate_item_set_untracked, METH_NOARGS,
535 "mark a file as \"untracked\""},
552 "mark a file as \"untracked\""},
536 {NULL} /* Sentinel */
553 {NULL} /* Sentinel */
537 };
554 };
538
555
539 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
556 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
540 {
557 {
541 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
558 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
542 };
559 };
543
560
544 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
561 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
545 {
562 {
546 return PyInt_FromLong(dirstate_item_c_v1_size(self));
563 return PyInt_FromLong(dirstate_item_c_v1_size(self));
547 };
564 };
548
565
549 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
566 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
550 {
567 {
551 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
568 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
552 };
569 };
553
570
554 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
571 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
555 {
572 {
556 char state = dirstate_item_c_v1_state(self);
573 char state = dirstate_item_c_v1_state(self);
557 return PyBytes_FromStringAndSize(&state, 1);
574 return PyBytes_FromStringAndSize(&state, 1);
558 };
575 };
559
576
560 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
577 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
561 {
578 {
562 if (dirstate_item_c_tracked(self)) {
579 if (dirstate_item_c_tracked(self)) {
563 Py_RETURN_TRUE;
580 Py_RETURN_TRUE;
564 } else {
581 } else {
565 Py_RETURN_FALSE;
582 Py_RETURN_FALSE;
566 }
583 }
567 };
584 };
568
585
569 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
586 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
570 {
587 {
571 if (dirstate_item_c_added(self)) {
588 if (dirstate_item_c_added(self)) {
572 Py_RETURN_TRUE;
589 Py_RETURN_TRUE;
573 } else {
590 } else {
574 Py_RETURN_FALSE;
591 Py_RETURN_FALSE;
575 }
592 }
576 };
593 };
577
594
578 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
595 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
579 {
596 {
580 if (dirstate_item_c_merged(self)) {
597 if (dirstate_item_c_merged(self)) {
581 Py_RETURN_TRUE;
598 Py_RETURN_TRUE;
582 } else {
599 } else {
583 Py_RETURN_FALSE;
600 Py_RETURN_FALSE;
584 }
601 }
585 };
602 };
586
603
587 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
604 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
588 {
605 {
589 if (dirstate_item_c_merged_removed(self)) {
606 if (dirstate_item_c_merged_removed(self)) {
590 Py_RETURN_TRUE;
607 Py_RETURN_TRUE;
591 } else {
608 } else {
592 Py_RETURN_FALSE;
609 Py_RETURN_FALSE;
593 }
610 }
594 };
611 };
595
612
596 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
613 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
597 {
614 {
598 if (dirstate_item_c_from_p2(self)) {
615 if (dirstate_item_c_from_p2(self)) {
599 Py_RETURN_TRUE;
616 Py_RETURN_TRUE;
600 } else {
617 } else {
601 Py_RETURN_FALSE;
618 Py_RETURN_FALSE;
602 }
619 }
603 };
620 };
604
621
605 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
622 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
606 {
623 {
607 if (dirstate_item_c_from_p2_removed(self)) {
624 if (dirstate_item_c_from_p2_removed(self)) {
608 Py_RETURN_TRUE;
625 Py_RETURN_TRUE;
609 } else {
626 } else {
610 Py_RETURN_FALSE;
627 Py_RETURN_FALSE;
611 }
628 }
612 };
629 };
613
630
614 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
631 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
615 {
632 {
616 if (dirstate_item_c_removed(self)) {
633 if (dirstate_item_c_removed(self)) {
617 Py_RETURN_TRUE;
634 Py_RETURN_TRUE;
618 } else {
635 } else {
619 Py_RETURN_FALSE;
636 Py_RETURN_FALSE;
620 }
637 }
621 };
638 };
622
639
623 static PyObject *dm_nonnormal(dirstateItemObject *self)
640 static PyObject *dm_nonnormal(dirstateItemObject *self)
624 {
641 {
625 if ((dirstate_item_c_v1_state(self) != 'n') ||
642 if ((dirstate_item_c_v1_state(self) != 'n') ||
626 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
643 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
627 Py_RETURN_TRUE;
644 Py_RETURN_TRUE;
628 } else {
645 } else {
629 Py_RETURN_FALSE;
646 Py_RETURN_FALSE;
630 }
647 }
631 };
648 };
632 static PyObject *dm_otherparent(dirstateItemObject *self)
649 static PyObject *dm_otherparent(dirstateItemObject *self)
633 {
650 {
634 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
651 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
635 Py_RETURN_TRUE;
652 Py_RETURN_TRUE;
636 } else {
653 } else {
637 Py_RETURN_FALSE;
654 Py_RETURN_FALSE;
638 }
655 }
639 };
656 };
640
657
/*
 * Attribute table for the DirstateItem type: read-only properties that
 * expose the decoded v1 fields (mode/size/mtime/state) and the derived
 * booleans describing how the entry relates to the working copy and the
 * two parents. Backed by the dirstate_item_get_* helpers above.
 */
static PyGetSetDef dirstate_item_getset[] = {
    {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
    {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
    {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
    {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
    {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
    {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
    {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
     "merged_removed", NULL},
    {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
    {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
     "from_p2_removed", NULL},
    {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
    {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
    {"dm_nonnormal", (getter)dm_nonnormal, NULL, "dm_nonnormal", NULL},
    {"dm_otherparent", (getter)dm_otherparent, NULL, "dm_otherparent", NULL},
    {NULL} /* Sentinel */
};
659
676
/*
 * Python type object for DirstateItem (exposed under the historical
 * name "dirstate_tuple"): wires together the constructor
 * (dirstate_item_new), the methods table and the getset table above.
 */
PyTypeObject dirstateItemType = {
    PyVarObject_HEAD_INIT(NULL, 0) /* header */
    "dirstate_tuple",              /* tp_name */
    sizeof(dirstateItemObject),    /* tp_basicsize */
    0,                             /* tp_itemsize */
    (destructor)dirstate_item_dealloc, /* tp_dealloc */
    0,                             /* tp_print */
    0,                             /* tp_getattr */
    0,                             /* tp_setattr */
    0,                             /* tp_compare */
    0,                             /* tp_repr */
    0,                             /* tp_as_number */
    0,                             /* tp_as_sequence */
    0,                             /* tp_as_mapping */
    0,                             /* tp_hash */
    0,                             /* tp_call */
    0,                             /* tp_str */
    0,                             /* tp_getattro */
    0,                             /* tp_setattro */
    0,                             /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,            /* tp_flags */
    "dirstate tuple",              /* tp_doc */
    0,                             /* tp_traverse */
    0,                             /* tp_clear */
    0,                             /* tp_richcompare */
    0,                             /* tp_weaklistoffset */
    0,                             /* tp_iter */
    0,                             /* tp_iternext */
    dirstate_item_methods,         /* tp_methods */
    0,                             /* tp_members */
    dirstate_item_getset,          /* tp_getset */
    0,                             /* tp_base */
    0,                             /* tp_dict */
    0,                             /* tp_descr_get */
    0,                             /* tp_descr_set */
    0,                             /* tp_dictoffset */
    0,                             /* tp_init */
    0,                             /* tp_alloc */
    dirstate_item_new,             /* tp_new */
};
700
717
701 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
718 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
702 {
719 {
703 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
720 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
704 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
721 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
705 char state, *cur, *str, *cpos;
722 char state, *cur, *str, *cpos;
706 int mode, size, mtime;
723 int mode, size, mtime;
707 unsigned int flen, pos = 40;
724 unsigned int flen, pos = 40;
708 Py_ssize_t len = 40;
725 Py_ssize_t len = 40;
709 Py_ssize_t readlen;
726 Py_ssize_t readlen;
710
727
711 if (!PyArg_ParseTuple(
728 if (!PyArg_ParseTuple(
712 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
729 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
713 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
730 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
714 goto quit;
731 goto quit;
715 }
732 }
716
733
717 len = readlen;
734 len = readlen;
718
735
719 /* read parents */
736 /* read parents */
720 if (len < 40) {
737 if (len < 40) {
721 PyErr_SetString(PyExc_ValueError,
738 PyErr_SetString(PyExc_ValueError,
722 "too little data for parents");
739 "too little data for parents");
723 goto quit;
740 goto quit;
724 }
741 }
725
742
726 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
743 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
727 str + 20, (Py_ssize_t)20);
744 str + 20, (Py_ssize_t)20);
728 if (!parents) {
745 if (!parents) {
729 goto quit;
746 goto quit;
730 }
747 }
731
748
732 /* read filenames */
749 /* read filenames */
733 while (pos >= 40 && pos < len) {
750 while (pos >= 40 && pos < len) {
734 if (pos + 17 > len) {
751 if (pos + 17 > len) {
735 PyErr_SetString(PyExc_ValueError,
752 PyErr_SetString(PyExc_ValueError,
736 "overflow in dirstate");
753 "overflow in dirstate");
737 goto quit;
754 goto quit;
738 }
755 }
739 cur = str + pos;
756 cur = str + pos;
740 /* unpack header */
757 /* unpack header */
741 state = *cur;
758 state = *cur;
742 mode = getbe32(cur + 1);
759 mode = getbe32(cur + 1);
743 size = getbe32(cur + 5);
760 size = getbe32(cur + 5);
744 mtime = getbe32(cur + 9);
761 mtime = getbe32(cur + 9);
745 flen = getbe32(cur + 13);
762 flen = getbe32(cur + 13);
746 pos += 17;
763 pos += 17;
747 cur += 17;
764 cur += 17;
748 if (flen > len - pos) {
765 if (flen > len - pos) {
749 PyErr_SetString(PyExc_ValueError,
766 PyErr_SetString(PyExc_ValueError,
750 "overflow in dirstate");
767 "overflow in dirstate");
751 goto quit;
768 goto quit;
752 }
769 }
753
770
754 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
771 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
755 size, mtime);
772 size, mtime);
756 cpos = memchr(cur, 0, flen);
773 cpos = memchr(cur, 0, flen);
757 if (cpos) {
774 if (cpos) {
758 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
775 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
759 cname = PyBytes_FromStringAndSize(
776 cname = PyBytes_FromStringAndSize(
760 cpos + 1, flen - (cpos - cur) - 1);
777 cpos + 1, flen - (cpos - cur) - 1);
761 if (!fname || !cname ||
778 if (!fname || !cname ||
762 PyDict_SetItem(cmap, fname, cname) == -1 ||
779 PyDict_SetItem(cmap, fname, cname) == -1 ||
763 PyDict_SetItem(dmap, fname, entry) == -1) {
780 PyDict_SetItem(dmap, fname, entry) == -1) {
764 goto quit;
781 goto quit;
765 }
782 }
766 Py_DECREF(cname);
783 Py_DECREF(cname);
767 } else {
784 } else {
768 fname = PyBytes_FromStringAndSize(cur, flen);
785 fname = PyBytes_FromStringAndSize(cur, flen);
769 if (!fname ||
786 if (!fname ||
770 PyDict_SetItem(dmap, fname, entry) == -1) {
787 PyDict_SetItem(dmap, fname, entry) == -1) {
771 goto quit;
788 goto quit;
772 }
789 }
773 }
790 }
774 Py_DECREF(fname);
791 Py_DECREF(fname);
775 Py_DECREF(entry);
792 Py_DECREF(entry);
776 fname = cname = entry = NULL;
793 fname = cname = entry = NULL;
777 pos += flen;
794 pos += flen;
778 }
795 }
779
796
780 ret = parents;
797 ret = parents;
781 Py_INCREF(ret);
798 Py_INCREF(ret);
782 quit:
799 quit:
783 Py_XDECREF(fname);
800 Py_XDECREF(fname);
784 Py_XDECREF(cname);
801 Py_XDECREF(cname);
785 Py_XDECREF(entry);
802 Py_XDECREF(entry);
786 Py_XDECREF(parents);
803 Py_XDECREF(parents);
787 return ret;
804 return ret;
788 }
805 }
789
806
790 /*
807 /*
791 * Build a set of non-normal and other parent entries from the dirstate dmap
808 * Build a set of non-normal and other parent entries from the dirstate dmap
792 */
809 */
/*
 * Build a set of non-normal and other parent entries from the dirstate dmap
 *
 * Argument: a single {filename: DirstateItem} dict. Returns a 2-tuple
 * (nonnormal_set, otherparent_set) of filename sets, or NULL with an
 * exception set.
 */
static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
{
	PyObject *dmap, *fname, *v;
	PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
	Py_ssize_t pos;

	if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
	                      &dmap)) {
		goto bail;
	}

	nonnset = PySet_New(NULL);
	if (nonnset == NULL) {
		goto bail;
	}

	otherpset = PySet_New(NULL);
	if (otherpset == NULL) {
		goto bail;
	}

	pos = 0;
	/* fname/v are borrowed references from PyDict_Next */
	while (PyDict_Next(dmap, &pos, &fname, &v)) {
		dirstateItemObject *t;
		if (!dirstate_tuple_check(v)) {
			PyErr_SetString(PyExc_TypeError,
			                "expected a dirstate tuple");
			goto bail;
		}
		t = (dirstateItemObject *)v;

		/* entries coming from the second parent */
		if (dirstate_item_c_from_p2(t)) {
			if (PySet_Add(otherpset, fname) == -1) {
				goto bail;
			}
		}
		/* "non-normal": not tracked in the working copy, tracked in
		 * neither parent, or flagged possibly-dirty/merged */
		if (!(t->flags & dirstate_flag_wc_tracked) ||
		    !(t->flags &
		      (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked)) ||
		    (t->flags &
		     (dirstate_flag_possibly_dirty | dirstate_flag_merged))) {
			if (PySet_Add(nonnset, fname) == -1) {
				goto bail;
			}
		}
	}

	result = Py_BuildValue("(OO)", nonnset, otherpset);
	if (result == NULL) {
		goto bail;
	}
	Py_DECREF(nonnset);
	Py_DECREF(otherpset);
	return result;
bail:
	Py_XDECREF(nonnset);
	Py_XDECREF(otherpset);
	Py_XDECREF(result);
	return NULL;
}
853
870
854 /*
871 /*
855 * Efficiently pack a dirstate object into its on-disk format.
872 * Efficiently pack a dirstate object into its on-disk format.
856 */
873 */
/*
 * Efficiently pack a dirstate object into its on-disk format.
 *
 * Arguments: (map dict, copymap dict, (p1, p2) tuple, now int).
 * Returns the serialized bytes, or NULL with an exception set.
 * Side effect: entries in state 'n' whose mtime equals `now` are
 * rewritten in `map` with mtime -1, so a later status run re-checks
 * them (see pure/parsers.py:pack_dirstate for the rationale).
 */
static PyObject *pack_dirstate(PyObject *self, PyObject *args)
{
	PyObject *packobj = NULL;
	PyObject *map, *copymap, *pl, *mtime_unset = NULL;
	Py_ssize_t nbytes, pos, l;
	PyObject *k, *v = NULL, *pn;
	char *p, *s;
	int now;

	if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
	                      &PyDict_Type, &copymap, &PyTuple_Type, &pl,
	                      &now)) {
		return NULL;
	}

	if (PyTuple_Size(pl) != 2) {
		PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
		return NULL;
	}

	/* Figure out how much we need to allocate. */
	for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
		PyObject *c;
		if (!PyBytes_Check(k)) {
			PyErr_SetString(PyExc_TypeError, "expected string key");
			goto bail;
		}
		/* 17-byte record header plus the file name */
		nbytes += PyBytes_GET_SIZE(k) + 17;
		c = PyDict_GetItem(copymap, k);
		if (c) {
			if (!PyBytes_Check(c)) {
				PyErr_SetString(PyExc_TypeError,
				                "expected string key");
				goto bail;
			}
			/* '\0' separator plus the copy source name */
			nbytes += PyBytes_GET_SIZE(c) + 1;
		}
	}

	packobj = PyBytes_FromStringAndSize(NULL, nbytes);
	if (packobj == NULL) {
		goto bail;
	}

	p = PyBytes_AS_STRING(packobj);

	/* the two parent nodeids come first, 20 bytes each */
	pn = PyTuple_GET_ITEM(pl, 0);
	if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
		PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
		goto bail;
	}
	memcpy(p, s, l);
	p += 20;
	pn = PyTuple_GET_ITEM(pl, 1);
	if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
		PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
		goto bail;
	}
	memcpy(p, s, l);
	p += 20;

	for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
		dirstateItemObject *tuple;
		char state;
		int mode, size, mtime;
		Py_ssize_t len, l;
		PyObject *o;
		char *t;

		if (!dirstate_tuple_check(v)) {
			PyErr_SetString(PyExc_TypeError,
			                "expected a dirstate tuple");
			goto bail;
		}
		tuple = (dirstateItemObject *)v;

		state = dirstate_item_c_v1_state(tuple);
		mode = dirstate_item_c_v1_mode(tuple);
		size = dirstate_item_c_v1_size(tuple);
		mtime = dirstate_item_c_v1_mtime(tuple);
		if (state == 'n' && mtime == now) {
			/* See pure/parsers.py:pack_dirstate for why we do
			 * this. */
			mtime = -1;
			mtime_unset = (PyObject *)dirstate_item_from_v1_data(
			    state, mode, size, mtime);
			if (!mtime_unset) {
				goto bail;
			}
			if (PyDict_SetItem(map, k, mtime_unset) == -1) {
				goto bail;
			}
			Py_DECREF(mtime_unset);
			mtime_unset = NULL;
		}
		/* record header: state byte, then mode/size/mtime big-endian */
		*p++ = state;
		putbe32((uint32_t)mode, p);
		putbe32((uint32_t)size, p + 4);
		putbe32((uint32_t)mtime, p + 8);
		/* the length field at t is patched below, once we know
		 * whether a copy source follows the name */
		t = p + 12;
		p += 16;
		len = PyBytes_GET_SIZE(k);
		memcpy(p, PyBytes_AS_STRING(k), len);
		p += len;
		o = PyDict_GetItem(copymap, k);
		if (o) {
			*p++ = '\0';
			l = PyBytes_GET_SIZE(o);
			memcpy(p, PyBytes_AS_STRING(o), l);
			p += l;
			len += l + 1;
		}
		putbe32((uint32_t)len, t);
	}

	/* sanity check: the two passes must agree on the size */
	pos = p - PyBytes_AS_STRING(packobj);
	if (pos != nbytes) {
		PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
		             (long)pos, (long)nbytes);
		goto bail;
	}

	return packobj;
bail:
	Py_XDECREF(mtime_unset);
	Py_XDECREF(packobj);
	/* NOTE(review): v only ever holds borrowed references from
	 * PyDict_Next; this Py_XDECREF looks like an over-decref --
	 * confirm against upstream before changing */
	Py_XDECREF(v);
	return NULL;
}
986
1003
987 #define BUMPED_FIX 1
1004 #define BUMPED_FIX 1
988 #define USING_SHA_256 2
1005 #define USING_SHA_256 2
989 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
1006 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
990
1007
991 static PyObject *readshas(const char *source, unsigned char num,
1008 static PyObject *readshas(const char *source, unsigned char num,
992 Py_ssize_t hashwidth)
1009 Py_ssize_t hashwidth)
993 {
1010 {
994 int i;
1011 int i;
995 PyObject *list = PyTuple_New(num);
1012 PyObject *list = PyTuple_New(num);
996 if (list == NULL) {
1013 if (list == NULL) {
997 return NULL;
1014 return NULL;
998 }
1015 }
999 for (i = 0; i < num; i++) {
1016 for (i = 0; i < num; i++) {
1000 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1017 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1001 if (hash == NULL) {
1018 if (hash == NULL) {
1002 Py_DECREF(list);
1019 Py_DECREF(list);
1003 return NULL;
1020 return NULL;
1004 }
1021 }
1005 PyTuple_SET_ITEM(list, i, hash);
1022 PyTuple_SET_ITEM(list, i, hash);
1006 source += hashwidth;
1023 source += hashwidth;
1007 }
1024 }
1008 return list;
1025 return list;
1009 }
1026 }
1010
1027
/*
 * Decode one version-1 obsolescence marker starting at `databegin`.
 *
 * On success, stores the marker's total on-disk size in *msize and
 * returns a (prec, succs, flags, metadata, (mtime, tz_seconds),
 * parents) tuple. Returns NULL with ValueError set on truncated input
 * ("overflow in obsstore"), or with the pending exception on
 * allocation failure.
 */
static PyObject *fm1readmarker(const char *databegin, const char *dataend,
                               uint32_t *msize)
{
	const char *data = databegin;
	const char *meta;

	double mtime;
	int16_t tz;
	uint16_t flags;
	unsigned char nsuccs, nparents, nmetadata;
	Py_ssize_t hashwidth = 20;

	PyObject *prec = NULL, *parents = NULL, *succs = NULL;
	PyObject *metadata = NULL, *ret = NULL;
	int i;

	if (data + FM1_HEADER_SIZE > dataend) {
		goto overflow;
	}

	/* fixed header: total size, mtime, timezone, flags */
	*msize = getbe32(data);
	data += 4;
	mtime = getbefloat64(data);
	data += 8;
	tz = getbeint16(data);
	data += 2;
	flags = getbeuint16(data);
	data += 2;

	/* SHA-256 markers carry 32-byte hashes instead of 20 */
	if (flags & USING_SHA_256) {
		hashwidth = 32;
	}

	nsuccs = (unsigned char)(*data++);
	nparents = (unsigned char)(*data++);
	nmetadata = (unsigned char)(*data++);

	if (databegin + *msize > dataend) {
		goto overflow;
	}
	dataend = databegin + *msize; /* narrow down to marker size */

	/* predecessor hash */
	if (data + hashwidth > dataend) {
		goto overflow;
	}
	prec = PyBytes_FromStringAndSize(data, hashwidth);
	data += hashwidth;
	if (prec == NULL) {
		goto bail;
	}

	/* successor hashes */
	if (data + nsuccs * hashwidth > dataend) {
		goto overflow;
	}
	succs = readshas(data, nsuccs, hashwidth);
	if (succs == NULL) {
		goto bail;
	}
	data += nsuccs * hashwidth;

	/* parent counts other than 1 or 2 map to None */
	if (nparents == 1 || nparents == 2) {
		if (data + nparents * hashwidth > dataend) {
			goto overflow;
		}
		parents = readshas(data, nparents, hashwidth);
		if (parents == NULL) {
			goto bail;
		}
		data += nparents * hashwidth;
	} else {
		parents = Py_None;
		Py_INCREF(parents);
	}

	/* metadata: nmetadata (keysize, valuesize) byte pairs, followed by
	 * the concatenated key/value strings starting at `meta` */
	if (data + 2 * nmetadata > dataend) {
		goto overflow;
	}
	meta = data + (2 * nmetadata);
	metadata = PyTuple_New(nmetadata);
	if (metadata == NULL) {
		goto bail;
	}
	for (i = 0; i < nmetadata; i++) {
		PyObject *tmp, *left = NULL, *right = NULL;
		Py_ssize_t leftsize = (unsigned char)(*data++);
		Py_ssize_t rightsize = (unsigned char)(*data++);
		if (meta + leftsize + rightsize > dataend) {
			goto overflow;
		}
		left = PyBytes_FromStringAndSize(meta, leftsize);
		meta += leftsize;
		right = PyBytes_FromStringAndSize(meta, rightsize);
		meta += rightsize;
		tmp = PyTuple_New(2);
		if (!left || !right || !tmp) {
			Py_XDECREF(left);
			Py_XDECREF(right);
			Py_XDECREF(tmp);
			goto bail;
		}
		/* SET_ITEM steals the references to left/right/tmp */
		PyTuple_SET_ITEM(tmp, 0, left);
		PyTuple_SET_ITEM(tmp, 1, right);
		PyTuple_SET_ITEM(metadata, i, tmp);
	}
	/* tz appears to be stored in minutes; *60 converts to seconds --
	 * TODO confirm against the obsstore format docs */
	ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
	                    (int)tz * 60, parents);
	goto bail; /* return successfully */

overflow:
	PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
bail:
	Py_XDECREF(prec);
	Py_XDECREF(succs);
	Py_XDECREF(metadata);
	Py_XDECREF(parents);
	return ret;
}
1128
1145
1129 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1146 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1130 {
1147 {
1131 const char *data, *dataend;
1148 const char *data, *dataend;
1132 Py_ssize_t datalen, offset, stop;
1149 Py_ssize_t datalen, offset, stop;
1133 PyObject *markers = NULL;
1150 PyObject *markers = NULL;
1134
1151
1135 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1152 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1136 &offset, &stop)) {
1153 &offset, &stop)) {
1137 return NULL;
1154 return NULL;
1138 }
1155 }
1139 if (offset < 0) {
1156 if (offset < 0) {
1140 PyErr_SetString(PyExc_ValueError,
1157 PyErr_SetString(PyExc_ValueError,
1141 "invalid negative offset in fm1readmarkers");
1158 "invalid negative offset in fm1readmarkers");
1142 return NULL;
1159 return NULL;
1143 }
1160 }
1144 if (stop > datalen) {
1161 if (stop > datalen) {
1145 PyErr_SetString(
1162 PyErr_SetString(
1146 PyExc_ValueError,
1163 PyExc_ValueError,
1147 "stop longer than data length in fm1readmarkers");
1164 "stop longer than data length in fm1readmarkers");
1148 return NULL;
1165 return NULL;
1149 }
1166 }
1150 dataend = data + datalen;
1167 dataend = data + datalen;
1151 data += offset;
1168 data += offset;
1152 markers = PyList_New(0);
1169 markers = PyList_New(0);
1153 if (!markers) {
1170 if (!markers) {
1154 return NULL;
1171 return NULL;
1155 }
1172 }
1156 while (offset < stop) {
1173 while (offset < stop) {
1157 uint32_t msize;
1174 uint32_t msize;
1158 int error;
1175 int error;
1159 PyObject *record = fm1readmarker(data, dataend, &msize);
1176 PyObject *record = fm1readmarker(data, dataend, &msize);
1160 if (!record) {
1177 if (!record) {
1161 goto bail;
1178 goto bail;
1162 }
1179 }
1163 error = PyList_Append(markers, record);
1180 error = PyList_Append(markers, record);
1164 Py_DECREF(record);
1181 Py_DECREF(record);
1165 if (error) {
1182 if (error) {
1166 goto bail;
1183 goto bail;
1167 }
1184 }
1168 data += msize;
1185 data += msize;
1169 offset += msize;
1186 offset += msize;
1170 }
1187 }
1171 return markers;
1188 return markers;
1172 bail:
1189 bail:
1173 Py_DECREF(markers);
1190 Py_DECREF(markers);
1174 return NULL;
1191 return NULL;
1175 }
1192 }
1176
1193
1177 static char parsers_doc[] = "Efficient content parsing.";
1194 static char parsers_doc[] = "Efficient content parsing.";
1178
1195
1179 PyObject *encodedir(PyObject *self, PyObject *args);
1196 PyObject *encodedir(PyObject *self, PyObject *args);
1180 PyObject *pathencode(PyObject *self, PyObject *args);
1197 PyObject *pathencode(PyObject *self, PyObject *args);
1181 PyObject *lowerencode(PyObject *self, PyObject *args);
1198 PyObject *lowerencode(PyObject *self, PyObject *args);
1182 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1199 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1183
1200
1184 static PyMethodDef methods[] = {
1201 static PyMethodDef methods[] = {
1185 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1202 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1186 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1203 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1187 "create a set containing non-normal and other parent entries of given "
1204 "create a set containing non-normal and other parent entries of given "
1188 "dirstate\n"},
1205 "dirstate\n"},
1189 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1206 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1190 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1207 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1191 "parse a revlog index\n"},
1208 "parse a revlog index\n"},
1192 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1209 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1193 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1210 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1194 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1211 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1195 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1212 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1196 "construct a dict with an expected size\n"},
1213 "construct a dict with an expected size\n"},
1197 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1214 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1198 "make file foldmap\n"},
1215 "make file foldmap\n"},
1199 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1216 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1200 "escape a UTF-8 byte string to JSON (fast path)\n"},
1217 "escape a UTF-8 byte string to JSON (fast path)\n"},
1201 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1218 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1202 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1219 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1203 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1220 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1204 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1221 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1205 "parse v1 obsolete markers\n"},
1222 "parse v1 obsolete markers\n"},
1206 {NULL, NULL}};
1223 {NULL, NULL}};
1207
1224
1208 void dirs_module_init(PyObject *mod);
1225 void dirs_module_init(PyObject *mod);
1209 void manifest_module_init(PyObject *mod);
1226 void manifest_module_init(PyObject *mod);
1210 void revlog_module_init(PyObject *mod);
1227 void revlog_module_init(PyObject *mod);
1211
1228
1212 static const int version = 20;
1229 static const int version = 20;
1213
1230
1214 static void module_init(PyObject *mod)
1231 static void module_init(PyObject *mod)
1215 {
1232 {
1216 PyObject *capsule = NULL;
1233 PyObject *capsule = NULL;
1217 PyModule_AddIntConstant(mod, "version", version);
1234 PyModule_AddIntConstant(mod, "version", version);
1218
1235
1219 /* This module constant has two purposes. First, it lets us unit test
1236 /* This module constant has two purposes. First, it lets us unit test
1220 * the ImportError raised without hard-coding any error text. This
1237 * the ImportError raised without hard-coding any error text. This
1221 * means we can change the text in the future without breaking tests,
1238 * means we can change the text in the future without breaking tests,
1222 * even across changesets without a recompile. Second, its presence
1239 * even across changesets without a recompile. Second, its presence
1223 * can be used to determine whether the version-checking logic is
1240 * can be used to determine whether the version-checking logic is
1224 * present, which also helps in testing across changesets without a
1241 * present, which also helps in testing across changesets without a
1225 * recompile. Note that this means the pure-Python version of parsers
1242 * recompile. Note that this means the pure-Python version of parsers
1226 * should not have this module constant. */
1243 * should not have this module constant. */
1227 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1244 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1228
1245
1229 dirs_module_init(mod);
1246 dirs_module_init(mod);
1230 manifest_module_init(mod);
1247 manifest_module_init(mod);
1231 revlog_module_init(mod);
1248 revlog_module_init(mod);
1232
1249
1233 capsule = PyCapsule_New(
1250 capsule = PyCapsule_New(
1234 dirstate_item_from_v1_data,
1251 dirstate_item_from_v1_data,
1235 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
1252 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
1236 if (capsule != NULL)
1253 if (capsule != NULL)
1237 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
1254 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
1238
1255
1239 if (PyType_Ready(&dirstateItemType) < 0) {
1256 if (PyType_Ready(&dirstateItemType) < 0) {
1240 return;
1257 return;
1241 }
1258 }
1242 Py_INCREF(&dirstateItemType);
1259 Py_INCREF(&dirstateItemType);
1243 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1260 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1244 }
1261 }
1245
1262
1246 static int check_python_version(void)
1263 static int check_python_version(void)
1247 {
1264 {
1248 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1265 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1249 long hexversion;
1266 long hexversion;
1250 if (!sys) {
1267 if (!sys) {
1251 return -1;
1268 return -1;
1252 }
1269 }
1253 ver = PyObject_GetAttrString(sys, "hexversion");
1270 ver = PyObject_GetAttrString(sys, "hexversion");
1254 Py_DECREF(sys);
1271 Py_DECREF(sys);
1255 if (!ver) {
1272 if (!ver) {
1256 return -1;
1273 return -1;
1257 }
1274 }
1258 hexversion = PyInt_AsLong(ver);
1275 hexversion = PyInt_AsLong(ver);
1259 Py_DECREF(ver);
1276 Py_DECREF(ver);
1260 /* sys.hexversion is a 32-bit number by default, so the -1 case
1277 /* sys.hexversion is a 32-bit number by default, so the -1 case
1261 * should only occur in unusual circumstances (e.g. if sys.hexversion
1278 * should only occur in unusual circumstances (e.g. if sys.hexversion
1262 * is manually set to an invalid value). */
1279 * is manually set to an invalid value). */
1263 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1280 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1264 PyErr_Format(PyExc_ImportError,
1281 PyErr_Format(PyExc_ImportError,
1265 "%s: The Mercurial extension "
1282 "%s: The Mercurial extension "
1266 "modules were compiled with Python " PY_VERSION
1283 "modules were compiled with Python " PY_VERSION
1267 ", but "
1284 ", but "
1268 "Mercurial is currently using Python with "
1285 "Mercurial is currently using Python with "
1269 "sys.hexversion=%ld: "
1286 "sys.hexversion=%ld: "
1270 "Python %s\n at: %s",
1287 "Python %s\n at: %s",
1271 versionerrortext, hexversion, Py_GetVersion(),
1288 versionerrortext, hexversion, Py_GetVersion(),
1272 Py_GetProgramFullPath());
1289 Py_GetProgramFullPath());
1273 return -1;
1290 return -1;
1274 }
1291 }
1275 return 0;
1292 return 0;
1276 }
1293 }
1277
1294
1278 #ifdef IS_PY3K
1295 #ifdef IS_PY3K
1279 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1296 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1280 parsers_doc, -1, methods};
1297 parsers_doc, -1, methods};
1281
1298
1282 PyMODINIT_FUNC PyInit_parsers(void)
1299 PyMODINIT_FUNC PyInit_parsers(void)
1283 {
1300 {
1284 PyObject *mod;
1301 PyObject *mod;
1285
1302
1286 if (check_python_version() == -1)
1303 if (check_python_version() == -1)
1287 return NULL;
1304 return NULL;
1288 mod = PyModule_Create(&parsers_module);
1305 mod = PyModule_Create(&parsers_module);
1289 module_init(mod);
1306 module_init(mod);
1290 return mod;
1307 return mod;
1291 }
1308 }
1292 #else
1309 #else
1293 PyMODINIT_FUNC initparsers(void)
1310 PyMODINIT_FUNC initparsers(void)
1294 {
1311 {
1295 PyObject *mod;
1312 PyObject *mod;
1296
1313
1297 if (check_python_version() == -1) {
1314 if (check_python_version() == -1) {
1298 return;
1315 return;
1299 }
1316 }
1300 mod = Py_InitModule3("parsers", methods, parsers_doc);
1317 mod = Py_InitModule3("parsers", methods, parsers_doc);
1301 module_init(mod);
1318 module_init(mod);
1302 }
1319 }
1303 #endif
1320 #endif
@@ -1,1615 +1,1614 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 def requires_parents_change(func):
75 def requires_parents_change(func):
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if not self.pendingparentchange():
77 if not self.pendingparentchange():
78 msg = 'calling `%s` outside of a parentchange context'
78 msg = 'calling `%s` outside of a parentchange context'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_no_parents_change(func):
86 def requires_no_parents_change(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if self.pendingparentchange():
88 if self.pendingparentchange():
89 msg = 'calling `%s` inside of a parentchange context'
89 msg = 'calling `%s` inside of a parentchange context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return wrap
94 return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
99 def __init__(
99 def __init__(
100 self,
100 self,
101 opener,
101 opener,
102 ui,
102 ui,
103 root,
103 root,
104 validate,
104 validate,
105 sparsematchfn,
105 sparsematchfn,
106 nodeconstants,
106 nodeconstants,
107 use_dirstate_v2,
107 use_dirstate_v2,
108 ):
108 ):
109 """Create a new dirstate object.
109 """Create a new dirstate object.
110
110
111 opener is an open()-like callable that can be used to open the
111 opener is an open()-like callable that can be used to open the
112 dirstate file; root is the root of the directory tracked by
112 dirstate file; root is the root of the directory tracked by
113 the dirstate.
113 the dirstate.
114 """
114 """
115 self._use_dirstate_v2 = use_dirstate_v2
115 self._use_dirstate_v2 = use_dirstate_v2
116 self._nodeconstants = nodeconstants
116 self._nodeconstants = nodeconstants
117 self._opener = opener
117 self._opener = opener
118 self._validate = validate
118 self._validate = validate
119 self._root = root
119 self._root = root
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 self._dirty = False
124 self._dirty = False
125 self._lastnormaltime = 0
125 self._lastnormaltime = 0
126 self._ui = ui
126 self._ui = ui
127 self._filecache = {}
127 self._filecache = {}
128 self._parentwriters = 0
128 self._parentwriters = 0
129 self._filename = b'dirstate'
129 self._filename = b'dirstate'
130 self._pendingfilename = b'%s.pending' % self._filename
130 self._pendingfilename = b'%s.pending' % self._filename
131 self._plchangecallbacks = {}
131 self._plchangecallbacks = {}
132 self._origpl = None
132 self._origpl = None
133 self._updatedfiles = set()
133 self._updatedfiles = set()
134 self._mapcls = dirstatemap.dirstatemap
134 self._mapcls = dirstatemap.dirstatemap
135 # Access and cache cwd early, so we don't access it for the first time
135 # Access and cache cwd early, so we don't access it for the first time
136 # after a working-copy update caused it to not exist (accessing it then
136 # after a working-copy update caused it to not exist (accessing it then
137 # raises an exception).
137 # raises an exception).
138 self._cwd
138 self._cwd
139
139
140 def prefetch_parents(self):
140 def prefetch_parents(self):
141 """make sure the parents are loaded
141 """make sure the parents are loaded
142
142
143 Used to avoid a race condition.
143 Used to avoid a race condition.
144 """
144 """
145 self._pl
145 self._pl
146
146
147 @contextlib.contextmanager
147 @contextlib.contextmanager
148 def parentchange(self):
148 def parentchange(self):
149 """Context manager for handling dirstate parents.
149 """Context manager for handling dirstate parents.
150
150
151 If an exception occurs in the scope of the context manager,
151 If an exception occurs in the scope of the context manager,
152 the incoherent dirstate won't be written when wlock is
152 the incoherent dirstate won't be written when wlock is
153 released.
153 released.
154 """
154 """
155 self._parentwriters += 1
155 self._parentwriters += 1
156 yield
156 yield
157 # Typically we want the "undo" step of a context manager in a
157 # Typically we want the "undo" step of a context manager in a
158 # finally block so it happens even when an exception
158 # finally block so it happens even when an exception
159 # occurs. In this case, however, we only want to decrement
159 # occurs. In this case, however, we only want to decrement
160 # parentwriters if the code in the with statement exits
160 # parentwriters if the code in the with statement exits
161 # normally, so we don't have a try/finally here on purpose.
161 # normally, so we don't have a try/finally here on purpose.
162 self._parentwriters -= 1
162 self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
170 @propertycache
170 @propertycache
171 def _map(self):
171 def _map(self):
172 """Return the dirstate contents (see documentation for dirstatemap)."""
172 """Return the dirstate contents (see documentation for dirstatemap)."""
173 self._map = self._mapcls(
173 self._map = self._mapcls(
174 self._ui,
174 self._ui,
175 self._opener,
175 self._opener,
176 self._root,
176 self._root,
177 self._nodeconstants,
177 self._nodeconstants,
178 self._use_dirstate_v2,
178 self._use_dirstate_v2,
179 )
179 )
180 return self._map
180 return self._map
181
181
182 @property
182 @property
183 def _sparsematcher(self):
183 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
184 """The matcher for the sparse checkout.
185
185
186 The working directory may not include every file from a manifest. The
186 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
187 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
188 included in the working directory.
189 """
189 """
190 # TODO there is potential to cache this property. For now, the matcher
190 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
191 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
192 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
193 return self._sparsematchfn()
194
194
195 @repocache(b'branch')
195 @repocache(b'branch')
196 def _branch(self):
196 def _branch(self):
197 try:
197 try:
198 return self._opener.read(b"branch").strip() or b"default"
198 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
199 except IOError as inst:
200 if inst.errno != errno.ENOENT:
200 if inst.errno != errno.ENOENT:
201 raise
201 raise
202 return b"default"
202 return b"default"
203
203
204 @property
204 @property
205 def _pl(self):
205 def _pl(self):
206 return self._map.parents()
206 return self._map.parents()
207
207
208 def hasdir(self, d):
208 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
209 return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
220 @propertycache
220 @propertycache
221 def _slash(self):
221 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
224 @propertycache
224 @propertycache
225 def _checklink(self):
225 def _checklink(self):
226 return util.checklink(self._root)
226 return util.checklink(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkexec(self):
229 def _checkexec(self):
230 return bool(util.checkexec(self._root))
230 return bool(util.checkexec(self._root))
231
231
232 @propertycache
232 @propertycache
233 def _checkcase(self):
233 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
234 return not util.fscasesensitive(self._join(b'.hg'))
235
235
236 def _join(self, f):
236 def _join(self, f):
237 # much faster than os.path.join()
237 # much faster than os.path.join()
238 # it's safe because f is always a relative path
238 # it's safe because f is always a relative path
239 return self._rootdir + f
239 return self._rootdir + f
240
240
241 def flagfunc(self, buildfallback):
241 def flagfunc(self, buildfallback):
242 if self._checklink and self._checkexec:
242 if self._checklink and self._checkexec:
243
243
244 def f(x):
244 def f(x):
245 try:
245 try:
246 st = os.lstat(self._join(x))
246 st = os.lstat(self._join(x))
247 if util.statislink(st):
247 if util.statislink(st):
248 return b'l'
248 return b'l'
249 if util.statisexec(st):
249 if util.statisexec(st):
250 return b'x'
250 return b'x'
251 except OSError:
251 except OSError:
252 pass
252 pass
253 return b''
253 return b''
254
254
255 return f
255 return f
256
256
257 fallback = buildfallback()
257 fallback = buildfallback()
258 if self._checklink:
258 if self._checklink:
259
259
260 def f(x):
260 def f(x):
261 if os.path.islink(self._join(x)):
261 if os.path.islink(self._join(x)):
262 return b'l'
262 return b'l'
263 if b'x' in fallback(x):
263 if b'x' in fallback(x):
264 return b'x'
264 return b'x'
265 return b''
265 return b''
266
266
267 return f
267 return f
268 if self._checkexec:
268 if self._checkexec:
269
269
270 def f(x):
270 def f(x):
271 if b'l' in fallback(x):
271 if b'l' in fallback(x):
272 return b'l'
272 return b'l'
273 if util.isexec(self._join(x)):
273 if util.isexec(self._join(x)):
274 return b'x'
274 return b'x'
275 return b''
275 return b''
276
276
277 return f
277 return f
278 else:
278 else:
279 return fallback
279 return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
309 def pathto(self, f, cwd=None):
309 def pathto(self, f, cwd=None):
310 if cwd is None:
310 if cwd is None:
311 cwd = self.getcwd()
311 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
312 path = util.pathto(self._root, cwd, f)
313 if self._slash:
313 if self._slash:
314 return util.pconvert(path)
314 return util.pconvert(path)
315 return path
315 return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
336 def __contains__(self, key):
336 def __contains__(self, key):
337 return key in self._map
337 return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
342 def items(self):
342 def items(self):
343 return pycompat.iteritems(self._map)
343 return pycompat.iteritems(self._map)
344
344
345 iteritems = items
345 iteritems = items
346
346
347 def directories(self):
347 def directories(self):
348 return self._map.directories()
348 return self._map.directories()
349
349
350 def parents(self):
350 def parents(self):
351 return [self._validate(p) for p in self._pl]
351 return [self._validate(p) for p in self._pl]
352
352
353 def p1(self):
353 def p1(self):
354 return self._validate(self._pl[0])
354 return self._validate(self._pl[0])
355
355
356 def p2(self):
356 def p2(self):
357 return self._validate(self._pl[1])
357 return self._validate(self._pl[1])
358
358
359 @property
359 @property
360 def in_merge(self):
360 def in_merge(self):
361 """True if a merge is in progress"""
361 """True if a merge is in progress"""
362 return self._pl[1] != self._nodeconstants.nullid
362 return self._pl[1] != self._nodeconstants.nullid
363
363
364 def branch(self):
364 def branch(self):
365 return encoding.tolocal(self._branch)
365 return encoding.tolocal(self._branch)
366
366
367 def setparents(self, p1, p2=None):
367 def setparents(self, p1, p2=None):
368 """Set dirstate parents to p1 and p2.
368 """Set dirstate parents to p1 and p2.
369
369
370 When moving from two parents to one, "merged" entries a
370 When moving from two parents to one, "merged" entries a
371 adjusted to normal and previous copy records discarded and
371 adjusted to normal and previous copy records discarded and
372 returned by the call.
372 returned by the call.
373
373
374 See localrepo.setparents()
374 See localrepo.setparents()
375 """
375 """
376 if p2 is None:
376 if p2 is None:
377 p2 = self._nodeconstants.nullid
377 p2 = self._nodeconstants.nullid
378 if self._parentwriters == 0:
378 if self._parentwriters == 0:
379 raise ValueError(
379 raise ValueError(
380 b"cannot set dirstate parent outside of "
380 b"cannot set dirstate parent outside of "
381 b"dirstate.parentchange context manager"
381 b"dirstate.parentchange context manager"
382 )
382 )
383
383
384 self._dirty = True
384 self._dirty = True
385 oldp2 = self._pl[1]
385 oldp2 = self._pl[1]
386 if self._origpl is None:
386 if self._origpl is None:
387 self._origpl = self._pl
387 self._origpl = self._pl
388 self._map.setparents(p1, p2)
388 self._map.setparents(p1, p2)
389 copies = {}
389 copies = {}
390 if (
390 if (
391 oldp2 != self._nodeconstants.nullid
391 oldp2 != self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
393 ):
393 ):
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
395
395
396 for f in candidatefiles:
396 for f in candidatefiles:
397 s = self._map.get(f)
397 s = self._map.get(f)
398 if s is None:
398 if s is None:
399 continue
399 continue
400
400
401 # Discard "merged" markers when moving away from a merge state
401 # Discard "merged" markers when moving away from a merge state
402 if s.merged:
402 if s.merged:
403 source = self._map.copymap.get(f)
403 source = self._map.copymap.get(f)
404 if source:
404 if source:
405 copies[f] = source
405 copies[f] = source
406 self._normallookup(f)
406 self._normallookup(f)
407 # Also fix up otherparent markers
407 # Also fix up otherparent markers
408 elif s.from_p2:
408 elif s.from_p2:
409 source = self._map.copymap.get(f)
409 source = self._map.copymap.get(f)
410 if source:
410 if source:
411 copies[f] = source
411 copies[f] = source
412 self._add(f)
412 self._add(f)
413 return copies
413 return copies
414
414
415 def setbranch(self, branch):
415 def setbranch(self, branch):
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
418 try:
418 try:
419 f.write(self._branch + b'\n')
419 f.write(self._branch + b'\n')
420 f.close()
420 f.close()
421
421
422 # make sure filecache has the correct stat info for _branch after
422 # make sure filecache has the correct stat info for _branch after
423 # replacing the underlying file
423 # replacing the underlying file
424 ce = self._filecache[b'_branch']
424 ce = self._filecache[b'_branch']
425 if ce:
425 if ce:
426 ce.refresh()
426 ce.refresh()
427 except: # re-raises
427 except: # re-raises
428 f.discard()
428 f.discard()
429 raise
429 raise
430
430
431 def invalidate(self):
431 def invalidate(self):
432 """Causes the next access to reread the dirstate.
432 """Causes the next access to reread the dirstate.
433
433
434 This is different from localrepo.invalidatedirstate() because it always
434 This is different from localrepo.invalidatedirstate() because it always
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
436 check whether the dirstate has changed before rereading it."""
436 check whether the dirstate has changed before rereading it."""
437
437
438 for a in ("_map", "_branch", "_ignore"):
438 for a in ("_map", "_branch", "_ignore"):
439 if a in self.__dict__:
439 if a in self.__dict__:
440 delattr(self, a)
440 delattr(self, a)
441 self._lastnormaltime = 0
441 self._lastnormaltime = 0
442 self._dirty = False
442 self._dirty = False
443 self._updatedfiles.clear()
443 self._updatedfiles.clear()
444 self._parentwriters = 0
444 self._parentwriters = 0
445 self._origpl = None
445 self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
459 def copied(self, file):
459 def copied(self, file):
460 return self._map.copymap.get(file, None)
460 return self._map.copymap.get(file, None)
461
461
462 def copies(self):
462 def copies(self):
463 return self._map.copymap
463 return self._map.copymap
464
464
465 @requires_no_parents_change
465 @requires_no_parents_change
466 def set_tracked(self, filename):
466 def set_tracked(self, filename):
467 """a "public" method for generic code to mark a file as tracked
467 """a "public" method for generic code to mark a file as tracked
468
468
469 This function is to be called outside of "update/merge" case. For
469 This function is to be called outside of "update/merge" case. For
470 example by a command like `hg add X`.
470 example by a command like `hg add X`.
471
471
472 return True the file was previously untracked, False otherwise.
472 return True the file was previously untracked, False otherwise.
473 """
473 """
474 entry = self._map.get(filename)
474 entry = self._map.get(filename)
475 if entry is None:
475 if entry is None:
476 self._add(filename)
476 self._add(filename)
477 return True
477 return True
478 elif not entry.tracked:
478 elif not entry.tracked:
479 self._normallookup(filename)
479 self._normallookup(filename)
480 return True
480 return True
481 # XXX This is probably overkill for more case, but we need this to
481 # XXX This is probably overkill for more case, but we need this to
482 # fully replace the `normallookup` call with `set_tracked` one.
482 # fully replace the `normallookup` call with `set_tracked` one.
483 # Consider smoothing this in the future.
483 # Consider smoothing this in the future.
484 self.set_possibly_dirty(filename)
484 self.set_possibly_dirty(filename)
485 return False
485 return False
486
486
487 @requires_no_parents_change
487 @requires_no_parents_change
488 def set_untracked(self, filename):
488 def set_untracked(self, filename):
489 """a "public" method for generic code to mark a file as untracked
489 """a "public" method for generic code to mark a file as untracked
490
490
491 This function is to be called outside of "update/merge" case. For
491 This function is to be called outside of "update/merge" case. For
492 example by a command like `hg remove X`.
492 example by a command like `hg remove X`.
493
493
494 return True the file was previously tracked, False otherwise.
494 return True the file was previously tracked, False otherwise.
495 """
495 """
496 ret = self._map.set_untracked(filename)
496 ret = self._map.set_untracked(filename)
497 if ret:
497 if ret:
498 self._dirty = True
498 self._dirty = True
499 self._updatedfiles.add(filename)
499 self._updatedfiles.add(filename)
500 return ret
500 return ret
501
501
502 @requires_no_parents_change
502 @requires_no_parents_change
503 def set_clean(self, filename, parentfiledata=None):
503 def set_clean(self, filename, parentfiledata=None):
504 """record that the current state of the file on disk is known to be clean"""
504 """record that the current state of the file on disk is known to be clean"""
505 self._dirty = True
505 self._dirty = True
506 self._updatedfiles.add(filename)
506 self._updatedfiles.add(filename)
507 if parentfiledata:
507 if parentfiledata:
508 (mode, size, mtime) = parentfiledata
508 (mode, size, mtime) = parentfiledata
509 else:
509 else:
510 (mode, size, mtime) = self._get_filedata(filename)
510 (mode, size, mtime) = self._get_filedata(filename)
511 self._addpath(filename, mode=mode, size=size, mtime=mtime)
511 if not self._map[filename].tracked:
512 self._map.copymap.pop(filename, None)
512 self._check_new_tracked_filename(filename)
513 if filename in self._map.nonnormalset:
513 self._map.set_clean(filename, mode, size, mtime)
514 self._map.nonnormalset.remove(filename)
515 if mtime > self._lastnormaltime:
514 if mtime > self._lastnormaltime:
516 # Remember the most recent modification timeslot for status(),
515 # Remember the most recent modification timeslot for status(),
517 # to make sure we won't miss future size-preserving file content
516 # to make sure we won't miss future size-preserving file content
518 # modifications that happen within the same timeslot.
517 # modifications that happen within the same timeslot.
519 self._lastnormaltime = mtime
518 self._lastnormaltime = mtime
520
519
521 @requires_no_parents_change
520 @requires_no_parents_change
522 def set_possibly_dirty(self, filename):
521 def set_possibly_dirty(self, filename):
523 """record that the current state of the file on disk is unknown"""
522 """record that the current state of the file on disk is unknown"""
524 self._dirty = True
523 self._dirty = True
525 self._updatedfiles.add(filename)
524 self._updatedfiles.add(filename)
526 self._map.set_possibly_dirty(filename)
525 self._map.set_possibly_dirty(filename)
527
526
528 @requires_parents_change
527 @requires_parents_change
529 def update_file_p1(
528 def update_file_p1(
530 self,
529 self,
531 filename,
530 filename,
532 p1_tracked,
531 p1_tracked,
533 ):
532 ):
534 """Set a file as tracked in the parent (or not)
533 """Set a file as tracked in the parent (or not)
535
534
536 This is to be called when adjust the dirstate to a new parent after an history
535 This is to be called when adjust the dirstate to a new parent after an history
537 rewriting operation.
536 rewriting operation.
538
537
539 It should not be called during a merge (p2 != nullid) and only within
538 It should not be called during a merge (p2 != nullid) and only within
540 a `with dirstate.parentchange():` context.
539 a `with dirstate.parentchange():` context.
541 """
540 """
542 if self.in_merge:
541 if self.in_merge:
543 msg = b'update_file_reference should not be called when merging'
542 msg = b'update_file_reference should not be called when merging'
544 raise error.ProgrammingError(msg)
543 raise error.ProgrammingError(msg)
545 entry = self._map.get(filename)
544 entry = self._map.get(filename)
546 if entry is None:
545 if entry is None:
547 wc_tracked = False
546 wc_tracked = False
548 else:
547 else:
549 wc_tracked = entry.tracked
548 wc_tracked = entry.tracked
550 possibly_dirty = False
549 possibly_dirty = False
551 if p1_tracked and wc_tracked:
550 if p1_tracked and wc_tracked:
552 # the underlying reference might have changed, we will have to
551 # the underlying reference might have changed, we will have to
553 # check it.
552 # check it.
554 possibly_dirty = True
553 possibly_dirty = True
555 elif not (p1_tracked or wc_tracked):
554 elif not (p1_tracked or wc_tracked):
556 # the file is no longer relevant to anyone
555 # the file is no longer relevant to anyone
557 self._drop(filename)
556 self._drop(filename)
558 elif (not p1_tracked) and wc_tracked:
557 elif (not p1_tracked) and wc_tracked:
559 if entry is not None and entry.added:
558 if entry is not None and entry.added:
560 return # avoid dropping copy information (maybe?)
559 return # avoid dropping copy information (maybe?)
561 elif p1_tracked and not wc_tracked:
560 elif p1_tracked and not wc_tracked:
562 pass
561 pass
563 else:
562 else:
564 assert False, 'unreachable'
563 assert False, 'unreachable'
565
564
566 # this mean we are doing call for file we do not really care about the
565 # this mean we are doing call for file we do not really care about the
567 # data (eg: added or removed), however this should be a minor overhead
566 # data (eg: added or removed), however this should be a minor overhead
568 # compared to the overall update process calling this.
567 # compared to the overall update process calling this.
569 parentfiledata = None
568 parentfiledata = None
570 if wc_tracked:
569 if wc_tracked:
571 parentfiledata = self._get_filedata(filename)
570 parentfiledata = self._get_filedata(filename)
572
571
573 self._updatedfiles.add(filename)
572 self._updatedfiles.add(filename)
574 self._map.reset_state(
573 self._map.reset_state(
575 filename,
574 filename,
576 wc_tracked,
575 wc_tracked,
577 p1_tracked,
576 p1_tracked,
578 possibly_dirty=possibly_dirty,
577 possibly_dirty=possibly_dirty,
579 parentfiledata=parentfiledata,
578 parentfiledata=parentfiledata,
580 )
579 )
581 if (
580 if (
582 parentfiledata is not None
581 parentfiledata is not None
583 and parentfiledata[2] > self._lastnormaltime
582 and parentfiledata[2] > self._lastnormaltime
584 ):
583 ):
585 # Remember the most recent modification timeslot for status(),
584 # Remember the most recent modification timeslot for status(),
586 # to make sure we won't miss future size-preserving file content
585 # to make sure we won't miss future size-preserving file content
587 # modifications that happen within the same timeslot.
586 # modifications that happen within the same timeslot.
588 self._lastnormaltime = parentfiledata[2]
587 self._lastnormaltime = parentfiledata[2]
589
588
590 @requires_parents_change
589 @requires_parents_change
591 def update_file(
590 def update_file(
592 self,
591 self,
593 filename,
592 filename,
594 wc_tracked,
593 wc_tracked,
595 p1_tracked,
594 p1_tracked,
596 p2_tracked=False,
595 p2_tracked=False,
597 merged=False,
596 merged=False,
598 clean_p1=False,
597 clean_p1=False,
599 clean_p2=False,
598 clean_p2=False,
600 possibly_dirty=False,
599 possibly_dirty=False,
601 parentfiledata=None,
600 parentfiledata=None,
602 ):
601 ):
603 """update the information about a file in the dirstate
602 """update the information about a file in the dirstate
604
603
605 This is to be called when the direstates parent changes to keep track
604 This is to be called when the direstates parent changes to keep track
606 of what is the file situation in regards to the working copy and its parent.
605 of what is the file situation in regards to the working copy and its parent.
607
606
608 This function must be called within a `dirstate.parentchange` context.
607 This function must be called within a `dirstate.parentchange` context.
609
608
610 note: the API is at an early stage and we might need to adjust it
609 note: the API is at an early stage and we might need to adjust it
611 depending of what information ends up being relevant and useful to
610 depending of what information ends up being relevant and useful to
612 other processing.
611 other processing.
613 """
612 """
614 if merged and (clean_p1 or clean_p2):
613 if merged and (clean_p1 or clean_p2):
615 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
614 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
616 raise error.ProgrammingError(msg)
615 raise error.ProgrammingError(msg)
617
616
618 # note: I do not think we need to double check name clash here since we
617 # note: I do not think we need to double check name clash here since we
619 # are in a update/merge case that should already have taken care of
618 # are in a update/merge case that should already have taken care of
620 # this. The test agrees
619 # this. The test agrees
621
620
622 self._dirty = True
621 self._dirty = True
623 self._updatedfiles.add(filename)
622 self._updatedfiles.add(filename)
624
623
625 need_parent_file_data = (
624 need_parent_file_data = (
626 not (possibly_dirty or clean_p2 or merged)
625 not (possibly_dirty or clean_p2 or merged)
627 and wc_tracked
626 and wc_tracked
628 and p1_tracked
627 and p1_tracked
629 )
628 )
630
629
631 # this mean we are doing call for file we do not really care about the
630 # this mean we are doing call for file we do not really care about the
632 # data (eg: added or removed), however this should be a minor overhead
631 # data (eg: added or removed), however this should be a minor overhead
633 # compared to the overall update process calling this.
632 # compared to the overall update process calling this.
634 if need_parent_file_data:
633 if need_parent_file_data:
635 if parentfiledata is None:
634 if parentfiledata is None:
636 parentfiledata = self._get_filedata(filename)
635 parentfiledata = self._get_filedata(filename)
637 mtime = parentfiledata[2]
636 mtime = parentfiledata[2]
638
637
639 if mtime > self._lastnormaltime:
638 if mtime > self._lastnormaltime:
640 # Remember the most recent modification timeslot for
639 # Remember the most recent modification timeslot for
641 # status(), to make sure we won't miss future
640 # status(), to make sure we won't miss future
642 # size-preserving file content modifications that happen
641 # size-preserving file content modifications that happen
643 # within the same timeslot.
642 # within the same timeslot.
644 self._lastnormaltime = mtime
643 self._lastnormaltime = mtime
645
644
646 self._map.reset_state(
645 self._map.reset_state(
647 filename,
646 filename,
648 wc_tracked,
647 wc_tracked,
649 p1_tracked,
648 p1_tracked,
650 p2_tracked=p2_tracked,
649 p2_tracked=p2_tracked,
651 merged=merged,
650 merged=merged,
652 clean_p1=clean_p1,
651 clean_p1=clean_p1,
653 clean_p2=clean_p2,
652 clean_p2=clean_p2,
654 possibly_dirty=possibly_dirty,
653 possibly_dirty=possibly_dirty,
655 parentfiledata=parentfiledata,
654 parentfiledata=parentfiledata,
656 )
655 )
657 if (
656 if (
658 parentfiledata is not None
657 parentfiledata is not None
659 and parentfiledata[2] > self._lastnormaltime
658 and parentfiledata[2] > self._lastnormaltime
660 ):
659 ):
661 # Remember the most recent modification timeslot for status(),
660 # Remember the most recent modification timeslot for status(),
662 # to make sure we won't miss future size-preserving file content
661 # to make sure we won't miss future size-preserving file content
663 # modifications that happen within the same timeslot.
662 # modifications that happen within the same timeslot.
664 self._lastnormaltime = parentfiledata[2]
663 self._lastnormaltime = parentfiledata[2]
665
664
666 def _addpath(
665 def _addpath(
667 self,
666 self,
668 f,
667 f,
669 mode=0,
668 mode=0,
670 size=None,
669 size=None,
671 mtime=None,
670 mtime=None,
672 added=False,
671 added=False,
673 merged=False,
672 merged=False,
674 from_p2=False,
673 from_p2=False,
675 possibly_dirty=False,
674 possibly_dirty=False,
676 ):
675 ):
677 entry = self._map.get(f)
676 entry = self._map.get(f)
678 if added or entry is not None and entry.removed:
677 if added or entry is not None and entry.removed:
679 self._check_new_tracked_filename(f)
678 self._check_new_tracked_filename(f)
680 self._dirty = True
679 self._dirty = True
681 self._updatedfiles.add(f)
680 self._updatedfiles.add(f)
682 self._map.addfile(
681 self._map.addfile(
683 f,
682 f,
684 mode=mode,
683 mode=mode,
685 size=size,
684 size=size,
686 mtime=mtime,
685 mtime=mtime,
687 added=added,
686 added=added,
688 merged=merged,
687 merged=merged,
689 from_p2=from_p2,
688 from_p2=from_p2,
690 possibly_dirty=possibly_dirty,
689 possibly_dirty=possibly_dirty,
691 )
690 )
692
691
693 def _check_new_tracked_filename(self, filename):
692 def _check_new_tracked_filename(self, filename):
694 scmutil.checkfilename(filename)
693 scmutil.checkfilename(filename)
695 if self._map.hastrackeddir(filename):
694 if self._map.hastrackeddir(filename):
696 msg = _(b'directory %r already in dirstate')
695 msg = _(b'directory %r already in dirstate')
697 msg %= pycompat.bytestr(filename)
696 msg %= pycompat.bytestr(filename)
698 raise error.Abort(msg)
697 raise error.Abort(msg)
699 # shadows
698 # shadows
700 for d in pathutil.finddirs(filename):
699 for d in pathutil.finddirs(filename):
701 if self._map.hastrackeddir(d):
700 if self._map.hastrackeddir(d):
702 break
701 break
703 entry = self._map.get(d)
702 entry = self._map.get(d)
704 if entry is not None and not entry.removed:
703 if entry is not None and not entry.removed:
705 msg = _(b'file %r in dirstate clashes with %r')
704 msg = _(b'file %r in dirstate clashes with %r')
706 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
705 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
707 raise error.Abort(msg)
706 raise error.Abort(msg)
708
707
709 def _get_filedata(self, filename):
708 def _get_filedata(self, filename):
710 """returns"""
709 """returns"""
711 s = os.lstat(self._join(filename))
710 s = os.lstat(self._join(filename))
712 mode = s.st_mode
711 mode = s.st_mode
713 size = s.st_size
712 size = s.st_size
714 mtime = s[stat.ST_MTIME]
713 mtime = s[stat.ST_MTIME]
715 return (mode, size, mtime)
714 return (mode, size, mtime)
716
715
717 def _normallookup(self, f):
716 def _normallookup(self, f):
718 '''Mark a file normal, but possibly dirty.'''
717 '''Mark a file normal, but possibly dirty.'''
719 if self.in_merge:
718 if self.in_merge:
720 # if there is a merge going on and the file was either
719 # if there is a merge going on and the file was either
721 # "merged" or coming from other parent (-2) before
720 # "merged" or coming from other parent (-2) before
722 # being removed, restore that state.
721 # being removed, restore that state.
723 entry = self._map.get(f)
722 entry = self._map.get(f)
724 if entry is not None:
723 if entry is not None:
725 # XXX this should probably be dealt with a a lower level
724 # XXX this should probably be dealt with a a lower level
726 # (see `merged_removed` and `from_p2_removed`)
725 # (see `merged_removed` and `from_p2_removed`)
727 if entry.merged_removed or entry.from_p2_removed:
726 if entry.merged_removed or entry.from_p2_removed:
728 source = self._map.copymap.get(f)
727 source = self._map.copymap.get(f)
729 self._addpath(f, from_p2=True)
728 self._addpath(f, from_p2=True)
730 self._map.copymap.pop(f, None)
729 self._map.copymap.pop(f, None)
731 if source is not None:
730 if source is not None:
732 self.copy(source, f)
731 self.copy(source, f)
733 return
732 return
734 elif entry.merged or entry.from_p2:
733 elif entry.merged or entry.from_p2:
735 return
734 return
736 self._addpath(f, possibly_dirty=True)
735 self._addpath(f, possibly_dirty=True)
737 self._map.copymap.pop(f, None)
736 self._map.copymap.pop(f, None)
738
737
739 def _add(self, filename):
738 def _add(self, filename):
740 """internal function to mark a file as added"""
739 """internal function to mark a file as added"""
741 self._addpath(filename, added=True)
740 self._addpath(filename, added=True)
742 self._map.copymap.pop(filename, None)
741 self._map.copymap.pop(filename, None)
743
742
744 def _drop(self, filename):
743 def _drop(self, filename):
745 """internal function to drop a file from the dirstate"""
744 """internal function to drop a file from the dirstate"""
746 if self._map.dropfile(filename):
745 if self._map.dropfile(filename):
747 self._dirty = True
746 self._dirty = True
748 self._updatedfiles.add(filename)
747 self._updatedfiles.add(filename)
749
748
750 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
749 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
751 if exists is None:
750 if exists is None:
752 exists = os.path.lexists(os.path.join(self._root, path))
751 exists = os.path.lexists(os.path.join(self._root, path))
753 if not exists:
752 if not exists:
754 # Maybe a path component exists
753 # Maybe a path component exists
755 if not ignoremissing and b'/' in path:
754 if not ignoremissing and b'/' in path:
756 d, f = path.rsplit(b'/', 1)
755 d, f = path.rsplit(b'/', 1)
757 d = self._normalize(d, False, ignoremissing, None)
756 d = self._normalize(d, False, ignoremissing, None)
758 folded = d + b"/" + f
757 folded = d + b"/" + f
759 else:
758 else:
760 # No path components, preserve original case
759 # No path components, preserve original case
761 folded = path
760 folded = path
762 else:
761 else:
763 # recursively normalize leading directory components
762 # recursively normalize leading directory components
764 # against dirstate
763 # against dirstate
765 if b'/' in normed:
764 if b'/' in normed:
766 d, f = normed.rsplit(b'/', 1)
765 d, f = normed.rsplit(b'/', 1)
767 d = self._normalize(d, False, ignoremissing, True)
766 d = self._normalize(d, False, ignoremissing, True)
768 r = self._root + b"/" + d
767 r = self._root + b"/" + d
769 folded = d + b"/" + util.fspath(f, r)
768 folded = d + b"/" + util.fspath(f, r)
770 else:
769 else:
771 folded = util.fspath(normed, self._root)
770 folded = util.fspath(normed, self._root)
772 storemap[normed] = folded
771 storemap[normed] = folded
773
772
774 return folded
773 return folded
775
774
776 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
775 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
777 normed = util.normcase(path)
776 normed = util.normcase(path)
778 folded = self._map.filefoldmap.get(normed, None)
777 folded = self._map.filefoldmap.get(normed, None)
779 if folded is None:
778 if folded is None:
780 if isknown:
779 if isknown:
781 folded = path
780 folded = path
782 else:
781 else:
783 folded = self._discoverpath(
782 folded = self._discoverpath(
784 path, normed, ignoremissing, exists, self._map.filefoldmap
783 path, normed, ignoremissing, exists, self._map.filefoldmap
785 )
784 )
786 return folded
785 return folded
787
786
788 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
787 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
789 normed = util.normcase(path)
788 normed = util.normcase(path)
790 folded = self._map.filefoldmap.get(normed, None)
789 folded = self._map.filefoldmap.get(normed, None)
791 if folded is None:
790 if folded is None:
792 folded = self._map.dirfoldmap.get(normed, None)
791 folded = self._map.dirfoldmap.get(normed, None)
793 if folded is None:
792 if folded is None:
794 if isknown:
793 if isknown:
795 folded = path
794 folded = path
796 else:
795 else:
797 # store discovered result in dirfoldmap so that future
796 # store discovered result in dirfoldmap so that future
798 # normalizefile calls don't start matching directories
797 # normalizefile calls don't start matching directories
799 folded = self._discoverpath(
798 folded = self._discoverpath(
800 path, normed, ignoremissing, exists, self._map.dirfoldmap
799 path, normed, ignoremissing, exists, self._map.dirfoldmap
801 )
800 )
802 return folded
801 return folded
803
802
804 def normalize(self, path, isknown=False, ignoremissing=False):
803 def normalize(self, path, isknown=False, ignoremissing=False):
805 """
804 """
806 normalize the case of a pathname when on a casefolding filesystem
805 normalize the case of a pathname when on a casefolding filesystem
807
806
808 isknown specifies whether the filename came from walking the
807 isknown specifies whether the filename came from walking the
809 disk, to avoid extra filesystem access.
808 disk, to avoid extra filesystem access.
810
809
811 If ignoremissing is True, missing path are returned
810 If ignoremissing is True, missing path are returned
812 unchanged. Otherwise, we try harder to normalize possibly
811 unchanged. Otherwise, we try harder to normalize possibly
813 existing path components.
812 existing path components.
814
813
815 The normalized case is determined based on the following precedence:
814 The normalized case is determined based on the following precedence:
816
815
817 - version of name already stored in the dirstate
816 - version of name already stored in the dirstate
818 - version of name stored on disk
817 - version of name stored on disk
819 - version provided via command arguments
818 - version provided via command arguments
820 """
819 """
821
820
822 if self._checkcase:
821 if self._checkcase:
823 return self._normalize(path, isknown, ignoremissing)
822 return self._normalize(path, isknown, ignoremissing)
824 return path
823 return path
825
824
826 def clear(self):
825 def clear(self):
827 self._map.clear()
826 self._map.clear()
828 self._lastnormaltime = 0
827 self._lastnormaltime = 0
829 self._updatedfiles.clear()
828 self._updatedfiles.clear()
830 self._dirty = True
829 self._dirty = True
831
830
832 def rebuild(self, parent, allfiles, changedfiles=None):
831 def rebuild(self, parent, allfiles, changedfiles=None):
833 if changedfiles is None:
832 if changedfiles is None:
834 # Rebuild entire dirstate
833 # Rebuild entire dirstate
835 to_lookup = allfiles
834 to_lookup = allfiles
836 to_drop = []
835 to_drop = []
837 lastnormaltime = self._lastnormaltime
836 lastnormaltime = self._lastnormaltime
838 self.clear()
837 self.clear()
839 self._lastnormaltime = lastnormaltime
838 self._lastnormaltime = lastnormaltime
840 elif len(changedfiles) < 10:
839 elif len(changedfiles) < 10:
841 # Avoid turning allfiles into a set, which can be expensive if it's
840 # Avoid turning allfiles into a set, which can be expensive if it's
842 # large.
841 # large.
843 to_lookup = []
842 to_lookup = []
844 to_drop = []
843 to_drop = []
845 for f in changedfiles:
844 for f in changedfiles:
846 if f in allfiles:
845 if f in allfiles:
847 to_lookup.append(f)
846 to_lookup.append(f)
848 else:
847 else:
849 to_drop.append(f)
848 to_drop.append(f)
850 else:
849 else:
851 changedfilesset = set(changedfiles)
850 changedfilesset = set(changedfiles)
852 to_lookup = changedfilesset & set(allfiles)
851 to_lookup = changedfilesset & set(allfiles)
853 to_drop = changedfilesset - to_lookup
852 to_drop = changedfilesset - to_lookup
854
853
855 if self._origpl is None:
854 if self._origpl is None:
856 self._origpl = self._pl
855 self._origpl = self._pl
857 self._map.setparents(parent, self._nodeconstants.nullid)
856 self._map.setparents(parent, self._nodeconstants.nullid)
858
857
859 for f in to_lookup:
858 for f in to_lookup:
860 self._normallookup(f)
859 self._normallookup(f)
861 for f in to_drop:
860 for f in to_drop:
862 self._drop(f)
861 self._drop(f)
863
862
864 self._dirty = True
863 self._dirty = True
865
864
866 def identity(self):
865 def identity(self):
867 """Return identity of dirstate itself to detect changing in storage
866 """Return identity of dirstate itself to detect changing in storage
868
867
869 If identity of previous dirstate is equal to this, writing
868 If identity of previous dirstate is equal to this, writing
870 changes based on the former dirstate out can keep consistency.
869 changes based on the former dirstate out can keep consistency.
871 """
870 """
872 return self._map.identity
871 return self._map.identity
873
872
874 def write(self, tr):
873 def write(self, tr):
875 if not self._dirty:
874 if not self._dirty:
876 return
875 return
877
876
878 filename = self._filename
877 filename = self._filename
879 if tr:
878 if tr:
880 # 'dirstate.write()' is not only for writing in-memory
879 # 'dirstate.write()' is not only for writing in-memory
881 # changes out, but also for dropping ambiguous timestamp.
880 # changes out, but also for dropping ambiguous timestamp.
882 # delayed writing re-raise "ambiguous timestamp issue".
881 # delayed writing re-raise "ambiguous timestamp issue".
883 # See also the wiki page below for detail:
882 # See also the wiki page below for detail:
884 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
883 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
885
884
886 # emulate dropping timestamp in 'parsers.pack_dirstate'
885 # emulate dropping timestamp in 'parsers.pack_dirstate'
887 now = _getfsnow(self._opener)
886 now = _getfsnow(self._opener)
888 self._map.clearambiguoustimes(self._updatedfiles, now)
887 self._map.clearambiguoustimes(self._updatedfiles, now)
889
888
890 # emulate that all 'dirstate.normal' results are written out
889 # emulate that all 'dirstate.normal' results are written out
891 self._lastnormaltime = 0
890 self._lastnormaltime = 0
892 self._updatedfiles.clear()
891 self._updatedfiles.clear()
893
892
894 # delay writing in-memory changes out
893 # delay writing in-memory changes out
895 tr.addfilegenerator(
894 tr.addfilegenerator(
896 b'dirstate',
895 b'dirstate',
897 (self._filename,),
896 (self._filename,),
898 lambda f: self._writedirstate(tr, f),
897 lambda f: self._writedirstate(tr, f),
899 location=b'plain',
898 location=b'plain',
900 )
899 )
901 return
900 return
902
901
903 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
902 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
904 self._writedirstate(tr, st)
903 self._writedirstate(tr, st)
905
904
906 def addparentchangecallback(self, category, callback):
905 def addparentchangecallback(self, category, callback):
907 """add a callback to be called when the wd parents are changed
906 """add a callback to be called when the wd parents are changed
908
907
909 Callback will be called with the following arguments:
908 Callback will be called with the following arguments:
910 dirstate, (oldp1, oldp2), (newp1, newp2)
909 dirstate, (oldp1, oldp2), (newp1, newp2)
911
910
912 Category is a unique identifier to allow overwriting an old callback
911 Category is a unique identifier to allow overwriting an old callback
913 with a newer callback.
912 with a newer callback.
914 """
913 """
915 self._plchangecallbacks[category] = callback
914 self._plchangecallbacks[category] = callback
916
915
917 def _writedirstate(self, tr, st):
916 def _writedirstate(self, tr, st):
918 # notify callbacks about parents change
917 # notify callbacks about parents change
919 if self._origpl is not None and self._origpl != self._pl:
918 if self._origpl is not None and self._origpl != self._pl:
920 for c, callback in sorted(
919 for c, callback in sorted(
921 pycompat.iteritems(self._plchangecallbacks)
920 pycompat.iteritems(self._plchangecallbacks)
922 ):
921 ):
923 callback(self, self._origpl, self._pl)
922 callback(self, self._origpl, self._pl)
924 self._origpl = None
923 self._origpl = None
925 # use the modification time of the newly created temporary file as the
924 # use the modification time of the newly created temporary file as the
926 # filesystem's notion of 'now'
925 # filesystem's notion of 'now'
927 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
926 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
928
927
929 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
928 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
930 # timestamp of each entries in dirstate, because of 'now > mtime'
929 # timestamp of each entries in dirstate, because of 'now > mtime'
931 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
930 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
932 if delaywrite > 0:
931 if delaywrite > 0:
933 # do we have any files to delay for?
932 # do we have any files to delay for?
934 for f, e in pycompat.iteritems(self._map):
933 for f, e in pycompat.iteritems(self._map):
935 if e.need_delay(now):
934 if e.need_delay(now):
936 import time # to avoid useless import
935 import time # to avoid useless import
937
936
938 # rather than sleep n seconds, sleep until the next
937 # rather than sleep n seconds, sleep until the next
939 # multiple of n seconds
938 # multiple of n seconds
940 clock = time.time()
939 clock = time.time()
941 start = int(clock) - (int(clock) % delaywrite)
940 start = int(clock) - (int(clock) % delaywrite)
942 end = start + delaywrite
941 end = start + delaywrite
943 time.sleep(end - clock)
942 time.sleep(end - clock)
944 now = end # trust our estimate that the end is near now
943 now = end # trust our estimate that the end is near now
945 break
944 break
946
945
947 self._map.write(tr, st, now)
946 self._map.write(tr, st, now)
948 self._lastnormaltime = 0
947 self._lastnormaltime = 0
949 self._dirty = False
948 self._dirty = False
950
949
951 def _dirignore(self, f):
950 def _dirignore(self, f):
952 if self._ignore(f):
951 if self._ignore(f):
953 return True
952 return True
954 for p in pathutil.finddirs(f):
953 for p in pathutil.finddirs(f):
955 if self._ignore(p):
954 if self._ignore(p):
956 return True
955 return True
957 return False
956 return False
958
957
959 def _ignorefiles(self):
958 def _ignorefiles(self):
960 files = []
959 files = []
961 if os.path.exists(self._join(b'.hgignore')):
960 if os.path.exists(self._join(b'.hgignore')):
962 files.append(self._join(b'.hgignore'))
961 files.append(self._join(b'.hgignore'))
963 for name, path in self._ui.configitems(b"ui"):
962 for name, path in self._ui.configitems(b"ui"):
964 if name == b'ignore' or name.startswith(b'ignore.'):
963 if name == b'ignore' or name.startswith(b'ignore.'):
965 # we need to use os.path.join here rather than self._join
964 # we need to use os.path.join here rather than self._join
966 # because path is arbitrary and user-specified
965 # because path is arbitrary and user-specified
967 files.append(os.path.join(self._rootdir, util.expandpath(path)))
966 files.append(os.path.join(self._rootdir, util.expandpath(path)))
968 return files
967 return files
969
968
970 def _ignorefileandline(self, f):
969 def _ignorefileandline(self, f):
971 files = collections.deque(self._ignorefiles())
970 files = collections.deque(self._ignorefiles())
972 visited = set()
971 visited = set()
973 while files:
972 while files:
974 i = files.popleft()
973 i = files.popleft()
975 patterns = matchmod.readpatternfile(
974 patterns = matchmod.readpatternfile(
976 i, self._ui.warn, sourceinfo=True
975 i, self._ui.warn, sourceinfo=True
977 )
976 )
978 for pattern, lineno, line in patterns:
977 for pattern, lineno, line in patterns:
979 kind, p = matchmod._patsplit(pattern, b'glob')
978 kind, p = matchmod._patsplit(pattern, b'glob')
980 if kind == b"subinclude":
979 if kind == b"subinclude":
981 if p not in visited:
980 if p not in visited:
982 files.append(p)
981 files.append(p)
983 continue
982 continue
984 m = matchmod.match(
983 m = matchmod.match(
985 self._root, b'', [], [pattern], warn=self._ui.warn
984 self._root, b'', [], [pattern], warn=self._ui.warn
986 )
985 )
987 if m(f):
986 if m(f):
988 return (i, lineno, line)
987 return (i, lineno, line)
989 visited.add(i)
988 visited.add(i)
990 return (None, -1, b"")
989 return (None, -1, b"")
991
990
992 def _walkexplicit(self, match, subrepos):
991 def _walkexplicit(self, match, subrepos):
993 """Get stat data about the files explicitly specified by match.
992 """Get stat data about the files explicitly specified by match.
994
993
995 Return a triple (results, dirsfound, dirsnotfound).
994 Return a triple (results, dirsfound, dirsnotfound).
996 - results is a mapping from filename to stat result. It also contains
995 - results is a mapping from filename to stat result. It also contains
997 listings mapping subrepos and .hg to None.
996 listings mapping subrepos and .hg to None.
998 - dirsfound is a list of files found to be directories.
997 - dirsfound is a list of files found to be directories.
999 - dirsnotfound is a list of files that the dirstate thinks are
998 - dirsnotfound is a list of files that the dirstate thinks are
1000 directories and that were not found."""
999 directories and that were not found."""
1001
1000
1002 def badtype(mode):
1001 def badtype(mode):
1003 kind = _(b'unknown')
1002 kind = _(b'unknown')
1004 if stat.S_ISCHR(mode):
1003 if stat.S_ISCHR(mode):
1005 kind = _(b'character device')
1004 kind = _(b'character device')
1006 elif stat.S_ISBLK(mode):
1005 elif stat.S_ISBLK(mode):
1007 kind = _(b'block device')
1006 kind = _(b'block device')
1008 elif stat.S_ISFIFO(mode):
1007 elif stat.S_ISFIFO(mode):
1009 kind = _(b'fifo')
1008 kind = _(b'fifo')
1010 elif stat.S_ISSOCK(mode):
1009 elif stat.S_ISSOCK(mode):
1011 kind = _(b'socket')
1010 kind = _(b'socket')
1012 elif stat.S_ISDIR(mode):
1011 elif stat.S_ISDIR(mode):
1013 kind = _(b'directory')
1012 kind = _(b'directory')
1014 return _(b'unsupported file type (type is %s)') % kind
1013 return _(b'unsupported file type (type is %s)') % kind
1015
1014
1016 badfn = match.bad
1015 badfn = match.bad
1017 dmap = self._map
1016 dmap = self._map
1018 lstat = os.lstat
1017 lstat = os.lstat
1019 getkind = stat.S_IFMT
1018 getkind = stat.S_IFMT
1020 dirkind = stat.S_IFDIR
1019 dirkind = stat.S_IFDIR
1021 regkind = stat.S_IFREG
1020 regkind = stat.S_IFREG
1022 lnkkind = stat.S_IFLNK
1021 lnkkind = stat.S_IFLNK
1023 join = self._join
1022 join = self._join
1024 dirsfound = []
1023 dirsfound = []
1025 foundadd = dirsfound.append
1024 foundadd = dirsfound.append
1026 dirsnotfound = []
1025 dirsnotfound = []
1027 notfoundadd = dirsnotfound.append
1026 notfoundadd = dirsnotfound.append
1028
1027
1029 if not match.isexact() and self._checkcase:
1028 if not match.isexact() and self._checkcase:
1030 normalize = self._normalize
1029 normalize = self._normalize
1031 else:
1030 else:
1032 normalize = None
1031 normalize = None
1033
1032
1034 files = sorted(match.files())
1033 files = sorted(match.files())
1035 subrepos.sort()
1034 subrepos.sort()
1036 i, j = 0, 0
1035 i, j = 0, 0
1037 while i < len(files) and j < len(subrepos):
1036 while i < len(files) and j < len(subrepos):
1038 subpath = subrepos[j] + b"/"
1037 subpath = subrepos[j] + b"/"
1039 if files[i] < subpath:
1038 if files[i] < subpath:
1040 i += 1
1039 i += 1
1041 continue
1040 continue
1042 while i < len(files) and files[i].startswith(subpath):
1041 while i < len(files) and files[i].startswith(subpath):
1043 del files[i]
1042 del files[i]
1044 j += 1
1043 j += 1
1045
1044
1046 if not files or b'' in files:
1045 if not files or b'' in files:
1047 files = [b'']
1046 files = [b'']
1048 # constructing the foldmap is expensive, so don't do it for the
1047 # constructing the foldmap is expensive, so don't do it for the
1049 # common case where files is ['']
1048 # common case where files is ['']
1050 normalize = None
1049 normalize = None
1051 results = dict.fromkeys(subrepos)
1050 results = dict.fromkeys(subrepos)
1052 results[b'.hg'] = None
1051 results[b'.hg'] = None
1053
1052
1054 for ff in files:
1053 for ff in files:
1055 if normalize:
1054 if normalize:
1056 nf = normalize(ff, False, True)
1055 nf = normalize(ff, False, True)
1057 else:
1056 else:
1058 nf = ff
1057 nf = ff
1059 if nf in results:
1058 if nf in results:
1060 continue
1059 continue
1061
1060
1062 try:
1061 try:
1063 st = lstat(join(nf))
1062 st = lstat(join(nf))
1064 kind = getkind(st.st_mode)
1063 kind = getkind(st.st_mode)
1065 if kind == dirkind:
1064 if kind == dirkind:
1066 if nf in dmap:
1065 if nf in dmap:
1067 # file replaced by dir on disk but still in dirstate
1066 # file replaced by dir on disk but still in dirstate
1068 results[nf] = None
1067 results[nf] = None
1069 foundadd((nf, ff))
1068 foundadd((nf, ff))
1070 elif kind == regkind or kind == lnkkind:
1069 elif kind == regkind or kind == lnkkind:
1071 results[nf] = st
1070 results[nf] = st
1072 else:
1071 else:
1073 badfn(ff, badtype(kind))
1072 badfn(ff, badtype(kind))
1074 if nf in dmap:
1073 if nf in dmap:
1075 results[nf] = None
1074 results[nf] = None
1076 except OSError as inst: # nf not found on disk - it is dirstate only
1075 except OSError as inst: # nf not found on disk - it is dirstate only
1077 if nf in dmap: # does it exactly match a missing file?
1076 if nf in dmap: # does it exactly match a missing file?
1078 results[nf] = None
1077 results[nf] = None
1079 else: # does it match a missing directory?
1078 else: # does it match a missing directory?
1080 if self._map.hasdir(nf):
1079 if self._map.hasdir(nf):
1081 notfoundadd(nf)
1080 notfoundadd(nf)
1082 else:
1081 else:
1083 badfn(ff, encoding.strtolocal(inst.strerror))
1082 badfn(ff, encoding.strtolocal(inst.strerror))
1084
1083
1085 # match.files() may contain explicitly-specified paths that shouldn't
1084 # match.files() may contain explicitly-specified paths that shouldn't
1086 # be taken; drop them from the list of files found. dirsfound/notfound
1085 # be taken; drop them from the list of files found. dirsfound/notfound
1087 # aren't filtered here because they will be tested later.
1086 # aren't filtered here because they will be tested later.
1088 if match.anypats():
1087 if match.anypats():
1089 for f in list(results):
1088 for f in list(results):
1090 if f == b'.hg' or f in subrepos:
1089 if f == b'.hg' or f in subrepos:
1091 # keep sentinel to disable further out-of-repo walks
1090 # keep sentinel to disable further out-of-repo walks
1092 continue
1091 continue
1093 if not match(f):
1092 if not match(f):
1094 del results[f]
1093 del results[f]
1095
1094
1096 # Case insensitive filesystems cannot rely on lstat() failing to detect
1095 # Case insensitive filesystems cannot rely on lstat() failing to detect
1097 # a case-only rename. Prune the stat object for any file that does not
1096 # a case-only rename. Prune the stat object for any file that does not
1098 # match the case in the filesystem, if there are multiple files that
1097 # match the case in the filesystem, if there are multiple files that
1099 # normalize to the same path.
1098 # normalize to the same path.
1100 if match.isexact() and self._checkcase:
1099 if match.isexact() and self._checkcase:
1101 normed = {}
1100 normed = {}
1102
1101
1103 for f, st in pycompat.iteritems(results):
1102 for f, st in pycompat.iteritems(results):
1104 if st is None:
1103 if st is None:
1105 continue
1104 continue
1106
1105
1107 nc = util.normcase(f)
1106 nc = util.normcase(f)
1108 paths = normed.get(nc)
1107 paths = normed.get(nc)
1109
1108
1110 if paths is None:
1109 if paths is None:
1111 paths = set()
1110 paths = set()
1112 normed[nc] = paths
1111 normed[nc] = paths
1113
1112
1114 paths.add(f)
1113 paths.add(f)
1115
1114
1116 for norm, paths in pycompat.iteritems(normed):
1115 for norm, paths in pycompat.iteritems(normed):
1117 if len(paths) > 1:
1116 if len(paths) > 1:
1118 for path in paths:
1117 for path in paths:
1119 folded = self._discoverpath(
1118 folded = self._discoverpath(
1120 path, norm, True, None, self._map.dirfoldmap
1119 path, norm, True, None, self._map.dirfoldmap
1121 )
1120 )
1122 if path != folded:
1121 if path != folded:
1123 results[path] = None
1122 results[path] = None
1124
1123
1125 return results, dirsfound, dirsnotfound
1124 return results, dirsfound, dirsnotfound
1126
1125
1127 def walk(self, match, subrepos, unknown, ignored, full=True):
1126 def walk(self, match, subrepos, unknown, ignored, full=True):
1128 """
1127 """
1129 Walk recursively through the directory tree, finding all files
1128 Walk recursively through the directory tree, finding all files
1130 matched by match.
1129 matched by match.
1131
1130
1132 If full is False, maybe skip some known-clean files.
1131 If full is False, maybe skip some known-clean files.
1133
1132
1134 Return a dict mapping filename to stat-like object (either
1133 Return a dict mapping filename to stat-like object (either
1135 mercurial.osutil.stat instance or return value of os.stat()).
1134 mercurial.osutil.stat instance or return value of os.stat()).
1136
1135
1137 """
1136 """
1138 # full is a flag that extensions that hook into walk can use -- this
1137 # full is a flag that extensions that hook into walk can use -- this
1139 # implementation doesn't use it at all. This satisfies the contract
1138 # implementation doesn't use it at all. This satisfies the contract
1140 # because we only guarantee a "maybe".
1139 # because we only guarantee a "maybe".
1141
1140
1142 if ignored:
1141 if ignored:
1143 ignore = util.never
1142 ignore = util.never
1144 dirignore = util.never
1143 dirignore = util.never
1145 elif unknown:
1144 elif unknown:
1146 ignore = self._ignore
1145 ignore = self._ignore
1147 dirignore = self._dirignore
1146 dirignore = self._dirignore
1148 else:
1147 else:
1149 # if not unknown and not ignored, drop dir recursion and step 2
1148 # if not unknown and not ignored, drop dir recursion and step 2
1150 ignore = util.always
1149 ignore = util.always
1151 dirignore = util.always
1150 dirignore = util.always
1152
1151
1153 matchfn = match.matchfn
1152 matchfn = match.matchfn
1154 matchalways = match.always()
1153 matchalways = match.always()
1155 matchtdir = match.traversedir
1154 matchtdir = match.traversedir
1156 dmap = self._map
1155 dmap = self._map
1157 listdir = util.listdir
1156 listdir = util.listdir
1158 lstat = os.lstat
1157 lstat = os.lstat
1159 dirkind = stat.S_IFDIR
1158 dirkind = stat.S_IFDIR
1160 regkind = stat.S_IFREG
1159 regkind = stat.S_IFREG
1161 lnkkind = stat.S_IFLNK
1160 lnkkind = stat.S_IFLNK
1162 join = self._join
1161 join = self._join
1163
1162
1164 exact = skipstep3 = False
1163 exact = skipstep3 = False
1165 if match.isexact(): # match.exact
1164 if match.isexact(): # match.exact
1166 exact = True
1165 exact = True
1167 dirignore = util.always # skip step 2
1166 dirignore = util.always # skip step 2
1168 elif match.prefix(): # match.match, no patterns
1167 elif match.prefix(): # match.match, no patterns
1169 skipstep3 = True
1168 skipstep3 = True
1170
1169
1171 if not exact and self._checkcase:
1170 if not exact and self._checkcase:
1172 normalize = self._normalize
1171 normalize = self._normalize
1173 normalizefile = self._normalizefile
1172 normalizefile = self._normalizefile
1174 skipstep3 = False
1173 skipstep3 = False
1175 else:
1174 else:
1176 normalize = self._normalize
1175 normalize = self._normalize
1177 normalizefile = None
1176 normalizefile = None
1178
1177
1179 # step 1: find all explicit files
1178 # step 1: find all explicit files
1180 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1179 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1181 if matchtdir:
1180 if matchtdir:
1182 for d in work:
1181 for d in work:
1183 matchtdir(d[0])
1182 matchtdir(d[0])
1184 for d in dirsnotfound:
1183 for d in dirsnotfound:
1185 matchtdir(d)
1184 matchtdir(d)
1186
1185
1187 skipstep3 = skipstep3 and not (work or dirsnotfound)
1186 skipstep3 = skipstep3 and not (work or dirsnotfound)
1188 work = [d for d in work if not dirignore(d[0])]
1187 work = [d for d in work if not dirignore(d[0])]
1189
1188
1190 # step 2: visit subdirectories
1189 # step 2: visit subdirectories
1191 def traverse(work, alreadynormed):
1190 def traverse(work, alreadynormed):
1192 wadd = work.append
1191 wadd = work.append
1193 while work:
1192 while work:
1194 tracing.counter('dirstate.walk work', len(work))
1193 tracing.counter('dirstate.walk work', len(work))
1195 nd = work.pop()
1194 nd = work.pop()
1196 visitentries = match.visitchildrenset(nd)
1195 visitentries = match.visitchildrenset(nd)
1197 if not visitentries:
1196 if not visitentries:
1198 continue
1197 continue
1199 if visitentries == b'this' or visitentries == b'all':
1198 if visitentries == b'this' or visitentries == b'all':
1200 visitentries = None
1199 visitentries = None
1201 skip = None
1200 skip = None
1202 if nd != b'':
1201 if nd != b'':
1203 skip = b'.hg'
1202 skip = b'.hg'
1204 try:
1203 try:
1205 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1204 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1206 entries = listdir(join(nd), stat=True, skip=skip)
1205 entries = listdir(join(nd), stat=True, skip=skip)
1207 except OSError as inst:
1206 except OSError as inst:
1208 if inst.errno in (errno.EACCES, errno.ENOENT):
1207 if inst.errno in (errno.EACCES, errno.ENOENT):
1209 match.bad(
1208 match.bad(
1210 self.pathto(nd), encoding.strtolocal(inst.strerror)
1209 self.pathto(nd), encoding.strtolocal(inst.strerror)
1211 )
1210 )
1212 continue
1211 continue
1213 raise
1212 raise
1214 for f, kind, st in entries:
1213 for f, kind, st in entries:
1215 # Some matchers may return files in the visitentries set,
1214 # Some matchers may return files in the visitentries set,
1216 # instead of 'this', if the matcher explicitly mentions them
1215 # instead of 'this', if the matcher explicitly mentions them
1217 # and is not an exactmatcher. This is acceptable; we do not
1216 # and is not an exactmatcher. This is acceptable; we do not
1218 # make any hard assumptions about file-or-directory below
1217 # make any hard assumptions about file-or-directory below
1219 # based on the presence of `f` in visitentries. If
1218 # based on the presence of `f` in visitentries. If
1220 # visitchildrenset returned a set, we can always skip the
1219 # visitchildrenset returned a set, we can always skip the
1221 # entries *not* in the set it provided regardless of whether
1220 # entries *not* in the set it provided regardless of whether
1222 # they're actually a file or a directory.
1221 # they're actually a file or a directory.
1223 if visitentries and f not in visitentries:
1222 if visitentries and f not in visitentries:
1224 continue
1223 continue
1225 if normalizefile:
1224 if normalizefile:
1226 # even though f might be a directory, we're only
1225 # even though f might be a directory, we're only
1227 # interested in comparing it to files currently in the
1226 # interested in comparing it to files currently in the
1228 # dmap -- therefore normalizefile is enough
1227 # dmap -- therefore normalizefile is enough
1229 nf = normalizefile(
1228 nf = normalizefile(
1230 nd and (nd + b"/" + f) or f, True, True
1229 nd and (nd + b"/" + f) or f, True, True
1231 )
1230 )
1232 else:
1231 else:
1233 nf = nd and (nd + b"/" + f) or f
1232 nf = nd and (nd + b"/" + f) or f
1234 if nf not in results:
1233 if nf not in results:
1235 if kind == dirkind:
1234 if kind == dirkind:
1236 if not ignore(nf):
1235 if not ignore(nf):
1237 if matchtdir:
1236 if matchtdir:
1238 matchtdir(nf)
1237 matchtdir(nf)
1239 wadd(nf)
1238 wadd(nf)
1240 if nf in dmap and (matchalways or matchfn(nf)):
1239 if nf in dmap and (matchalways or matchfn(nf)):
1241 results[nf] = None
1240 results[nf] = None
1242 elif kind == regkind or kind == lnkkind:
1241 elif kind == regkind or kind == lnkkind:
1243 if nf in dmap:
1242 if nf in dmap:
1244 if matchalways or matchfn(nf):
1243 if matchalways or matchfn(nf):
1245 results[nf] = st
1244 results[nf] = st
1246 elif (matchalways or matchfn(nf)) and not ignore(
1245 elif (matchalways or matchfn(nf)) and not ignore(
1247 nf
1246 nf
1248 ):
1247 ):
1249 # unknown file -- normalize if necessary
1248 # unknown file -- normalize if necessary
1250 if not alreadynormed:
1249 if not alreadynormed:
1251 nf = normalize(nf, False, True)
1250 nf = normalize(nf, False, True)
1252 results[nf] = st
1251 results[nf] = st
1253 elif nf in dmap and (matchalways or matchfn(nf)):
1252 elif nf in dmap and (matchalways or matchfn(nf)):
1254 results[nf] = None
1253 results[nf] = None
1255
1254
1256 for nd, d in work:
1255 for nd, d in work:
1257 # alreadynormed means that processwork doesn't have to do any
1256 # alreadynormed means that processwork doesn't have to do any
1258 # expensive directory normalization
1257 # expensive directory normalization
1259 alreadynormed = not normalize or nd == d
1258 alreadynormed = not normalize or nd == d
1260 traverse([d], alreadynormed)
1259 traverse([d], alreadynormed)
1261
1260
1262 for s in subrepos:
1261 for s in subrepos:
1263 del results[s]
1262 del results[s]
1264 del results[b'.hg']
1263 del results[b'.hg']
1265
1264
1266 # step 3: visit remaining files from dmap
1265 # step 3: visit remaining files from dmap
1267 if not skipstep3 and not exact:
1266 if not skipstep3 and not exact:
1268 # If a dmap file is not in results yet, it was either
1267 # If a dmap file is not in results yet, it was either
1269 # a) not matching matchfn b) ignored, c) missing, or d) under a
1268 # a) not matching matchfn b) ignored, c) missing, or d) under a
1270 # symlink directory.
1269 # symlink directory.
1271 if not results and matchalways:
1270 if not results and matchalways:
1272 visit = [f for f in dmap]
1271 visit = [f for f in dmap]
1273 else:
1272 else:
1274 visit = [f for f in dmap if f not in results and matchfn(f)]
1273 visit = [f for f in dmap if f not in results and matchfn(f)]
1275 visit.sort()
1274 visit.sort()
1276
1275
1277 if unknown:
1276 if unknown:
1278 # unknown == True means we walked all dirs under the roots
1277 # unknown == True means we walked all dirs under the roots
1279 # that wasn't ignored, and everything that matched was stat'ed
1278 # that wasn't ignored, and everything that matched was stat'ed
1280 # and is already in results.
1279 # and is already in results.
1281 # The rest must thus be ignored or under a symlink.
1280 # The rest must thus be ignored or under a symlink.
1282 audit_path = pathutil.pathauditor(self._root, cached=True)
1281 audit_path = pathutil.pathauditor(self._root, cached=True)
1283
1282
1284 for nf in iter(visit):
1283 for nf in iter(visit):
1285 # If a stat for the same file was already added with a
1284 # If a stat for the same file was already added with a
1286 # different case, don't add one for this, since that would
1285 # different case, don't add one for this, since that would
1287 # make it appear as if the file exists under both names
1286 # make it appear as if the file exists under both names
1288 # on disk.
1287 # on disk.
1289 if (
1288 if (
1290 normalizefile
1289 normalizefile
1291 and normalizefile(nf, True, True) in results
1290 and normalizefile(nf, True, True) in results
1292 ):
1291 ):
1293 results[nf] = None
1292 results[nf] = None
1294 # Report ignored items in the dmap as long as they are not
1293 # Report ignored items in the dmap as long as they are not
1295 # under a symlink directory.
1294 # under a symlink directory.
1296 elif audit_path.check(nf):
1295 elif audit_path.check(nf):
1297 try:
1296 try:
1298 results[nf] = lstat(join(nf))
1297 results[nf] = lstat(join(nf))
1299 # file was just ignored, no links, and exists
1298 # file was just ignored, no links, and exists
1300 except OSError:
1299 except OSError:
1301 # file doesn't exist
1300 # file doesn't exist
1302 results[nf] = None
1301 results[nf] = None
1303 else:
1302 else:
1304 # It's either missing or under a symlink directory
1303 # It's either missing or under a symlink directory
1305 # which we in this case report as missing
1304 # which we in this case report as missing
1306 results[nf] = None
1305 results[nf] = None
1307 else:
1306 else:
1308 # We may not have walked the full directory tree above,
1307 # We may not have walked the full directory tree above,
1309 # so stat and check everything we missed.
1308 # so stat and check everything we missed.
1310 iv = iter(visit)
1309 iv = iter(visit)
1311 for st in util.statfiles([join(i) for i in visit]):
1310 for st in util.statfiles([join(i) for i in visit]):
1312 results[next(iv)] = st
1311 results[next(iv)] = st
1313 return results
1312 return results
1314
1313
1315 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1314 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1316 # Force Rayon (Rust parallelism library) to respect the number of
1315 # Force Rayon (Rust parallelism library) to respect the number of
1317 # workers. This is a temporary workaround until Rust code knows
1316 # workers. This is a temporary workaround until Rust code knows
1318 # how to read the config file.
1317 # how to read the config file.
1319 numcpus = self._ui.configint(b"worker", b"numcpus")
1318 numcpus = self._ui.configint(b"worker", b"numcpus")
1320 if numcpus is not None:
1319 if numcpus is not None:
1321 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1320 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1322
1321
1323 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1322 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1324 if not workers_enabled:
1323 if not workers_enabled:
1325 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1324 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1326
1325
1327 (
1326 (
1328 lookup,
1327 lookup,
1329 modified,
1328 modified,
1330 added,
1329 added,
1331 removed,
1330 removed,
1332 deleted,
1331 deleted,
1333 clean,
1332 clean,
1334 ignored,
1333 ignored,
1335 unknown,
1334 unknown,
1336 warnings,
1335 warnings,
1337 bad,
1336 bad,
1338 traversed,
1337 traversed,
1339 dirty,
1338 dirty,
1340 ) = rustmod.status(
1339 ) = rustmod.status(
1341 self._map._rustmap,
1340 self._map._rustmap,
1342 matcher,
1341 matcher,
1343 self._rootdir,
1342 self._rootdir,
1344 self._ignorefiles(),
1343 self._ignorefiles(),
1345 self._checkexec,
1344 self._checkexec,
1346 self._lastnormaltime,
1345 self._lastnormaltime,
1347 bool(list_clean),
1346 bool(list_clean),
1348 bool(list_ignored),
1347 bool(list_ignored),
1349 bool(list_unknown),
1348 bool(list_unknown),
1350 bool(matcher.traversedir),
1349 bool(matcher.traversedir),
1351 )
1350 )
1352
1351
1353 self._dirty |= dirty
1352 self._dirty |= dirty
1354
1353
1355 if matcher.traversedir:
1354 if matcher.traversedir:
1356 for dir in traversed:
1355 for dir in traversed:
1357 matcher.traversedir(dir)
1356 matcher.traversedir(dir)
1358
1357
1359 if self._ui.warn:
1358 if self._ui.warn:
1360 for item in warnings:
1359 for item in warnings:
1361 if isinstance(item, tuple):
1360 if isinstance(item, tuple):
1362 file_path, syntax = item
1361 file_path, syntax = item
1363 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1362 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1364 file_path,
1363 file_path,
1365 syntax,
1364 syntax,
1366 )
1365 )
1367 self._ui.warn(msg)
1366 self._ui.warn(msg)
1368 else:
1367 else:
1369 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1368 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1370 self._ui.warn(
1369 self._ui.warn(
1371 msg
1370 msg
1372 % (
1371 % (
1373 pathutil.canonpath(
1372 pathutil.canonpath(
1374 self._rootdir, self._rootdir, item
1373 self._rootdir, self._rootdir, item
1375 ),
1374 ),
1376 b"No such file or directory",
1375 b"No such file or directory",
1377 )
1376 )
1378 )
1377 )
1379
1378
1380 for (fn, message) in bad:
1379 for (fn, message) in bad:
1381 matcher.bad(fn, encoding.strtolocal(message))
1380 matcher.bad(fn, encoding.strtolocal(message))
1382
1381
1383 status = scmutil.status(
1382 status = scmutil.status(
1384 modified=modified,
1383 modified=modified,
1385 added=added,
1384 added=added,
1386 removed=removed,
1385 removed=removed,
1387 deleted=deleted,
1386 deleted=deleted,
1388 unknown=unknown,
1387 unknown=unknown,
1389 ignored=ignored,
1388 ignored=ignored,
1390 clean=clean,
1389 clean=clean,
1391 )
1390 )
1392 return (lookup, status)
1391 return (lookup, status)
1393
1392
1394 def status(self, match, subrepos, ignored, clean, unknown):
1393 def status(self, match, subrepos, ignored, clean, unknown):
1395 """Determine the status of the working copy relative to the
1394 """Determine the status of the working copy relative to the
1396 dirstate and return a pair of (unsure, status), where status is of type
1395 dirstate and return a pair of (unsure, status), where status is of type
1397 scmutil.status and:
1396 scmutil.status and:
1398
1397
1399 unsure:
1398 unsure:
1400 files that might have been modified since the dirstate was
1399 files that might have been modified since the dirstate was
1401 written, but need to be read to be sure (size is the same
1400 written, but need to be read to be sure (size is the same
1402 but mtime differs)
1401 but mtime differs)
1403 status.modified:
1402 status.modified:
1404 files that have definitely been modified since the dirstate
1403 files that have definitely been modified since the dirstate
1405 was written (different size or mode)
1404 was written (different size or mode)
1406 status.clean:
1405 status.clean:
1407 files that have definitely not been modified since the
1406 files that have definitely not been modified since the
1408 dirstate was written
1407 dirstate was written
1409 """
1408 """
1410 listignored, listclean, listunknown = ignored, clean, unknown
1409 listignored, listclean, listunknown = ignored, clean, unknown
1411 lookup, modified, added, unknown, ignored = [], [], [], [], []
1410 lookup, modified, added, unknown, ignored = [], [], [], [], []
1412 removed, deleted, clean = [], [], []
1411 removed, deleted, clean = [], [], []
1413
1412
1414 dmap = self._map
1413 dmap = self._map
1415 dmap.preload()
1414 dmap.preload()
1416
1415
1417 use_rust = True
1416 use_rust = True
1418
1417
1419 allowed_matchers = (
1418 allowed_matchers = (
1420 matchmod.alwaysmatcher,
1419 matchmod.alwaysmatcher,
1421 matchmod.exactmatcher,
1420 matchmod.exactmatcher,
1422 matchmod.includematcher,
1421 matchmod.includematcher,
1423 )
1422 )
1424
1423
1425 if rustmod is None:
1424 if rustmod is None:
1426 use_rust = False
1425 use_rust = False
1427 elif self._checkcase:
1426 elif self._checkcase:
1428 # Case-insensitive filesystems are not handled yet
1427 # Case-insensitive filesystems are not handled yet
1429 use_rust = False
1428 use_rust = False
1430 elif subrepos:
1429 elif subrepos:
1431 use_rust = False
1430 use_rust = False
1432 elif sparse.enabled:
1431 elif sparse.enabled:
1433 use_rust = False
1432 use_rust = False
1434 elif not isinstance(match, allowed_matchers):
1433 elif not isinstance(match, allowed_matchers):
1435 # Some matchers have yet to be implemented
1434 # Some matchers have yet to be implemented
1436 use_rust = False
1435 use_rust = False
1437
1436
1438 if use_rust:
1437 if use_rust:
1439 try:
1438 try:
1440 return self._rust_status(
1439 return self._rust_status(
1441 match, listclean, listignored, listunknown
1440 match, listclean, listignored, listunknown
1442 )
1441 )
1443 except rustmod.FallbackError:
1442 except rustmod.FallbackError:
1444 pass
1443 pass
1445
1444
1446 def noop(f):
1445 def noop(f):
1447 pass
1446 pass
1448
1447
1449 dcontains = dmap.__contains__
1448 dcontains = dmap.__contains__
1450 dget = dmap.__getitem__
1449 dget = dmap.__getitem__
1451 ladd = lookup.append # aka "unsure"
1450 ladd = lookup.append # aka "unsure"
1452 madd = modified.append
1451 madd = modified.append
1453 aadd = added.append
1452 aadd = added.append
1454 uadd = unknown.append if listunknown else noop
1453 uadd = unknown.append if listunknown else noop
1455 iadd = ignored.append if listignored else noop
1454 iadd = ignored.append if listignored else noop
1456 radd = removed.append
1455 radd = removed.append
1457 dadd = deleted.append
1456 dadd = deleted.append
1458 cadd = clean.append if listclean else noop
1457 cadd = clean.append if listclean else noop
1459 mexact = match.exact
1458 mexact = match.exact
1460 dirignore = self._dirignore
1459 dirignore = self._dirignore
1461 checkexec = self._checkexec
1460 checkexec = self._checkexec
1462 copymap = self._map.copymap
1461 copymap = self._map.copymap
1463 lastnormaltime = self._lastnormaltime
1462 lastnormaltime = self._lastnormaltime
1464
1463
1465 # We need to do full walks when either
1464 # We need to do full walks when either
1466 # - we're listing all clean files, or
1465 # - we're listing all clean files, or
1467 # - match.traversedir does something, because match.traversedir should
1466 # - match.traversedir does something, because match.traversedir should
1468 # be called for every dir in the working dir
1467 # be called for every dir in the working dir
1469 full = listclean or match.traversedir is not None
1468 full = listclean or match.traversedir is not None
1470 for fn, st in pycompat.iteritems(
1469 for fn, st in pycompat.iteritems(
1471 self.walk(match, subrepos, listunknown, listignored, full=full)
1470 self.walk(match, subrepos, listunknown, listignored, full=full)
1472 ):
1471 ):
1473 if not dcontains(fn):
1472 if not dcontains(fn):
1474 if (listignored or mexact(fn)) and dirignore(fn):
1473 if (listignored or mexact(fn)) and dirignore(fn):
1475 if listignored:
1474 if listignored:
1476 iadd(fn)
1475 iadd(fn)
1477 else:
1476 else:
1478 uadd(fn)
1477 uadd(fn)
1479 continue
1478 continue
1480
1479
1481 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1480 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1482 # written like that for performance reasons. dmap[fn] is not a
1481 # written like that for performance reasons. dmap[fn] is not a
1483 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1482 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1484 # opcode has fast paths when the value to be unpacked is a tuple or
1483 # opcode has fast paths when the value to be unpacked is a tuple or
1485 # a list, but falls back to creating a full-fledged iterator in
1484 # a list, but falls back to creating a full-fledged iterator in
1486 # general. That is much slower than simply accessing and storing the
1485 # general. That is much slower than simply accessing and storing the
1487 # tuple members one by one.
1486 # tuple members one by one.
1488 t = dget(fn)
1487 t = dget(fn)
1489 mode = t.mode
1488 mode = t.mode
1490 size = t.size
1489 size = t.size
1491 time = t.mtime
1490 time = t.mtime
1492
1491
1493 if not st and t.tracked:
1492 if not st and t.tracked:
1494 dadd(fn)
1493 dadd(fn)
1495 elif t.merged:
1494 elif t.merged:
1496 madd(fn)
1495 madd(fn)
1497 elif t.added:
1496 elif t.added:
1498 aadd(fn)
1497 aadd(fn)
1499 elif t.removed:
1498 elif t.removed:
1500 radd(fn)
1499 radd(fn)
1501 elif t.tracked:
1500 elif t.tracked:
1502 if (
1501 if (
1503 size >= 0
1502 size >= 0
1504 and (
1503 and (
1505 (size != st.st_size and size != st.st_size & _rangemask)
1504 (size != st.st_size and size != st.st_size & _rangemask)
1506 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1505 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1507 )
1506 )
1508 or t.from_p2
1507 or t.from_p2
1509 or fn in copymap
1508 or fn in copymap
1510 ):
1509 ):
1511 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1510 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1512 # issue6456: Size returned may be longer due to
1511 # issue6456: Size returned may be longer due to
1513 # encryption on EXT-4 fscrypt, undecided.
1512 # encryption on EXT-4 fscrypt, undecided.
1514 ladd(fn)
1513 ladd(fn)
1515 else:
1514 else:
1516 madd(fn)
1515 madd(fn)
1517 elif (
1516 elif (
1518 time != st[stat.ST_MTIME]
1517 time != st[stat.ST_MTIME]
1519 and time != st[stat.ST_MTIME] & _rangemask
1518 and time != st[stat.ST_MTIME] & _rangemask
1520 ):
1519 ):
1521 ladd(fn)
1520 ladd(fn)
1522 elif st[stat.ST_MTIME] == lastnormaltime:
1521 elif st[stat.ST_MTIME] == lastnormaltime:
1523 # fn may have just been marked as normal and it may have
1522 # fn may have just been marked as normal and it may have
1524 # changed in the same second without changing its size.
1523 # changed in the same second without changing its size.
1525 # This can happen if we quickly do multiple commits.
1524 # This can happen if we quickly do multiple commits.
1526 # Force lookup, so we don't miss such a racy file change.
1525 # Force lookup, so we don't miss such a racy file change.
1527 ladd(fn)
1526 ladd(fn)
1528 elif listclean:
1527 elif listclean:
1529 cadd(fn)
1528 cadd(fn)
1530 status = scmutil.status(
1529 status = scmutil.status(
1531 modified, added, removed, deleted, unknown, ignored, clean
1530 modified, added, removed, deleted, unknown, ignored, clean
1532 )
1531 )
1533 return (lookup, status)
1532 return (lookup, status)
1534
1533
1535 def matches(self, match):
1534 def matches(self, match):
1536 """
1535 """
1537 return files in the dirstate (in whatever state) filtered by match
1536 return files in the dirstate (in whatever state) filtered by match
1538 """
1537 """
1539 dmap = self._map
1538 dmap = self._map
1540 if rustmod is not None:
1539 if rustmod is not None:
1541 dmap = self._map._rustmap
1540 dmap = self._map._rustmap
1542
1541
1543 if match.always():
1542 if match.always():
1544 return dmap.keys()
1543 return dmap.keys()
1545 files = match.files()
1544 files = match.files()
1546 if match.isexact():
1545 if match.isexact():
1547 # fast path -- filter the other way around, since typically files is
1546 # fast path -- filter the other way around, since typically files is
1548 # much smaller than dmap
1547 # much smaller than dmap
1549 return [f for f in files if f in dmap]
1548 return [f for f in files if f in dmap]
1550 if match.prefix() and all(fn in dmap for fn in files):
1549 if match.prefix() and all(fn in dmap for fn in files):
1551 # fast path -- all the values are known to be files, so just return
1550 # fast path -- all the values are known to be files, so just return
1552 # that
1551 # that
1553 return list(files)
1552 return list(files)
1554 return [f for f in dmap if match(f)]
1553 return [f for f in dmap if match(f)]
1555
1554
1556 def _actualfilename(self, tr):
1555 def _actualfilename(self, tr):
1557 if tr:
1556 if tr:
1558 return self._pendingfilename
1557 return self._pendingfilename
1559 else:
1558 else:
1560 return self._filename
1559 return self._filename
1561
1560
1562 def savebackup(self, tr, backupname):
1561 def savebackup(self, tr, backupname):
1563 '''Save current dirstate into backup file'''
1562 '''Save current dirstate into backup file'''
1564 filename = self._actualfilename(tr)
1563 filename = self._actualfilename(tr)
1565 assert backupname != filename
1564 assert backupname != filename
1566
1565
1567 # use '_writedirstate' instead of 'write' to write changes certainly,
1566 # use '_writedirstate' instead of 'write' to write changes certainly,
1568 # because the latter omits writing out if transaction is running.
1567 # because the latter omits writing out if transaction is running.
1569 # output file will be used to create backup of dirstate at this point.
1568 # output file will be used to create backup of dirstate at this point.
1570 if self._dirty or not self._opener.exists(filename):
1569 if self._dirty or not self._opener.exists(filename):
1571 self._writedirstate(
1570 self._writedirstate(
1572 tr,
1571 tr,
1573 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1572 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1574 )
1573 )
1575
1574
1576 if tr:
1575 if tr:
1577 # ensure that subsequent tr.writepending returns True for
1576 # ensure that subsequent tr.writepending returns True for
1578 # changes written out above, even if dirstate is never
1577 # changes written out above, even if dirstate is never
1579 # changed after this
1578 # changed after this
1580 tr.addfilegenerator(
1579 tr.addfilegenerator(
1581 b'dirstate',
1580 b'dirstate',
1582 (self._filename,),
1581 (self._filename,),
1583 lambda f: self._writedirstate(tr, f),
1582 lambda f: self._writedirstate(tr, f),
1584 location=b'plain',
1583 location=b'plain',
1585 )
1584 )
1586
1585
1587 # ensure that pending file written above is unlinked at
1586 # ensure that pending file written above is unlinked at
1588 # failure, even if tr.writepending isn't invoked until the
1587 # failure, even if tr.writepending isn't invoked until the
1589 # end of this transaction
1588 # end of this transaction
1590 tr.registertmp(filename, location=b'plain')
1589 tr.registertmp(filename, location=b'plain')
1591
1590
1592 self._opener.tryunlink(backupname)
1591 self._opener.tryunlink(backupname)
1593 # hardlink backup is okay because _writedirstate is always called
1592 # hardlink backup is okay because _writedirstate is always called
1594 # with an "atomictemp=True" file.
1593 # with an "atomictemp=True" file.
1595 util.copyfile(
1594 util.copyfile(
1596 self._opener.join(filename),
1595 self._opener.join(filename),
1597 self._opener.join(backupname),
1596 self._opener.join(backupname),
1598 hardlink=True,
1597 hardlink=True,
1599 )
1598 )
1600
1599
1601 def restorebackup(self, tr, backupname):
1600 def restorebackup(self, tr, backupname):
1602 '''Restore dirstate by backup file'''
1601 '''Restore dirstate by backup file'''
1603 # this "invalidate()" prevents "wlock.release()" from writing
1602 # this "invalidate()" prevents "wlock.release()" from writing
1604 # changes of dirstate out after restoring from backup file
1603 # changes of dirstate out after restoring from backup file
1605 self.invalidate()
1604 self.invalidate()
1606 filename = self._actualfilename(tr)
1605 filename = self._actualfilename(tr)
1607 o = self._opener
1606 o = self._opener
1608 if util.samefile(o.join(backupname), o.join(filename)):
1607 if util.samefile(o.join(backupname), o.join(filename)):
1609 o.unlink(backupname)
1608 o.unlink(backupname)
1610 else:
1609 else:
1611 o.rename(backupname, filename, checkambig=True)
1610 o.rename(backupname, filename, checkambig=True)
1612
1611
1613 def clearbackup(self, tr, backupname):
1612 def clearbackup(self, tr, backupname):
1614 '''Clear backup file'''
1613 '''Clear backup file'''
1615 self._opener.unlink(backupname)
1614 self._opener.unlink(backupname)
@@ -1,929 +1,947 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 DirstateItem = parsers.DirstateItem
30 DirstateItem = parsers.DirstateItem
31
31
32 rangemask = 0x7FFFFFFF
32 rangemask = 0x7FFFFFFF
33
33
34
34
35 class dirstatemap(object):
35 class dirstatemap(object):
36 """Map encapsulating the dirstate's contents.
36 """Map encapsulating the dirstate's contents.
37
37
38 The dirstate contains the following state:
38 The dirstate contains the following state:
39
39
40 - `identity` is the identity of the dirstate file, which can be used to
40 - `identity` is the identity of the dirstate file, which can be used to
41 detect when changes have occurred to the dirstate file.
41 detect when changes have occurred to the dirstate file.
42
42
43 - `parents` is a pair containing the parents of the working copy. The
43 - `parents` is a pair containing the parents of the working copy. The
44 parents are updated by calling `setparents`.
44 parents are updated by calling `setparents`.
45
45
46 - the state map maps filenames to tuples of (state, mode, size, mtime),
46 - the state map maps filenames to tuples of (state, mode, size, mtime),
47 where state is a single character representing 'normal', 'added',
47 where state is a single character representing 'normal', 'added',
48 'removed', or 'merged'. It is read by treating the dirstate as a
48 'removed', or 'merged'. It is read by treating the dirstate as a
49 dict. File state is updated by calling the `addfile`, `removefile` and
49 dict. File state is updated by calling the `addfile`, `removefile` and
50 `dropfile` methods.
50 `dropfile` methods.
51
51
52 - `copymap` maps destination filenames to their source filename.
52 - `copymap` maps destination filenames to their source filename.
53
53
54 The dirstate also provides the following views onto the state:
54 The dirstate also provides the following views onto the state:
55
55
56 - `nonnormalset` is a set of the filenames that have state other
56 - `nonnormalset` is a set of the filenames that have state other
57 than 'normal', or are normal but have an mtime of -1 ('normallookup').
57 than 'normal', or are normal but have an mtime of -1 ('normallookup').
58
58
59 - `otherparentset` is a set of the filenames that are marked as coming
59 - `otherparentset` is a set of the filenames that are marked as coming
60 from the second parent when the dirstate is currently being merged.
60 from the second parent when the dirstate is currently being merged.
61
61
62 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
62 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
63 form that they appear as in the dirstate.
63 form that they appear as in the dirstate.
64
64
65 - `dirfoldmap` is a dict mapping normalized directory names to the
65 - `dirfoldmap` is a dict mapping normalized directory names to the
66 denormalized form that they appear as in the dirstate.
66 denormalized form that they appear as in the dirstate.
67 """
67 """
68
68
69 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
69 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
70 self._ui = ui
70 self._ui = ui
71 self._opener = opener
71 self._opener = opener
72 self._root = root
72 self._root = root
73 self._filename = b'dirstate'
73 self._filename = b'dirstate'
74 self._nodelen = 20
74 self._nodelen = 20
75 self._nodeconstants = nodeconstants
75 self._nodeconstants = nodeconstants
76 assert (
76 assert (
77 not use_dirstate_v2
77 not use_dirstate_v2
78 ), "should have detected unsupported requirement"
78 ), "should have detected unsupported requirement"
79
79
80 self._parents = None
80 self._parents = None
81 self._dirtyparents = False
81 self._dirtyparents = False
82
82
83 # for consistent view between _pl() and _read() invocations
83 # for consistent view between _pl() and _read() invocations
84 self._pendingmode = None
84 self._pendingmode = None
85
85
86 @propertycache
86 @propertycache
87 def _map(self):
87 def _map(self):
88 self._map = {}
88 self._map = {}
89 self.read()
89 self.read()
90 return self._map
90 return self._map
91
91
92 @propertycache
92 @propertycache
93 def copymap(self):
93 def copymap(self):
94 self.copymap = {}
94 self.copymap = {}
95 self._map
95 self._map
96 return self.copymap
96 return self.copymap
97
97
98 def clear(self):
98 def clear(self):
99 self._map.clear()
99 self._map.clear()
100 self.copymap.clear()
100 self.copymap.clear()
101 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
101 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
102 util.clearcachedproperty(self, b"_dirs")
102 util.clearcachedproperty(self, b"_dirs")
103 util.clearcachedproperty(self, b"_alldirs")
103 util.clearcachedproperty(self, b"_alldirs")
104 util.clearcachedproperty(self, b"filefoldmap")
104 util.clearcachedproperty(self, b"filefoldmap")
105 util.clearcachedproperty(self, b"dirfoldmap")
105 util.clearcachedproperty(self, b"dirfoldmap")
106 util.clearcachedproperty(self, b"nonnormalset")
106 util.clearcachedproperty(self, b"nonnormalset")
107 util.clearcachedproperty(self, b"otherparentset")
107 util.clearcachedproperty(self, b"otherparentset")
108
108
109 def items(self):
109 def items(self):
110 return pycompat.iteritems(self._map)
110 return pycompat.iteritems(self._map)
111
111
112 # forward for python2,3 compat
112 # forward for python2,3 compat
113 iteritems = items
113 iteritems = items
114
114
115 debug_iter = items
115 debug_iter = items
116
116
117 def __len__(self):
117 def __len__(self):
118 return len(self._map)
118 return len(self._map)
119
119
120 def __iter__(self):
120 def __iter__(self):
121 return iter(self._map)
121 return iter(self._map)
122
122
123 def get(self, key, default=None):
123 def get(self, key, default=None):
124 return self._map.get(key, default)
124 return self._map.get(key, default)
125
125
126 def __contains__(self, key):
126 def __contains__(self, key):
127 return key in self._map
127 return key in self._map
128
128
129 def __getitem__(self, key):
129 def __getitem__(self, key):
130 return self._map[key]
130 return self._map[key]
131
131
132 def keys(self):
132 def keys(self):
133 return self._map.keys()
133 return self._map.keys()
134
134
135 def preload(self):
135 def preload(self):
136 """Loads the underlying data, if it's not already loaded"""
136 """Loads the underlying data, if it's not already loaded"""
137 self._map
137 self._map
138
138
139 def _dirs_incr(self, filename, old_entry=None):
139 def _dirs_incr(self, filename, old_entry=None):
140 """incremente the dirstate counter if applicable"""
140 """incremente the dirstate counter if applicable"""
141 if (
141 if (
142 old_entry is None or old_entry.removed
142 old_entry is None or old_entry.removed
143 ) and "_dirs" in self.__dict__:
143 ) and "_dirs" in self.__dict__:
144 self._dirs.addpath(filename)
144 self._dirs.addpath(filename)
145 if old_entry is None and "_alldirs" in self.__dict__:
145 if old_entry is None and "_alldirs" in self.__dict__:
146 self._alldirs.addpath(filename)
146 self._alldirs.addpath(filename)
147
147
148 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
148 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
149 """decremente the dirstate counter if applicable"""
149 """decremente the dirstate counter if applicable"""
150 if old_entry is not None:
150 if old_entry is not None:
151 if "_dirs" in self.__dict__ and not old_entry.removed:
151 if "_dirs" in self.__dict__ and not old_entry.removed:
152 self._dirs.delpath(filename)
152 self._dirs.delpath(filename)
153 if "_alldirs" in self.__dict__ and not remove_variant:
153 if "_alldirs" in self.__dict__ and not remove_variant:
154 self._alldirs.delpath(filename)
154 self._alldirs.delpath(filename)
155 elif remove_variant and "_alldirs" in self.__dict__:
155 elif remove_variant and "_alldirs" in self.__dict__:
156 self._alldirs.addpath(filename)
156 self._alldirs.addpath(filename)
157 if "filefoldmap" in self.__dict__:
157 if "filefoldmap" in self.__dict__:
158 normed = util.normcase(filename)
158 normed = util.normcase(filename)
159 self.filefoldmap.pop(normed, None)
159 self.filefoldmap.pop(normed, None)
160
160
161 def set_possibly_dirty(self, filename):
161 def set_possibly_dirty(self, filename):
162 """record that the current state of the file on disk is unknown"""
162 """record that the current state of the file on disk is unknown"""
163 self[filename].set_possibly_dirty()
163 self[filename].set_possibly_dirty()
164
164
165 def set_clean(self, filename, mode, size, mtime):
166 """mark a file as back to a clean state"""
167 entry = self[filename]
168 mtime = mtime & rangemask
169 size = size & rangemask
170 entry.set_clean(mode, size, mtime)
171 self.copymap.pop(filename, None)
172 self.nonnormalset.discard(filename)
173
165 def addfile(
174 def addfile(
166 self,
175 self,
167 f,
176 f,
168 mode=0,
177 mode=0,
169 size=None,
178 size=None,
170 mtime=None,
179 mtime=None,
171 added=False,
180 added=False,
172 merged=False,
181 merged=False,
173 from_p2=False,
182 from_p2=False,
174 possibly_dirty=False,
183 possibly_dirty=False,
175 ):
184 ):
176 """Add a tracked file to the dirstate."""
185 """Add a tracked file to the dirstate."""
177 if added:
186 if added:
178 assert not merged
187 assert not merged
179 assert not possibly_dirty
188 assert not possibly_dirty
180 assert not from_p2
189 assert not from_p2
181 new_entry = DirstateItem.new_added()
190 new_entry = DirstateItem.new_added()
182 elif merged:
191 elif merged:
183 assert not possibly_dirty
192 assert not possibly_dirty
184 assert not from_p2
193 assert not from_p2
185 new_entry = DirstateItem.new_merged()
194 new_entry = DirstateItem.new_merged()
186 elif from_p2:
195 elif from_p2:
187 assert not possibly_dirty
196 assert not possibly_dirty
188 new_entry = DirstateItem.new_from_p2()
197 new_entry = DirstateItem.new_from_p2()
189 elif possibly_dirty:
198 elif possibly_dirty:
190 new_entry = DirstateItem.new_possibly_dirty()
199 new_entry = DirstateItem.new_possibly_dirty()
191 else:
200 else:
192 assert size is not None
201 assert size is not None
193 assert mtime is not None
202 assert mtime is not None
194 size = size & rangemask
203 size = size & rangemask
195 mtime = mtime & rangemask
204 mtime = mtime & rangemask
196 new_entry = DirstateItem.new_normal(mode, size, mtime)
205 new_entry = DirstateItem.new_normal(mode, size, mtime)
197 old_entry = self.get(f)
206 old_entry = self.get(f)
198 self._dirs_incr(f, old_entry)
207 self._dirs_incr(f, old_entry)
199 self._map[f] = new_entry
208 self._map[f] = new_entry
200 if new_entry.dm_nonnormal:
209 if new_entry.dm_nonnormal:
201 self.nonnormalset.add(f)
210 self.nonnormalset.add(f)
202 else:
211 else:
203 self.nonnormalset.discard(f)
212 self.nonnormalset.discard(f)
204 if new_entry.dm_otherparent:
213 if new_entry.dm_otherparent:
205 self.otherparentset.add(f)
214 self.otherparentset.add(f)
206 else:
215 else:
207 self.otherparentset.discard(f)
216 self.otherparentset.discard(f)
208
217
209 def reset_state(
218 def reset_state(
210 self,
219 self,
211 filename,
220 filename,
212 wc_tracked,
221 wc_tracked,
213 p1_tracked,
222 p1_tracked,
214 p2_tracked=False,
223 p2_tracked=False,
215 merged=False,
224 merged=False,
216 clean_p1=False,
225 clean_p1=False,
217 clean_p2=False,
226 clean_p2=False,
218 possibly_dirty=False,
227 possibly_dirty=False,
219 parentfiledata=None,
228 parentfiledata=None,
220 ):
229 ):
221 """Set a entry to a given state, diregarding all previous state
230 """Set a entry to a given state, diregarding all previous state
222
231
223 This is to be used by the part of the dirstate API dedicated to
232 This is to be used by the part of the dirstate API dedicated to
224 adjusting the dirstate after a update/merge.
233 adjusting the dirstate after a update/merge.
225
234
226 note: calling this might result to no entry existing at all if the
235 note: calling this might result to no entry existing at all if the
227 dirstate map does not see any point at having one for this file
236 dirstate map does not see any point at having one for this file
228 anymore.
237 anymore.
229 """
238 """
230 if merged and (clean_p1 or clean_p2):
239 if merged and (clean_p1 or clean_p2):
231 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
240 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
232 raise error.ProgrammingError(msg)
241 raise error.ProgrammingError(msg)
233 # copy information are now outdated
242 # copy information are now outdated
234 # (maybe new information should be in directly passed to this function)
243 # (maybe new information should be in directly passed to this function)
235 self.copymap.pop(filename, None)
244 self.copymap.pop(filename, None)
236
245
237 if not (p1_tracked or p2_tracked or wc_tracked):
246 if not (p1_tracked or p2_tracked or wc_tracked):
238 self.dropfile(filename)
247 self.dropfile(filename)
239 return
248 return
240 elif merged:
249 elif merged:
241 # XXX might be merged and removed ?
250 # XXX might be merged and removed ?
242 entry = self.get(filename)
251 entry = self.get(filename)
243 if entry is None or not entry.tracked:
252 if entry is None or not entry.tracked:
244 # XXX mostly replicate dirstate.other parent. We should get
253 # XXX mostly replicate dirstate.other parent. We should get
245 # the higher layer to pass us more reliable data where `merged`
254 # the higher layer to pass us more reliable data where `merged`
246 # actually mean merged. Dropping this clause will show failure
255 # actually mean merged. Dropping this clause will show failure
247 # in `test-graft.t`
256 # in `test-graft.t`
248 merged = False
257 merged = False
249 clean_p2 = True
258 clean_p2 = True
250 elif not (p1_tracked or p2_tracked) and wc_tracked:
259 elif not (p1_tracked or p2_tracked) and wc_tracked:
251 pass # file is added, nothing special to adjust
260 pass # file is added, nothing special to adjust
252 elif (p1_tracked or p2_tracked) and not wc_tracked:
261 elif (p1_tracked or p2_tracked) and not wc_tracked:
253 pass
262 pass
254 elif clean_p2 and wc_tracked:
263 elif clean_p2 and wc_tracked:
255 if p1_tracked or self.get(filename) is not None:
264 if p1_tracked or self.get(filename) is not None:
256 # XXX the `self.get` call is catching some case in
265 # XXX the `self.get` call is catching some case in
257 # `test-merge-remove.t` where the file is tracked in p1, the
266 # `test-merge-remove.t` where the file is tracked in p1, the
258 # p1_tracked argument is False.
267 # p1_tracked argument is False.
259 #
268 #
260 # In addition, this seems to be a case where the file is marked
269 # In addition, this seems to be a case where the file is marked
261 # as merged without actually being the result of a merge
270 # as merged without actually being the result of a merge
262 # action. So thing are not ideal here.
271 # action. So thing are not ideal here.
263 merged = True
272 merged = True
264 clean_p2 = False
273 clean_p2 = False
265 elif not p1_tracked and p2_tracked and wc_tracked:
274 elif not p1_tracked and p2_tracked and wc_tracked:
266 clean_p2 = True
275 clean_p2 = True
267 elif possibly_dirty:
276 elif possibly_dirty:
268 pass
277 pass
269 elif wc_tracked:
278 elif wc_tracked:
270 # this is a "normal" file
279 # this is a "normal" file
271 if parentfiledata is None:
280 if parentfiledata is None:
272 msg = b'failed to pass parentfiledata for a normal file: %s'
281 msg = b'failed to pass parentfiledata for a normal file: %s'
273 msg %= filename
282 msg %= filename
274 raise error.ProgrammingError(msg)
283 raise error.ProgrammingError(msg)
275 else:
284 else:
276 assert False, 'unreachable'
285 assert False, 'unreachable'
277
286
278 old_entry = self._map.get(filename)
287 old_entry = self._map.get(filename)
279 self._dirs_incr(filename, old_entry)
288 self._dirs_incr(filename, old_entry)
280 entry = DirstateItem(
289 entry = DirstateItem(
281 wc_tracked=wc_tracked,
290 wc_tracked=wc_tracked,
282 p1_tracked=p1_tracked,
291 p1_tracked=p1_tracked,
283 p2_tracked=p2_tracked,
292 p2_tracked=p2_tracked,
284 merged=merged,
293 merged=merged,
285 clean_p1=clean_p1,
294 clean_p1=clean_p1,
286 clean_p2=clean_p2,
295 clean_p2=clean_p2,
287 possibly_dirty=possibly_dirty,
296 possibly_dirty=possibly_dirty,
288 parentfiledata=parentfiledata,
297 parentfiledata=parentfiledata,
289 )
298 )
290 if entry.dm_nonnormal:
299 if entry.dm_nonnormal:
291 self.nonnormalset.add(filename)
300 self.nonnormalset.add(filename)
292 else:
301 else:
293 self.nonnormalset.discard(filename)
302 self.nonnormalset.discard(filename)
294 if entry.dm_otherparent:
303 if entry.dm_otherparent:
295 self.otherparentset.add(filename)
304 self.otherparentset.add(filename)
296 else:
305 else:
297 self.otherparentset.discard(filename)
306 self.otherparentset.discard(filename)
298 self._map[filename] = entry
307 self._map[filename] = entry
299
308
300 def set_untracked(self, f):
309 def set_untracked(self, f):
301 """Mark a file as no longer tracked in the dirstate map"""
310 """Mark a file as no longer tracked in the dirstate map"""
302 entry = self.get(f)
311 entry = self.get(f)
303 if entry is None:
312 if entry is None:
304 return False
313 return False
305 else:
314 else:
306 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
315 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
307 if not entry.merged:
316 if not entry.merged:
308 self.copymap.pop(f, None)
317 self.copymap.pop(f, None)
309 if entry.added:
318 if entry.added:
310 self.nonnormalset.discard(f)
319 self.nonnormalset.discard(f)
311 self._map.pop(f, None)
320 self._map.pop(f, None)
312 else:
321 else:
313 self.nonnormalset.add(f)
322 self.nonnormalset.add(f)
314 if entry.from_p2:
323 if entry.from_p2:
315 self.otherparentset.add(f)
324 self.otherparentset.add(f)
316 entry.set_untracked()
325 entry.set_untracked()
317 return True
326 return True
318
327
319 def dropfile(self, f):
328 def dropfile(self, f):
320 """
329 """
321 Remove a file from the dirstate. Returns True if the file was
330 Remove a file from the dirstate. Returns True if the file was
322 previously recorded.
331 previously recorded.
323 """
332 """
324 old_entry = self._map.pop(f, None)
333 old_entry = self._map.pop(f, None)
325 self._dirs_decr(f, old_entry=old_entry)
334 self._dirs_decr(f, old_entry=old_entry)
326 self.nonnormalset.discard(f)
335 self.nonnormalset.discard(f)
327 self.copymap.pop(f, None)
336 self.copymap.pop(f, None)
328 return old_entry is not None
337 return old_entry is not None
329
338
330 def clearambiguoustimes(self, files, now):
339 def clearambiguoustimes(self, files, now):
331 for f in files:
340 for f in files:
332 e = self.get(f)
341 e = self.get(f)
333 if e is not None and e.need_delay(now):
342 if e is not None and e.need_delay(now):
334 e.set_possibly_dirty()
343 e.set_possibly_dirty()
335 self.nonnormalset.add(f)
344 self.nonnormalset.add(f)
336
345
337 def nonnormalentries(self):
346 def nonnormalentries(self):
338 '''Compute the nonnormal dirstate entries from the dmap'''
347 '''Compute the nonnormal dirstate entries from the dmap'''
339 try:
348 try:
340 return parsers.nonnormalotherparententries(self._map)
349 return parsers.nonnormalotherparententries(self._map)
341 except AttributeError:
350 except AttributeError:
342 nonnorm = set()
351 nonnorm = set()
343 otherparent = set()
352 otherparent = set()
344 for fname, e in pycompat.iteritems(self._map):
353 for fname, e in pycompat.iteritems(self._map):
345 if e.dm_nonnormal:
354 if e.dm_nonnormal:
346 nonnorm.add(fname)
355 nonnorm.add(fname)
347 if e.from_p2:
356 if e.from_p2:
348 otherparent.add(fname)
357 otherparent.add(fname)
349 return nonnorm, otherparent
358 return nonnorm, otherparent
350
359
351 @propertycache
360 @propertycache
352 def filefoldmap(self):
361 def filefoldmap(self):
353 """Returns a dictionary mapping normalized case paths to their
362 """Returns a dictionary mapping normalized case paths to their
354 non-normalized versions.
363 non-normalized versions.
355 """
364 """
356 try:
365 try:
357 makefilefoldmap = parsers.make_file_foldmap
366 makefilefoldmap = parsers.make_file_foldmap
358 except AttributeError:
367 except AttributeError:
359 pass
368 pass
360 else:
369 else:
361 return makefilefoldmap(
370 return makefilefoldmap(
362 self._map, util.normcasespec, util.normcasefallback
371 self._map, util.normcasespec, util.normcasefallback
363 )
372 )
364
373
365 f = {}
374 f = {}
366 normcase = util.normcase
375 normcase = util.normcase
367 for name, s in pycompat.iteritems(self._map):
376 for name, s in pycompat.iteritems(self._map):
368 if not s.removed:
377 if not s.removed:
369 f[normcase(name)] = name
378 f[normcase(name)] = name
370 f[b'.'] = b'.' # prevents useless util.fspath() invocation
379 f[b'.'] = b'.' # prevents useless util.fspath() invocation
371 return f
380 return f
372
381
373 def hastrackeddir(self, d):
382 def hastrackeddir(self, d):
374 """
383 """
375 Returns True if the dirstate contains a tracked (not removed) file
384 Returns True if the dirstate contains a tracked (not removed) file
376 in this directory.
385 in this directory.
377 """
386 """
378 return d in self._dirs
387 return d in self._dirs
379
388
380 def hasdir(self, d):
389 def hasdir(self, d):
381 """
390 """
382 Returns True if the dirstate contains a file (tracked or removed)
391 Returns True if the dirstate contains a file (tracked or removed)
383 in this directory.
392 in this directory.
384 """
393 """
385 return d in self._alldirs
394 return d in self._alldirs
386
395
387 @propertycache
396 @propertycache
388 def _dirs(self):
397 def _dirs(self):
389 return pathutil.dirs(self._map, only_tracked=True)
398 return pathutil.dirs(self._map, only_tracked=True)
390
399
391 @propertycache
400 @propertycache
392 def _alldirs(self):
401 def _alldirs(self):
393 return pathutil.dirs(self._map)
402 return pathutil.dirs(self._map)
394
403
395 def _opendirstatefile(self):
404 def _opendirstatefile(self):
396 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
405 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
397 if self._pendingmode is not None and self._pendingmode != mode:
406 if self._pendingmode is not None and self._pendingmode != mode:
398 fp.close()
407 fp.close()
399 raise error.Abort(
408 raise error.Abort(
400 _(b'working directory state may be changed parallelly')
409 _(b'working directory state may be changed parallelly')
401 )
410 )
402 self._pendingmode = mode
411 self._pendingmode = mode
403 return fp
412 return fp
404
413
405 def parents(self):
414 def parents(self):
406 if not self._parents:
415 if not self._parents:
407 try:
416 try:
408 fp = self._opendirstatefile()
417 fp = self._opendirstatefile()
409 st = fp.read(2 * self._nodelen)
418 st = fp.read(2 * self._nodelen)
410 fp.close()
419 fp.close()
411 except IOError as err:
420 except IOError as err:
412 if err.errno != errno.ENOENT:
421 if err.errno != errno.ENOENT:
413 raise
422 raise
414 # File doesn't exist, so the current state is empty
423 # File doesn't exist, so the current state is empty
415 st = b''
424 st = b''
416
425
417 l = len(st)
426 l = len(st)
418 if l == self._nodelen * 2:
427 if l == self._nodelen * 2:
419 self._parents = (
428 self._parents = (
420 st[: self._nodelen],
429 st[: self._nodelen],
421 st[self._nodelen : 2 * self._nodelen],
430 st[self._nodelen : 2 * self._nodelen],
422 )
431 )
423 elif l == 0:
432 elif l == 0:
424 self._parents = (
433 self._parents = (
425 self._nodeconstants.nullid,
434 self._nodeconstants.nullid,
426 self._nodeconstants.nullid,
435 self._nodeconstants.nullid,
427 )
436 )
428 else:
437 else:
429 raise error.Abort(
438 raise error.Abort(
430 _(b'working directory state appears damaged!')
439 _(b'working directory state appears damaged!')
431 )
440 )
432
441
433 return self._parents
442 return self._parents
434
443
435 def setparents(self, p1, p2):
444 def setparents(self, p1, p2):
436 self._parents = (p1, p2)
445 self._parents = (p1, p2)
437 self._dirtyparents = True
446 self._dirtyparents = True
438
447
439 def read(self):
448 def read(self):
440 # ignore HG_PENDING because identity is used only for writing
449 # ignore HG_PENDING because identity is used only for writing
441 self.identity = util.filestat.frompath(
450 self.identity = util.filestat.frompath(
442 self._opener.join(self._filename)
451 self._opener.join(self._filename)
443 )
452 )
444
453
445 try:
454 try:
446 fp = self._opendirstatefile()
455 fp = self._opendirstatefile()
447 try:
456 try:
448 st = fp.read()
457 st = fp.read()
449 finally:
458 finally:
450 fp.close()
459 fp.close()
451 except IOError as err:
460 except IOError as err:
452 if err.errno != errno.ENOENT:
461 if err.errno != errno.ENOENT:
453 raise
462 raise
454 return
463 return
455 if not st:
464 if not st:
456 return
465 return
457
466
458 if util.safehasattr(parsers, b'dict_new_presized'):
467 if util.safehasattr(parsers, b'dict_new_presized'):
459 # Make an estimate of the number of files in the dirstate based on
468 # Make an estimate of the number of files in the dirstate based on
460 # its size. This trades wasting some memory for avoiding costly
469 # its size. This trades wasting some memory for avoiding costly
461 # resizes. Each entry have a prefix of 17 bytes followed by one or
470 # resizes. Each entry have a prefix of 17 bytes followed by one or
462 # two path names. Studies on various large-scale real-world repositories
471 # two path names. Studies on various large-scale real-world repositories
463 # found 54 bytes a reasonable upper limit for the average path names.
472 # found 54 bytes a reasonable upper limit for the average path names.
464 # Copy entries are ignored for the sake of this estimate.
473 # Copy entries are ignored for the sake of this estimate.
465 self._map = parsers.dict_new_presized(len(st) // 71)
474 self._map = parsers.dict_new_presized(len(st) // 71)
466
475
467 # Python's garbage collector triggers a GC each time a certain number
476 # Python's garbage collector triggers a GC each time a certain number
468 # of container objects (the number being defined by
477 # of container objects (the number being defined by
469 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
478 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
470 # for each file in the dirstate. The C version then immediately marks
479 # for each file in the dirstate. The C version then immediately marks
471 # them as not to be tracked by the collector. However, this has no
480 # them as not to be tracked by the collector. However, this has no
472 # effect on when GCs are triggered, only on what objects the GC looks
481 # effect on when GCs are triggered, only on what objects the GC looks
473 # into. This means that O(number of files) GCs are unavoidable.
482 # into. This means that O(number of files) GCs are unavoidable.
474 # Depending on when in the process's lifetime the dirstate is parsed,
483 # Depending on when in the process's lifetime the dirstate is parsed,
475 # this can get very expensive. As a workaround, disable GC while
484 # this can get very expensive. As a workaround, disable GC while
476 # parsing the dirstate.
485 # parsing the dirstate.
477 #
486 #
478 # (we cannot decorate the function directly since it is in a C module)
487 # (we cannot decorate the function directly since it is in a C module)
479 parse_dirstate = util.nogc(parsers.parse_dirstate)
488 parse_dirstate = util.nogc(parsers.parse_dirstate)
480 p = parse_dirstate(self._map, self.copymap, st)
489 p = parse_dirstate(self._map, self.copymap, st)
481 if not self._dirtyparents:
490 if not self._dirtyparents:
482 self.setparents(*p)
491 self.setparents(*p)
483
492
484 # Avoid excess attribute lookups by fast pathing certain checks
493 # Avoid excess attribute lookups by fast pathing certain checks
485 self.__contains__ = self._map.__contains__
494 self.__contains__ = self._map.__contains__
486 self.__getitem__ = self._map.__getitem__
495 self.__getitem__ = self._map.__getitem__
487 self.get = self._map.get
496 self.get = self._map.get
488
497
489 def write(self, _tr, st, now):
498 def write(self, _tr, st, now):
490 st.write(
499 st.write(
491 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
500 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
492 )
501 )
493 st.close()
502 st.close()
494 self._dirtyparents = False
503 self._dirtyparents = False
495 self.nonnormalset, self.otherparentset = self.nonnormalentries()
504 self.nonnormalset, self.otherparentset = self.nonnormalentries()
496
505
497 @propertycache
506 @propertycache
498 def nonnormalset(self):
507 def nonnormalset(self):
499 nonnorm, otherparents = self.nonnormalentries()
508 nonnorm, otherparents = self.nonnormalentries()
500 self.otherparentset = otherparents
509 self.otherparentset = otherparents
501 return nonnorm
510 return nonnorm
502
511
503 @propertycache
512 @propertycache
504 def otherparentset(self):
513 def otherparentset(self):
505 nonnorm, otherparents = self.nonnormalentries()
514 nonnorm, otherparents = self.nonnormalentries()
506 self.nonnormalset = nonnorm
515 self.nonnormalset = nonnorm
507 return otherparents
516 return otherparents
508
517
509 def non_normal_or_other_parent_paths(self):
518 def non_normal_or_other_parent_paths(self):
510 return self.nonnormalset.union(self.otherparentset)
519 return self.nonnormalset.union(self.otherparentset)
511
520
512 @propertycache
521 @propertycache
513 def identity(self):
522 def identity(self):
514 self._map
523 self._map
515 return self.identity
524 return self.identity
516
525
517 @propertycache
526 @propertycache
518 def dirfoldmap(self):
527 def dirfoldmap(self):
519 f = {}
528 f = {}
520 normcase = util.normcase
529 normcase = util.normcase
521 for name in self._dirs:
530 for name in self._dirs:
522 f[normcase(name)] = name
531 f[normcase(name)] = name
523 return f
532 return f
524
533
525
534
526 if rustmod is not None:
535 if rustmod is not None:
527
536
528 class dirstatemap(object):
537 class dirstatemap(object):
529 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
538 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
530 self._use_dirstate_v2 = use_dirstate_v2
539 self._use_dirstate_v2 = use_dirstate_v2
531 self._nodeconstants = nodeconstants
540 self._nodeconstants = nodeconstants
532 self._ui = ui
541 self._ui = ui
533 self._opener = opener
542 self._opener = opener
534 self._root = root
543 self._root = root
535 self._filename = b'dirstate'
544 self._filename = b'dirstate'
536 self._nodelen = 20 # Also update Rust code when changing this!
545 self._nodelen = 20 # Also update Rust code when changing this!
537 self._parents = None
546 self._parents = None
538 self._dirtyparents = False
547 self._dirtyparents = False
539 self._docket = None
548 self._docket = None
540
549
541 # for consistent view between _pl() and _read() invocations
550 # for consistent view between _pl() and _read() invocations
542 self._pendingmode = None
551 self._pendingmode = None
543
552
544 self._use_dirstate_tree = self._ui.configbool(
553 self._use_dirstate_tree = self._ui.configbool(
545 b"experimental",
554 b"experimental",
546 b"dirstate-tree.in-memory",
555 b"dirstate-tree.in-memory",
547 False,
556 False,
548 )
557 )
549
558
550 def addfile(
559 def addfile(
551 self,
560 self,
552 f,
561 f,
553 mode=0,
562 mode=0,
554 size=None,
563 size=None,
555 mtime=None,
564 mtime=None,
556 added=False,
565 added=False,
557 merged=False,
566 merged=False,
558 from_p2=False,
567 from_p2=False,
559 possibly_dirty=False,
568 possibly_dirty=False,
560 ):
569 ):
561 return self._rustmap.addfile(
570 return self._rustmap.addfile(
562 f,
571 f,
563 mode,
572 mode,
564 size,
573 size,
565 mtime,
574 mtime,
566 added,
575 added,
567 merged,
576 merged,
568 from_p2,
577 from_p2,
569 possibly_dirty,
578 possibly_dirty,
570 )
579 )
571
580
572 def reset_state(
581 def reset_state(
573 self,
582 self,
574 filename,
583 filename,
575 wc_tracked,
584 wc_tracked,
576 p1_tracked,
585 p1_tracked,
577 p2_tracked=False,
586 p2_tracked=False,
578 merged=False,
587 merged=False,
579 clean_p1=False,
588 clean_p1=False,
580 clean_p2=False,
589 clean_p2=False,
581 possibly_dirty=False,
590 possibly_dirty=False,
582 parentfiledata=None,
591 parentfiledata=None,
583 ):
592 ):
584 """Set a entry to a given state, disregarding all previous state
593 """Set a entry to a given state, disregarding all previous state
585
594
586 This is to be used by the part of the dirstate API dedicated to
595 This is to be used by the part of the dirstate API dedicated to
587 adjusting the dirstate after a update/merge.
596 adjusting the dirstate after a update/merge.
588
597
589 note: calling this might result to no entry existing at all if the
598 note: calling this might result to no entry existing at all if the
590 dirstate map does not see any point at having one for this file
599 dirstate map does not see any point at having one for this file
591 anymore.
600 anymore.
592 """
601 """
593 if merged and (clean_p1 or clean_p2):
602 if merged and (clean_p1 or clean_p2):
594 msg = (
603 msg = (
595 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
604 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
596 )
605 )
597 raise error.ProgrammingError(msg)
606 raise error.ProgrammingError(msg)
598 # copy information are now outdated
607 # copy information are now outdated
599 # (maybe new information should be in directly passed to this function)
608 # (maybe new information should be in directly passed to this function)
600 self.copymap.pop(filename, None)
609 self.copymap.pop(filename, None)
601
610
602 if not (p1_tracked or p2_tracked or wc_tracked):
611 if not (p1_tracked or p2_tracked or wc_tracked):
603 self.dropfile(filename)
612 self.dropfile(filename)
604 elif merged:
613 elif merged:
605 # XXX might be merged and removed ?
614 # XXX might be merged and removed ?
606 entry = self.get(filename)
615 entry = self.get(filename)
607 if entry is not None and entry.tracked:
616 if entry is not None and entry.tracked:
608 # XXX mostly replicate dirstate.other parent. We should get
617 # XXX mostly replicate dirstate.other parent. We should get
609 # the higher layer to pass us more reliable data where `merged`
618 # the higher layer to pass us more reliable data where `merged`
610 # actually mean merged. Dropping the else clause will show
619 # actually mean merged. Dropping the else clause will show
611 # failure in `test-graft.t`
620 # failure in `test-graft.t`
612 self.addfile(filename, merged=True)
621 self.addfile(filename, merged=True)
613 else:
622 else:
614 self.addfile(filename, from_p2=True)
623 self.addfile(filename, from_p2=True)
615 elif not (p1_tracked or p2_tracked) and wc_tracked:
624 elif not (p1_tracked or p2_tracked) and wc_tracked:
616 self.addfile(
625 self.addfile(
617 filename, added=True, possibly_dirty=possibly_dirty
626 filename, added=True, possibly_dirty=possibly_dirty
618 )
627 )
619 elif (p1_tracked or p2_tracked) and not wc_tracked:
628 elif (p1_tracked or p2_tracked) and not wc_tracked:
620 # XXX might be merged and removed ?
629 # XXX might be merged and removed ?
621 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
630 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
622 self.nonnormalset.add(filename)
631 self.nonnormalset.add(filename)
623 elif clean_p2 and wc_tracked:
632 elif clean_p2 and wc_tracked:
624 if p1_tracked or self.get(filename) is not None:
633 if p1_tracked or self.get(filename) is not None:
625 # XXX the `self.get` call is catching some case in
634 # XXX the `self.get` call is catching some case in
626 # `test-merge-remove.t` where the file is tracked in p1, the
635 # `test-merge-remove.t` where the file is tracked in p1, the
627 # p1_tracked argument is False.
636 # p1_tracked argument is False.
628 #
637 #
629 # In addition, this seems to be a case where the file is marked
638 # In addition, this seems to be a case where the file is marked
630 # as merged without actually being the result of a merge
639 # as merged without actually being the result of a merge
631 # action. So thing are not ideal here.
640 # action. So thing are not ideal here.
632 self.addfile(filename, merged=True)
641 self.addfile(filename, merged=True)
633 else:
642 else:
634 self.addfile(filename, from_p2=True)
643 self.addfile(filename, from_p2=True)
635 elif not p1_tracked and p2_tracked and wc_tracked:
644 elif not p1_tracked and p2_tracked and wc_tracked:
636 self.addfile(
645 self.addfile(
637 filename, from_p2=True, possibly_dirty=possibly_dirty
646 filename, from_p2=True, possibly_dirty=possibly_dirty
638 )
647 )
639 elif possibly_dirty:
648 elif possibly_dirty:
640 self.addfile(filename, possibly_dirty=possibly_dirty)
649 self.addfile(filename, possibly_dirty=possibly_dirty)
641 elif wc_tracked:
650 elif wc_tracked:
642 # this is a "normal" file
651 # this is a "normal" file
643 if parentfiledata is None:
652 if parentfiledata is None:
644 msg = b'failed to pass parentfiledata for a normal file: %s'
653 msg = b'failed to pass parentfiledata for a normal file: %s'
645 msg %= filename
654 msg %= filename
646 raise error.ProgrammingError(msg)
655 raise error.ProgrammingError(msg)
647 mode, size, mtime = parentfiledata
656 mode, size, mtime = parentfiledata
648 self.addfile(filename, mode=mode, size=size, mtime=mtime)
657 self.addfile(filename, mode=mode, size=size, mtime=mtime)
649 self.nonnormalset.discard(filename)
658 self.nonnormalset.discard(filename)
650 else:
659 else:
651 assert False, 'unreachable'
660 assert False, 'unreachable'
652
661
653 def set_untracked(self, f):
662 def set_untracked(self, f):
654 """Mark a file as no longer tracked in the dirstate map"""
663 """Mark a file as no longer tracked in the dirstate map"""
655 # in merge is only trigger more logic, so it "fine" to pass it.
664 # in merge is only trigger more logic, so it "fine" to pass it.
656 #
665 #
657 # the inner rust dirstate map code need to be adjusted once the API
666 # the inner rust dirstate map code need to be adjusted once the API
658 # for dirstate/dirstatemap/DirstateItem is a bit more settled
667 # for dirstate/dirstatemap/DirstateItem is a bit more settled
659 entry = self.get(f)
668 entry = self.get(f)
660 if entry is None:
669 if entry is None:
661 return False
670 return False
662 else:
671 else:
663 if entry.added:
672 if entry.added:
664 self._rustmap.copymap().pop(f, None)
673 self._rustmap.copymap().pop(f, None)
665 self._rustmap.dropfile(f)
674 self._rustmap.dropfile(f)
666 else:
675 else:
667 self._rustmap.removefile(f, in_merge=True)
676 self._rustmap.removefile(f, in_merge=True)
668 return True
677 return True
669
678
670 def removefile(self, *args, **kwargs):
679 def removefile(self, *args, **kwargs):
671 return self._rustmap.removefile(*args, **kwargs)
680 return self._rustmap.removefile(*args, **kwargs)
672
681
673 def dropfile(self, f, *args, **kwargs):
682 def dropfile(self, f, *args, **kwargs):
674 self._rustmap.copymap().pop(f, None)
683 self._rustmap.copymap().pop(f, None)
675 return self._rustmap.dropfile(f, *args, **kwargs)
684 return self._rustmap.dropfile(f, *args, **kwargs)
676
685
677 def clearambiguoustimes(self, *args, **kwargs):
686 def clearambiguoustimes(self, *args, **kwargs):
678 return self._rustmap.clearambiguoustimes(*args, **kwargs)
687 return self._rustmap.clearambiguoustimes(*args, **kwargs)
679
688
680 def nonnormalentries(self):
689 def nonnormalentries(self):
681 return self._rustmap.nonnormalentries()
690 return self._rustmap.nonnormalentries()
682
691
683 def get(self, *args, **kwargs):
692 def get(self, *args, **kwargs):
684 return self._rustmap.get(*args, **kwargs)
693 return self._rustmap.get(*args, **kwargs)
685
694
686 @property
695 @property
687 def copymap(self):
696 def copymap(self):
688 return self._rustmap.copymap()
697 return self._rustmap.copymap()
689
698
690 def directories(self):
699 def directories(self):
691 return self._rustmap.directories()
700 return self._rustmap.directories()
692
701
693 def debug_iter(self):
702 def debug_iter(self):
694 return self._rustmap.debug_iter()
703 return self._rustmap.debug_iter()
695
704
696 def preload(self):
705 def preload(self):
697 self._rustmap
706 self._rustmap
698
707
699 def clear(self):
708 def clear(self):
700 self._rustmap.clear()
709 self._rustmap.clear()
701 self.setparents(
710 self.setparents(
702 self._nodeconstants.nullid, self._nodeconstants.nullid
711 self._nodeconstants.nullid, self._nodeconstants.nullid
703 )
712 )
704 util.clearcachedproperty(self, b"_dirs")
713 util.clearcachedproperty(self, b"_dirs")
705 util.clearcachedproperty(self, b"_alldirs")
714 util.clearcachedproperty(self, b"_alldirs")
706 util.clearcachedproperty(self, b"dirfoldmap")
715 util.clearcachedproperty(self, b"dirfoldmap")
707
716
708 def items(self):
717 def items(self):
709 return self._rustmap.items()
718 return self._rustmap.items()
710
719
711 def keys(self):
720 def keys(self):
712 return iter(self._rustmap)
721 return iter(self._rustmap)
713
722
714 def __contains__(self, key):
723 def __contains__(self, key):
715 return key in self._rustmap
724 return key in self._rustmap
716
725
717 def __getitem__(self, item):
726 def __getitem__(self, item):
718 return self._rustmap[item]
727 return self._rustmap[item]
719
728
720 def __len__(self):
729 def __len__(self):
721 return len(self._rustmap)
730 return len(self._rustmap)
722
731
723 def __iter__(self):
732 def __iter__(self):
724 return iter(self._rustmap)
733 return iter(self._rustmap)
725
734
726 # forward for python2,3 compat
735 # forward for python2,3 compat
727 iteritems = items
736 iteritems = items
728
737
729 def _opendirstatefile(self):
738 def _opendirstatefile(self):
730 fp, mode = txnutil.trypending(
739 fp, mode = txnutil.trypending(
731 self._root, self._opener, self._filename
740 self._root, self._opener, self._filename
732 )
741 )
733 if self._pendingmode is not None and self._pendingmode != mode:
742 if self._pendingmode is not None and self._pendingmode != mode:
734 fp.close()
743 fp.close()
735 raise error.Abort(
744 raise error.Abort(
736 _(b'working directory state may be changed parallelly')
745 _(b'working directory state may be changed parallelly')
737 )
746 )
738 self._pendingmode = mode
747 self._pendingmode = mode
739 return fp
748 return fp
740
749
741 def _readdirstatefile(self, size=-1):
750 def _readdirstatefile(self, size=-1):
742 try:
751 try:
743 with self._opendirstatefile() as fp:
752 with self._opendirstatefile() as fp:
744 return fp.read(size)
753 return fp.read(size)
745 except IOError as err:
754 except IOError as err:
746 if err.errno != errno.ENOENT:
755 if err.errno != errno.ENOENT:
747 raise
756 raise
748 # File doesn't exist, so the current state is empty
757 # File doesn't exist, so the current state is empty
749 return b''
758 return b''
750
759
751 def setparents(self, p1, p2):
760 def setparents(self, p1, p2):
752 self._parents = (p1, p2)
761 self._parents = (p1, p2)
753 self._dirtyparents = True
762 self._dirtyparents = True
754
763
755 def parents(self):
764 def parents(self):
756 if not self._parents:
765 if not self._parents:
757 if self._use_dirstate_v2:
766 if self._use_dirstate_v2:
758 self._parents = self.docket.parents
767 self._parents = self.docket.parents
759 else:
768 else:
760 read_len = self._nodelen * 2
769 read_len = self._nodelen * 2
761 st = self._readdirstatefile(read_len)
770 st = self._readdirstatefile(read_len)
762 l = len(st)
771 l = len(st)
763 if l == read_len:
772 if l == read_len:
764 self._parents = (
773 self._parents = (
765 st[: self._nodelen],
774 st[: self._nodelen],
766 st[self._nodelen : 2 * self._nodelen],
775 st[self._nodelen : 2 * self._nodelen],
767 )
776 )
768 elif l == 0:
777 elif l == 0:
769 self._parents = (
778 self._parents = (
770 self._nodeconstants.nullid,
779 self._nodeconstants.nullid,
771 self._nodeconstants.nullid,
780 self._nodeconstants.nullid,
772 )
781 )
773 else:
782 else:
774 raise error.Abort(
783 raise error.Abort(
775 _(b'working directory state appears damaged!')
784 _(b'working directory state appears damaged!')
776 )
785 )
777
786
778 return self._parents
787 return self._parents
779
788
780 @property
789 @property
781 def docket(self):
790 def docket(self):
782 if not self._docket:
791 if not self._docket:
783 if not self._use_dirstate_v2:
792 if not self._use_dirstate_v2:
784 raise error.ProgrammingError(
793 raise error.ProgrammingError(
785 b'dirstate only has a docket in v2 format'
794 b'dirstate only has a docket in v2 format'
786 )
795 )
787 self._docket = docketmod.DirstateDocket.parse(
796 self._docket = docketmod.DirstateDocket.parse(
788 self._readdirstatefile(), self._nodeconstants
797 self._readdirstatefile(), self._nodeconstants
789 )
798 )
790 return self._docket
799 return self._docket
791
800
792 @propertycache
801 @propertycache
793 def _rustmap(self):
802 def _rustmap(self):
794 """
803 """
795 Fills the Dirstatemap when called.
804 Fills the Dirstatemap when called.
796 """
805 """
797 # ignore HG_PENDING because identity is used only for writing
806 # ignore HG_PENDING because identity is used only for writing
798 self.identity = util.filestat.frompath(
807 self.identity = util.filestat.frompath(
799 self._opener.join(self._filename)
808 self._opener.join(self._filename)
800 )
809 )
801
810
802 if self._use_dirstate_v2:
811 if self._use_dirstate_v2:
803 if self.docket.uuid:
812 if self.docket.uuid:
804 # TODO: use mmap when possible
813 # TODO: use mmap when possible
805 data = self._opener.read(self.docket.data_filename())
814 data = self._opener.read(self.docket.data_filename())
806 else:
815 else:
807 data = b''
816 data = b''
808 self._rustmap = rustmod.DirstateMap.new_v2(
817 self._rustmap = rustmod.DirstateMap.new_v2(
809 data, self.docket.data_size, self.docket.tree_metadata
818 data, self.docket.data_size, self.docket.tree_metadata
810 )
819 )
811 parents = self.docket.parents
820 parents = self.docket.parents
812 else:
821 else:
813 self._rustmap, parents = rustmod.DirstateMap.new_v1(
822 self._rustmap, parents = rustmod.DirstateMap.new_v1(
814 self._use_dirstate_tree, self._readdirstatefile()
823 self._use_dirstate_tree, self._readdirstatefile()
815 )
824 )
816
825
817 if parents and not self._dirtyparents:
826 if parents and not self._dirtyparents:
818 self.setparents(*parents)
827 self.setparents(*parents)
819
828
820 self.__contains__ = self._rustmap.__contains__
829 self.__contains__ = self._rustmap.__contains__
821 self.__getitem__ = self._rustmap.__getitem__
830 self.__getitem__ = self._rustmap.__getitem__
822 self.get = self._rustmap.get
831 self.get = self._rustmap.get
823 return self._rustmap
832 return self._rustmap
824
833
825 def write(self, tr, st, now):
834 def write(self, tr, st, now):
826 if not self._use_dirstate_v2:
835 if not self._use_dirstate_v2:
827 p1, p2 = self.parents()
836 p1, p2 = self.parents()
828 packed = self._rustmap.write_v1(p1, p2, now)
837 packed = self._rustmap.write_v1(p1, p2, now)
829 st.write(packed)
838 st.write(packed)
830 st.close()
839 st.close()
831 self._dirtyparents = False
840 self._dirtyparents = False
832 return
841 return
833
842
834 # We can only append to an existing data file if there is one
843 # We can only append to an existing data file if there is one
835 can_append = self.docket.uuid is not None
844 can_append = self.docket.uuid is not None
836 packed, meta, append = self._rustmap.write_v2(now, can_append)
845 packed, meta, append = self._rustmap.write_v2(now, can_append)
837 if append:
846 if append:
838 docket = self.docket
847 docket = self.docket
839 data_filename = docket.data_filename()
848 data_filename = docket.data_filename()
840 if tr:
849 if tr:
841 tr.add(data_filename, docket.data_size)
850 tr.add(data_filename, docket.data_size)
842 with self._opener(data_filename, b'r+b') as fp:
851 with self._opener(data_filename, b'r+b') as fp:
843 fp.seek(docket.data_size)
852 fp.seek(docket.data_size)
844 assert fp.tell() == docket.data_size
853 assert fp.tell() == docket.data_size
845 written = fp.write(packed)
854 written = fp.write(packed)
846 if written is not None: # py2 may return None
855 if written is not None: # py2 may return None
847 assert written == len(packed), (written, len(packed))
856 assert written == len(packed), (written, len(packed))
848 docket.data_size += len(packed)
857 docket.data_size += len(packed)
849 docket.parents = self.parents()
858 docket.parents = self.parents()
850 docket.tree_metadata = meta
859 docket.tree_metadata = meta
851 st.write(docket.serialize())
860 st.write(docket.serialize())
852 st.close()
861 st.close()
853 else:
862 else:
854 old_docket = self.docket
863 old_docket = self.docket
855 new_docket = docketmod.DirstateDocket.with_new_uuid(
864 new_docket = docketmod.DirstateDocket.with_new_uuid(
856 self.parents(), len(packed), meta
865 self.parents(), len(packed), meta
857 )
866 )
858 data_filename = new_docket.data_filename()
867 data_filename = new_docket.data_filename()
859 if tr:
868 if tr:
860 tr.add(data_filename, 0)
869 tr.add(data_filename, 0)
861 self._opener.write(data_filename, packed)
870 self._opener.write(data_filename, packed)
862 # Write the new docket after the new data file has been
871 # Write the new docket after the new data file has been
863 # written. Because `st` was opened with `atomictemp=True`,
872 # written. Because `st` was opened with `atomictemp=True`,
864 # the actual `.hg/dirstate` file is only affected on close.
873 # the actual `.hg/dirstate` file is only affected on close.
865 st.write(new_docket.serialize())
874 st.write(new_docket.serialize())
866 st.close()
875 st.close()
867 # Remove the old data file after the new docket pointing to
876 # Remove the old data file after the new docket pointing to
868 # the new data file was written.
877 # the new data file was written.
869 if old_docket.uuid:
878 if old_docket.uuid:
870 data_filename = old_docket.data_filename()
879 data_filename = old_docket.data_filename()
871 unlink = lambda _tr=None: self._opener.unlink(data_filename)
880 unlink = lambda _tr=None: self._opener.unlink(data_filename)
872 if tr:
881 if tr:
873 category = b"dirstate-v2-clean-" + old_docket.uuid
882 category = b"dirstate-v2-clean-" + old_docket.uuid
874 tr.addpostclose(category, unlink)
883 tr.addpostclose(category, unlink)
875 else:
884 else:
876 unlink()
885 unlink()
877 self._docket = new_docket
886 self._docket = new_docket
878 # Reload from the newly-written file
887 # Reload from the newly-written file
879 util.clearcachedproperty(self, b"_rustmap")
888 util.clearcachedproperty(self, b"_rustmap")
880 self._dirtyparents = False
889 self._dirtyparents = False
881
890
882 @propertycache
891 @propertycache
883 def filefoldmap(self):
892 def filefoldmap(self):
884 """Returns a dictionary mapping normalized case paths to their
893 """Returns a dictionary mapping normalized case paths to their
885 non-normalized versions.
894 non-normalized versions.
886 """
895 """
887 return self._rustmap.filefoldmapasdict()
896 return self._rustmap.filefoldmapasdict()
888
897
889 def hastrackeddir(self, d):
898 def hastrackeddir(self, d):
890 return self._rustmap.hastrackeddir(d)
899 return self._rustmap.hastrackeddir(d)
891
900
892 def hasdir(self, d):
901 def hasdir(self, d):
893 return self._rustmap.hasdir(d)
902 return self._rustmap.hasdir(d)
894
903
895 @propertycache
904 @propertycache
896 def identity(self):
905 def identity(self):
897 self._rustmap
906 self._rustmap
898 return self.identity
907 return self.identity
899
908
900 @property
909 @property
901 def nonnormalset(self):
910 def nonnormalset(self):
902 nonnorm = self._rustmap.non_normal_entries()
911 nonnorm = self._rustmap.non_normal_entries()
903 return nonnorm
912 return nonnorm
904
913
905 @propertycache
914 @propertycache
906 def otherparentset(self):
915 def otherparentset(self):
907 otherparents = self._rustmap.other_parent_entries()
916 otherparents = self._rustmap.other_parent_entries()
908 return otherparents
917 return otherparents
909
918
910 def non_normal_or_other_parent_paths(self):
919 def non_normal_or_other_parent_paths(self):
911 return self._rustmap.non_normal_or_other_parent_paths()
920 return self._rustmap.non_normal_or_other_parent_paths()
912
921
913 @propertycache
922 @propertycache
914 def dirfoldmap(self):
923 def dirfoldmap(self):
915 f = {}
924 f = {}
916 normcase = util.normcase
925 normcase = util.normcase
917 for name in self._rustmap.tracked_dirs():
926 for name in self._rustmap.tracked_dirs():
918 f[normcase(name)] = name
927 f[normcase(name)] = name
919 return f
928 return f
920
929
921 def set_possibly_dirty(self, filename):
930 def set_possibly_dirty(self, filename):
922 """record that the current state of the file on disk is unknown"""
931 """record that the current state of the file on disk is unknown"""
923 entry = self[filename]
932 entry = self[filename]
924 entry.set_possibly_dirty()
933 entry.set_possibly_dirty()
925 self._rustmap.set_v1(filename, entry)
934 self._rustmap.set_v1(filename, entry)
926
935
936 def set_clean(self, filename, mode, size, mtime):
937 """mark a file as back to a clean state"""
938 entry = self[filename]
939 mtime = mtime & rangemask
940 size = size & rangemask
941 entry.set_clean(mode, size, mtime)
942 self._rustmap.set_v1(filename, entry)
943 self._rustmap.copymap().pop(filename, None)
944
927 def __setitem__(self, key, value):
945 def __setitem__(self, key, value):
928 assert isinstance(value, DirstateItem)
946 assert isinstance(value, DirstateItem)
929 self._rustmap.set_v1(key, value)
947 self._rustmap.set_v1(key, value)
@@ -1,792 +1,810 b''
1 # parsers.py - Python implementation of parsers.c
1 # parsers.py - Python implementation of parsers.c
2 #
2 #
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import struct
10 import struct
11 import zlib
11 import zlib
12
12
13 from ..node import (
13 from ..node import (
14 nullrev,
14 nullrev,
15 sha1nodeconstants,
15 sha1nodeconstants,
16 )
16 )
17 from ..thirdparty import attr
17 from ..thirdparty import attr
18 from .. import (
18 from .. import (
19 error,
19 error,
20 pycompat,
20 pycompat,
21 revlogutils,
21 revlogutils,
22 util,
22 util,
23 )
23 )
24
24
25 from ..revlogutils import nodemap as nodemaputil
25 from ..revlogutils import nodemap as nodemaputil
26 from ..revlogutils import constants as revlog_constants
26 from ..revlogutils import constants as revlog_constants
27
27
28 stringio = pycompat.bytesio
28 stringio = pycompat.bytesio
29
29
30
30
31 _pack = struct.pack
31 _pack = struct.pack
32 _unpack = struct.unpack
32 _unpack = struct.unpack
33 _compress = zlib.compress
33 _compress = zlib.compress
34 _decompress = zlib.decompress
34 _decompress = zlib.decompress
35
35
36
36
37 # a special value used internally for `size` if the file come from the other parent
37 # a special value used internally for `size` if the file come from the other parent
38 FROM_P2 = -2
38 FROM_P2 = -2
39
39
40 # a special value used internally for `size` if the file is modified/merged/added
40 # a special value used internally for `size` if the file is modified/merged/added
41 NONNORMAL = -1
41 NONNORMAL = -1
42
42
43 # a special value used internally for `time` if the time is ambigeous
43 # a special value used internally for `time` if the time is ambigeous
44 AMBIGUOUS_TIME = -1
44 AMBIGUOUS_TIME = -1
45
45
46
46
47 @attr.s(slots=True, init=False)
47 @attr.s(slots=True, init=False)
48 class DirstateItem(object):
48 class DirstateItem(object):
49 """represent a dirstate entry
49 """represent a dirstate entry
50
50
51 It contains:
51 It contains:
52
52
53 - state (one of 'n', 'a', 'r', 'm')
53 - state (one of 'n', 'a', 'r', 'm')
54 - mode,
54 - mode,
55 - size,
55 - size,
56 - mtime,
56 - mtime,
57 """
57 """
58
58
59 _wc_tracked = attr.ib()
59 _wc_tracked = attr.ib()
60 _p1_tracked = attr.ib()
60 _p1_tracked = attr.ib()
61 _p2_tracked = attr.ib()
61 _p2_tracked = attr.ib()
62 # the three item above should probably be combined
62 # the three item above should probably be combined
63 #
63 #
64 # However it is unclear if they properly cover some of the most advanced
64 # However it is unclear if they properly cover some of the most advanced
65 # merge case. So we should probably wait on this to be settled.
65 # merge case. So we should probably wait on this to be settled.
66 _merged = attr.ib()
66 _merged = attr.ib()
67 _clean_p1 = attr.ib()
67 _clean_p1 = attr.ib()
68 _clean_p2 = attr.ib()
68 _clean_p2 = attr.ib()
69 _possibly_dirty = attr.ib()
69 _possibly_dirty = attr.ib()
70 _mode = attr.ib()
70 _mode = attr.ib()
71 _size = attr.ib()
71 _size = attr.ib()
72 _mtime = attr.ib()
72 _mtime = attr.ib()
73
73
74 def __init__(
74 def __init__(
75 self,
75 self,
76 wc_tracked=False,
76 wc_tracked=False,
77 p1_tracked=False,
77 p1_tracked=False,
78 p2_tracked=False,
78 p2_tracked=False,
79 merged=False,
79 merged=False,
80 clean_p1=False,
80 clean_p1=False,
81 clean_p2=False,
81 clean_p2=False,
82 possibly_dirty=False,
82 possibly_dirty=False,
83 parentfiledata=None,
83 parentfiledata=None,
84 ):
84 ):
85 if merged and (clean_p1 or clean_p2):
85 if merged and (clean_p1 or clean_p2):
86 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
86 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
87 raise error.ProgrammingError(msg)
87 raise error.ProgrammingError(msg)
88
88
89 self._wc_tracked = wc_tracked
89 self._wc_tracked = wc_tracked
90 self._p1_tracked = p1_tracked
90 self._p1_tracked = p1_tracked
91 self._p2_tracked = p2_tracked
91 self._p2_tracked = p2_tracked
92 self._merged = merged
92 self._merged = merged
93 self._clean_p1 = clean_p1
93 self._clean_p1 = clean_p1
94 self._clean_p2 = clean_p2
94 self._clean_p2 = clean_p2
95 self._possibly_dirty = possibly_dirty
95 self._possibly_dirty = possibly_dirty
96 if parentfiledata is None:
96 if parentfiledata is None:
97 self._mode = None
97 self._mode = None
98 self._size = None
98 self._size = None
99 self._mtime = None
99 self._mtime = None
100 else:
100 else:
101 self._mode = parentfiledata[0]
101 self._mode = parentfiledata[0]
102 self._size = parentfiledata[1]
102 self._size = parentfiledata[1]
103 self._mtime = parentfiledata[2]
103 self._mtime = parentfiledata[2]
104
104
105 @classmethod
105 @classmethod
106 def new_added(cls):
106 def new_added(cls):
107 """constructor to help legacy API to build a new "added" item
107 """constructor to help legacy API to build a new "added" item
108
108
109 Should eventually be removed
109 Should eventually be removed
110 """
110 """
111 instance = cls()
111 instance = cls()
112 instance._wc_tracked = True
112 instance._wc_tracked = True
113 instance._p1_tracked = False
113 instance._p1_tracked = False
114 instance._p2_tracked = False
114 instance._p2_tracked = False
115 return instance
115 return instance
116
116
117 @classmethod
117 @classmethod
118 def new_merged(cls):
118 def new_merged(cls):
119 """constructor to help legacy API to build a new "merged" item
119 """constructor to help legacy API to build a new "merged" item
120
120
121 Should eventually be removed
121 Should eventually be removed
122 """
122 """
123 instance = cls()
123 instance = cls()
124 instance._wc_tracked = True
124 instance._wc_tracked = True
125 instance._p1_tracked = True # might not be True because of rename ?
125 instance._p1_tracked = True # might not be True because of rename ?
126 instance._p2_tracked = True # might not be True because of rename ?
126 instance._p2_tracked = True # might not be True because of rename ?
127 instance._merged = True
127 instance._merged = True
128 return instance
128 return instance
129
129
130 @classmethod
130 @classmethod
131 def new_from_p2(cls):
131 def new_from_p2(cls):
132 """constructor to help legacy API to build a new "from_p2" item
132 """constructor to help legacy API to build a new "from_p2" item
133
133
134 Should eventually be removed
134 Should eventually be removed
135 """
135 """
136 instance = cls()
136 instance = cls()
137 instance._wc_tracked = True
137 instance._wc_tracked = True
138 instance._p1_tracked = False # might actually be True
138 instance._p1_tracked = False # might actually be True
139 instance._p2_tracked = True
139 instance._p2_tracked = True
140 instance._clean_p2 = True
140 instance._clean_p2 = True
141 return instance
141 return instance
142
142
143 @classmethod
143 @classmethod
144 def new_possibly_dirty(cls):
144 def new_possibly_dirty(cls):
145 """constructor to help legacy API to build a new "possibly_dirty" item
145 """constructor to help legacy API to build a new "possibly_dirty" item
146
146
147 Should eventually be removed
147 Should eventually be removed
148 """
148 """
149 instance = cls()
149 instance = cls()
150 instance._wc_tracked = True
150 instance._wc_tracked = True
151 instance._p1_tracked = True
151 instance._p1_tracked = True
152 instance._possibly_dirty = True
152 instance._possibly_dirty = True
153 return instance
153 return instance
154
154
155 @classmethod
155 @classmethod
156 def new_normal(cls, mode, size, mtime):
156 def new_normal(cls, mode, size, mtime):
157 """constructor to help legacy API to build a new "normal" item
157 """constructor to help legacy API to build a new "normal" item
158
158
159 Should eventually be removed
159 Should eventually be removed
160 """
160 """
161 assert size != FROM_P2
161 assert size != FROM_P2
162 assert size != NONNORMAL
162 assert size != NONNORMAL
163 instance = cls()
163 instance = cls()
164 instance._wc_tracked = True
164 instance._wc_tracked = True
165 instance._p1_tracked = True
165 instance._p1_tracked = True
166 instance._mode = mode
166 instance._mode = mode
167 instance._size = size
167 instance._size = size
168 instance._mtime = mtime
168 instance._mtime = mtime
169 return instance
169 return instance
170
170
171 @classmethod
171 @classmethod
172 def from_v1_data(cls, state, mode, size, mtime):
172 def from_v1_data(cls, state, mode, size, mtime):
173 """Build a new DirstateItem object from V1 data
173 """Build a new DirstateItem object from V1 data
174
174
175 Since the dirstate-v1 format is frozen, the signature of this function
175 Since the dirstate-v1 format is frozen, the signature of this function
176 is not expected to change, unlike the __init__ one.
176 is not expected to change, unlike the __init__ one.
177 """
177 """
178 if state == b'm':
178 if state == b'm':
179 return cls.new_merged()
179 return cls.new_merged()
180 elif state == b'a':
180 elif state == b'a':
181 return cls.new_added()
181 return cls.new_added()
182 elif state == b'r':
182 elif state == b'r':
183 instance = cls()
183 instance = cls()
184 instance._wc_tracked = False
184 instance._wc_tracked = False
185 if size == NONNORMAL:
185 if size == NONNORMAL:
186 instance._merged = True
186 instance._merged = True
187 instance._p1_tracked = (
187 instance._p1_tracked = (
188 True # might not be True because of rename ?
188 True # might not be True because of rename ?
189 )
189 )
190 instance._p2_tracked = (
190 instance._p2_tracked = (
191 True # might not be True because of rename ?
191 True # might not be True because of rename ?
192 )
192 )
193 elif size == FROM_P2:
193 elif size == FROM_P2:
194 instance._clean_p2 = True
194 instance._clean_p2 = True
195 instance._p1_tracked = (
195 instance._p1_tracked = (
196 False # We actually don't know (file history)
196 False # We actually don't know (file history)
197 )
197 )
198 instance._p2_tracked = True
198 instance._p2_tracked = True
199 else:
199 else:
200 instance._p1_tracked = True
200 instance._p1_tracked = True
201 return instance
201 return instance
202 elif state == b'n':
202 elif state == b'n':
203 if size == FROM_P2:
203 if size == FROM_P2:
204 return cls.new_from_p2()
204 return cls.new_from_p2()
205 elif size == NONNORMAL:
205 elif size == NONNORMAL:
206 return cls.new_possibly_dirty()
206 return cls.new_possibly_dirty()
207 elif mtime == AMBIGUOUS_TIME:
207 elif mtime == AMBIGUOUS_TIME:
208 instance = cls.new_normal(mode, size, 42)
208 instance = cls.new_normal(mode, size, 42)
209 instance._mtime = None
209 instance._mtime = None
210 instance._possibly_dirty = True
210 instance._possibly_dirty = True
211 return instance
211 return instance
212 else:
212 else:
213 return cls.new_normal(mode, size, mtime)
213 return cls.new_normal(mode, size, mtime)
214 else:
214 else:
215 raise RuntimeError(b'unknown state: %s' % state)
215 raise RuntimeError(b'unknown state: %s' % state)
216
216
217 def set_possibly_dirty(self):
217 def set_possibly_dirty(self):
218 """Mark a file as "possibly dirty"
218 """Mark a file as "possibly dirty"
219
219
220 This means the next status call will have to actually check its content
220 This means the next status call will have to actually check its content
221 to make sure it is correct.
221 to make sure it is correct.
222 """
222 """
223 self._possibly_dirty = True
223 self._possibly_dirty = True
224
224
225 def set_clean(self, mode, size, mtime):
226 """mark a file as "clean" cancelling potential "possibly dirty call"
227
228 Note: this function is a descendant of `dirstate.normal` and is
229 currently expected to be call on "normal" entry only. There are not
230 reason for this to not change in the future as long as the ccode is
231 updated to preserve the proper state of the non-normal files.
232 """
233 self._wc_tracked = True
234 self._p1_tracked = True
235 self._p2_tracked = False # this might be wrong
236 self._merged = False
237 self._clean_p2 = False
238 self._possibly_dirty = False
239 self._mode = mode
240 self._size = size
241 self._mtime = mtime
242
225 def set_untracked(self):
243 def set_untracked(self):
226 """mark a file as untracked in the working copy
244 """mark a file as untracked in the working copy
227
245
228 This will ultimately be called by command like `hg remove`.
246 This will ultimately be called by command like `hg remove`.
229 """
247 """
230 # backup the previous state (useful for merge)
248 # backup the previous state (useful for merge)
231 self._wc_tracked = False
249 self._wc_tracked = False
232 self._mode = None
250 self._mode = None
233 self._size = None
251 self._size = None
234 self._mtime = None
252 self._mtime = None
235
253
236 @property
254 @property
237 def mode(self):
255 def mode(self):
238 return self.v1_mode()
256 return self.v1_mode()
239
257
240 @property
258 @property
241 def size(self):
259 def size(self):
242 return self.v1_size()
260 return self.v1_size()
243
261
244 @property
262 @property
245 def mtime(self):
263 def mtime(self):
246 return self.v1_mtime()
264 return self.v1_mtime()
247
265
248 @property
266 @property
249 def state(self):
267 def state(self):
250 """
268 """
251 States are:
269 States are:
252 n normal
270 n normal
253 m needs merging
271 m needs merging
254 r marked for removal
272 r marked for removal
255 a marked for addition
273 a marked for addition
256
274
257 XXX This "state" is a bit obscure and mostly a direct expression of the
275 XXX This "state" is a bit obscure and mostly a direct expression of the
258 dirstatev1 format. It would make sense to ultimately deprecate it in
276 dirstatev1 format. It would make sense to ultimately deprecate it in
259 favor of the more "semantic" attributes.
277 favor of the more "semantic" attributes.
260 """
278 """
261 return self.v1_state()
279 return self.v1_state()
262
280
263 @property
281 @property
264 def tracked(self):
282 def tracked(self):
265 """True is the file is tracked in the working copy"""
283 """True is the file is tracked in the working copy"""
266 return self._wc_tracked
284 return self._wc_tracked
267
285
268 @property
286 @property
269 def added(self):
287 def added(self):
270 """True if the file has been added"""
288 """True if the file has been added"""
271 return self._wc_tracked and not (self._p1_tracked or self._p2_tracked)
289 return self._wc_tracked and not (self._p1_tracked or self._p2_tracked)
272
290
273 @property
291 @property
274 def merged(self):
292 def merged(self):
275 """True if the file has been merged
293 """True if the file has been merged
276
294
277 Should only be set if a merge is in progress in the dirstate
295 Should only be set if a merge is in progress in the dirstate
278 """
296 """
279 return self._wc_tracked and self._merged
297 return self._wc_tracked and self._merged
280
298
281 @property
299 @property
282 def from_p2(self):
300 def from_p2(self):
283 """True if the file have been fetched from p2 during the current merge
301 """True if the file have been fetched from p2 during the current merge
284
302
285 This is only True is the file is currently tracked.
303 This is only True is the file is currently tracked.
286
304
287 Should only be set if a merge is in progress in the dirstate
305 Should only be set if a merge is in progress in the dirstate
288 """
306 """
289 if not self._wc_tracked:
307 if not self._wc_tracked:
290 return False
308 return False
291 return self._clean_p2 or (not self._p1_tracked and self._p2_tracked)
309 return self._clean_p2 or (not self._p1_tracked and self._p2_tracked)
292
310
293 @property
311 @property
294 def from_p2_removed(self):
312 def from_p2_removed(self):
295 """True if the file has been removed, but was "from_p2" initially
313 """True if the file has been removed, but was "from_p2" initially
296
314
297 This property seems like an abstraction leakage and should probably be
315 This property seems like an abstraction leakage and should probably be
298 dealt in this class (or maybe the dirstatemap) directly.
316 dealt in this class (or maybe the dirstatemap) directly.
299 """
317 """
300 return self.removed and self._clean_p2
318 return self.removed and self._clean_p2
301
319
302 @property
320 @property
303 def removed(self):
321 def removed(self):
304 """True if the file has been removed"""
322 """True if the file has been removed"""
305 return not self._wc_tracked and (self._p1_tracked or self._p2_tracked)
323 return not self._wc_tracked and (self._p1_tracked or self._p2_tracked)
306
324
307 @property
325 @property
308 def merged_removed(self):
326 def merged_removed(self):
309 """True if the file has been removed, but was "merged" initially
327 """True if the file has been removed, but was "merged" initially
310
328
311 This property seems like an abstraction leakage and should probably be
329 This property seems like an abstraction leakage and should probably be
312 dealt in this class (or maybe the dirstatemap) directly.
330 dealt in this class (or maybe the dirstatemap) directly.
313 """
331 """
314 return self.removed and self._merged
332 return self.removed and self._merged
315
333
316 @property
334 @property
317 def dm_nonnormal(self):
335 def dm_nonnormal(self):
318 """True is the entry is non-normal in the dirstatemap sense
336 """True is the entry is non-normal in the dirstatemap sense
319
337
320 There is no reason for any code, but the dirstatemap one to use this.
338 There is no reason for any code, but the dirstatemap one to use this.
321 """
339 """
322 return self.v1_state() != b'n' or self.v1_mtime() == AMBIGUOUS_TIME
340 return self.v1_state() != b'n' or self.v1_mtime() == AMBIGUOUS_TIME
323
341
324 @property
342 @property
325 def dm_otherparent(self):
343 def dm_otherparent(self):
326 """True is the entry is `otherparent` in the dirstatemap sense
344 """True is the entry is `otherparent` in the dirstatemap sense
327
345
328 There is no reason for any code, but the dirstatemap one to use this.
346 There is no reason for any code, but the dirstatemap one to use this.
329 """
347 """
330 return self.v1_size() == FROM_P2
348 return self.v1_size() == FROM_P2
331
349
332 def v1_state(self):
350 def v1_state(self):
333 """return a "state" suitable for v1 serialization"""
351 """return a "state" suitable for v1 serialization"""
334 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
352 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
335 # the object has no state to record, this is -currently-
353 # the object has no state to record, this is -currently-
336 # unsupported
354 # unsupported
337 raise RuntimeError('untracked item')
355 raise RuntimeError('untracked item')
338 elif self.removed:
356 elif self.removed:
339 return b'r'
357 return b'r'
340 elif self.merged:
358 elif self.merged:
341 return b'm'
359 return b'm'
342 elif self.added:
360 elif self.added:
343 return b'a'
361 return b'a'
344 else:
362 else:
345 return b'n'
363 return b'n'
346
364
347 def v1_mode(self):
365 def v1_mode(self):
348 """return a "mode" suitable for v1 serialization"""
366 """return a "mode" suitable for v1 serialization"""
349 return self._mode if self._mode is not None else 0
367 return self._mode if self._mode is not None else 0
350
368
351 def v1_size(self):
369 def v1_size(self):
352 """return a "size" suitable for v1 serialization"""
370 """return a "size" suitable for v1 serialization"""
353 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
371 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
354 # the object has no state to record, this is -currently-
372 # the object has no state to record, this is -currently-
355 # unsupported
373 # unsupported
356 raise RuntimeError('untracked item')
374 raise RuntimeError('untracked item')
357 elif self.merged_removed:
375 elif self.merged_removed:
358 return NONNORMAL
376 return NONNORMAL
359 elif self.from_p2_removed:
377 elif self.from_p2_removed:
360 return FROM_P2
378 return FROM_P2
361 elif self.removed:
379 elif self.removed:
362 return 0
380 return 0
363 elif self.merged:
381 elif self.merged:
364 return FROM_P2
382 return FROM_P2
365 elif self.added:
383 elif self.added:
366 return NONNORMAL
384 return NONNORMAL
367 elif self.from_p2:
385 elif self.from_p2:
368 return FROM_P2
386 return FROM_P2
369 elif self._possibly_dirty:
387 elif self._possibly_dirty:
370 return self._size if self._size is not None else NONNORMAL
388 return self._size if self._size is not None else NONNORMAL
371 else:
389 else:
372 return self._size
390 return self._size
373
391
374 def v1_mtime(self):
392 def v1_mtime(self):
375 """return a "mtime" suitable for v1 serialization"""
393 """return a "mtime" suitable for v1 serialization"""
376 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
394 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
377 # the object has no state to record, this is -currently-
395 # the object has no state to record, this is -currently-
378 # unsupported
396 # unsupported
379 raise RuntimeError('untracked item')
397 raise RuntimeError('untracked item')
380 elif self.removed:
398 elif self.removed:
381 return 0
399 return 0
382 elif self._possibly_dirty:
400 elif self._possibly_dirty:
383 return AMBIGUOUS_TIME
401 return AMBIGUOUS_TIME
384 elif self.merged:
402 elif self.merged:
385 return AMBIGUOUS_TIME
403 return AMBIGUOUS_TIME
386 elif self.added:
404 elif self.added:
387 return AMBIGUOUS_TIME
405 return AMBIGUOUS_TIME
388 elif self.from_p2:
406 elif self.from_p2:
389 return AMBIGUOUS_TIME
407 return AMBIGUOUS_TIME
390 else:
408 else:
391 return self._mtime if self._mtime is not None else 0
409 return self._mtime if self._mtime is not None else 0
392
410
393 def need_delay(self, now):
411 def need_delay(self, now):
394 """True if the stored mtime would be ambiguous with the current time"""
412 """True if the stored mtime would be ambiguous with the current time"""
395 return self.v1_state() == b'n' and self.v1_mtime() == now
413 return self.v1_state() == b'n' and self.v1_mtime() == now
396
414
397
415
398 def gettype(q):
416 def gettype(q):
399 return int(q & 0xFFFF)
417 return int(q & 0xFFFF)
400
418
401
419
402 class BaseIndexObject(object):
420 class BaseIndexObject(object):
403 # Can I be passed to an algorithme implemented in Rust ?
421 # Can I be passed to an algorithme implemented in Rust ?
404 rust_ext_compat = 0
422 rust_ext_compat = 0
405 # Format of an index entry according to Python's `struct` language
423 # Format of an index entry according to Python's `struct` language
406 index_format = revlog_constants.INDEX_ENTRY_V1
424 index_format = revlog_constants.INDEX_ENTRY_V1
407 # Size of a C unsigned long long int, platform independent
425 # Size of a C unsigned long long int, platform independent
408 big_int_size = struct.calcsize(b'>Q')
426 big_int_size = struct.calcsize(b'>Q')
409 # Size of a C long int, platform independent
427 # Size of a C long int, platform independent
410 int_size = struct.calcsize(b'>i')
428 int_size = struct.calcsize(b'>i')
411 # An empty index entry, used as a default value to be overridden, or nullrev
429 # An empty index entry, used as a default value to be overridden, or nullrev
412 null_item = (
430 null_item = (
413 0,
431 0,
414 0,
432 0,
415 0,
433 0,
416 -1,
434 -1,
417 -1,
435 -1,
418 -1,
436 -1,
419 -1,
437 -1,
420 sha1nodeconstants.nullid,
438 sha1nodeconstants.nullid,
421 0,
439 0,
422 0,
440 0,
423 revlog_constants.COMP_MODE_INLINE,
441 revlog_constants.COMP_MODE_INLINE,
424 revlog_constants.COMP_MODE_INLINE,
442 revlog_constants.COMP_MODE_INLINE,
425 )
443 )
426
444
427 @util.propertycache
445 @util.propertycache
428 def entry_size(self):
446 def entry_size(self):
429 return self.index_format.size
447 return self.index_format.size
430
448
431 @property
449 @property
432 def nodemap(self):
450 def nodemap(self):
433 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
451 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
434 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
452 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
435 return self._nodemap
453 return self._nodemap
436
454
437 @util.propertycache
455 @util.propertycache
438 def _nodemap(self):
456 def _nodemap(self):
439 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
457 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
440 for r in range(0, len(self)):
458 for r in range(0, len(self)):
441 n = self[r][7]
459 n = self[r][7]
442 nodemap[n] = r
460 nodemap[n] = r
443 return nodemap
461 return nodemap
444
462
445 def has_node(self, node):
463 def has_node(self, node):
446 """return True if the node exist in the index"""
464 """return True if the node exist in the index"""
447 return node in self._nodemap
465 return node in self._nodemap
448
466
449 def rev(self, node):
467 def rev(self, node):
450 """return a revision for a node
468 """return a revision for a node
451
469
452 If the node is unknown, raise a RevlogError"""
470 If the node is unknown, raise a RevlogError"""
453 return self._nodemap[node]
471 return self._nodemap[node]
454
472
455 def get_rev(self, node):
473 def get_rev(self, node):
456 """return a revision for a node
474 """return a revision for a node
457
475
458 If the node is unknown, return None"""
476 If the node is unknown, return None"""
459 return self._nodemap.get(node)
477 return self._nodemap.get(node)
460
478
461 def _stripnodes(self, start):
479 def _stripnodes(self, start):
462 if '_nodemap' in vars(self):
480 if '_nodemap' in vars(self):
463 for r in range(start, len(self)):
481 for r in range(start, len(self)):
464 n = self[r][7]
482 n = self[r][7]
465 del self._nodemap[n]
483 del self._nodemap[n]
466
484
467 def clearcaches(self):
485 def clearcaches(self):
468 self.__dict__.pop('_nodemap', None)
486 self.__dict__.pop('_nodemap', None)
469
487
470 def __len__(self):
488 def __len__(self):
471 return self._lgt + len(self._extra)
489 return self._lgt + len(self._extra)
472
490
473 def append(self, tup):
491 def append(self, tup):
474 if '_nodemap' in vars(self):
492 if '_nodemap' in vars(self):
475 self._nodemap[tup[7]] = len(self)
493 self._nodemap[tup[7]] = len(self)
476 data = self._pack_entry(len(self), tup)
494 data = self._pack_entry(len(self), tup)
477 self._extra.append(data)
495 self._extra.append(data)
478
496
479 def _pack_entry(self, rev, entry):
497 def _pack_entry(self, rev, entry):
480 assert entry[8] == 0
498 assert entry[8] == 0
481 assert entry[9] == 0
499 assert entry[9] == 0
482 return self.index_format.pack(*entry[:8])
500 return self.index_format.pack(*entry[:8])
483
501
484 def _check_index(self, i):
502 def _check_index(self, i):
485 if not isinstance(i, int):
503 if not isinstance(i, int):
486 raise TypeError(b"expecting int indexes")
504 raise TypeError(b"expecting int indexes")
487 if i < 0 or i >= len(self):
505 if i < 0 or i >= len(self):
488 raise IndexError
506 raise IndexError
489
507
490 def __getitem__(self, i):
508 def __getitem__(self, i):
491 if i == -1:
509 if i == -1:
492 return self.null_item
510 return self.null_item
493 self._check_index(i)
511 self._check_index(i)
494 if i >= self._lgt:
512 if i >= self._lgt:
495 data = self._extra[i - self._lgt]
513 data = self._extra[i - self._lgt]
496 else:
514 else:
497 index = self._calculate_index(i)
515 index = self._calculate_index(i)
498 data = self._data[index : index + self.entry_size]
516 data = self._data[index : index + self.entry_size]
499 r = self._unpack_entry(i, data)
517 r = self._unpack_entry(i, data)
500 if self._lgt and i == 0:
518 if self._lgt and i == 0:
501 offset = revlogutils.offset_type(0, gettype(r[0]))
519 offset = revlogutils.offset_type(0, gettype(r[0]))
502 r = (offset,) + r[1:]
520 r = (offset,) + r[1:]
503 return r
521 return r
504
522
505 def _unpack_entry(self, rev, data):
523 def _unpack_entry(self, rev, data):
506 r = self.index_format.unpack(data)
524 r = self.index_format.unpack(data)
507 r = r + (
525 r = r + (
508 0,
526 0,
509 0,
527 0,
510 revlog_constants.COMP_MODE_INLINE,
528 revlog_constants.COMP_MODE_INLINE,
511 revlog_constants.COMP_MODE_INLINE,
529 revlog_constants.COMP_MODE_INLINE,
512 )
530 )
513 return r
531 return r
514
532
515 def pack_header(self, header):
533 def pack_header(self, header):
516 """pack header information as binary"""
534 """pack header information as binary"""
517 v_fmt = revlog_constants.INDEX_HEADER
535 v_fmt = revlog_constants.INDEX_HEADER
518 return v_fmt.pack(header)
536 return v_fmt.pack(header)
519
537
520 def entry_binary(self, rev):
538 def entry_binary(self, rev):
521 """return the raw binary string representing a revision"""
539 """return the raw binary string representing a revision"""
522 entry = self[rev]
540 entry = self[rev]
523 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
541 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
524 if rev == 0:
542 if rev == 0:
525 p = p[revlog_constants.INDEX_HEADER.size :]
543 p = p[revlog_constants.INDEX_HEADER.size :]
526 return p
544 return p
527
545
528
546
529 class IndexObject(BaseIndexObject):
547 class IndexObject(BaseIndexObject):
530 def __init__(self, data):
548 def __init__(self, data):
531 assert len(data) % self.entry_size == 0, (
549 assert len(data) % self.entry_size == 0, (
532 len(data),
550 len(data),
533 self.entry_size,
551 self.entry_size,
534 len(data) % self.entry_size,
552 len(data) % self.entry_size,
535 )
553 )
536 self._data = data
554 self._data = data
537 self._lgt = len(data) // self.entry_size
555 self._lgt = len(data) // self.entry_size
538 self._extra = []
556 self._extra = []
539
557
540 def _calculate_index(self, i):
558 def _calculate_index(self, i):
541 return i * self.entry_size
559 return i * self.entry_size
542
560
543 def __delitem__(self, i):
561 def __delitem__(self, i):
544 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
562 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
545 raise ValueError(b"deleting slices only supports a:-1 with step 1")
563 raise ValueError(b"deleting slices only supports a:-1 with step 1")
546 i = i.start
564 i = i.start
547 self._check_index(i)
565 self._check_index(i)
548 self._stripnodes(i)
566 self._stripnodes(i)
549 if i < self._lgt:
567 if i < self._lgt:
550 self._data = self._data[: i * self.entry_size]
568 self._data = self._data[: i * self.entry_size]
551 self._lgt = i
569 self._lgt = i
552 self._extra = []
570 self._extra = []
553 else:
571 else:
554 self._extra = self._extra[: i - self._lgt]
572 self._extra = self._extra[: i - self._lgt]
555
573
556
574
557 class PersistentNodeMapIndexObject(IndexObject):
575 class PersistentNodeMapIndexObject(IndexObject):
558 """a Debug oriented class to test persistent nodemap
576 """a Debug oriented class to test persistent nodemap
559
577
560 We need a simple python object to test API and higher level behavior. See
578 We need a simple python object to test API and higher level behavior. See
561 the Rust implementation for more serious usage. This should be used only
579 the Rust implementation for more serious usage. This should be used only
562 through the dedicated `devel.persistent-nodemap` config.
580 through the dedicated `devel.persistent-nodemap` config.
563 """
581 """
564
582
565 def nodemap_data_all(self):
583 def nodemap_data_all(self):
566 """Return bytes containing a full serialization of a nodemap
584 """Return bytes containing a full serialization of a nodemap
567
585
568 The nodemap should be valid for the full set of revisions in the
586 The nodemap should be valid for the full set of revisions in the
569 index."""
587 index."""
570 return nodemaputil.persistent_data(self)
588 return nodemaputil.persistent_data(self)
571
589
572 def nodemap_data_incremental(self):
590 def nodemap_data_incremental(self):
573 """Return bytes containing a incremental update to persistent nodemap
591 """Return bytes containing a incremental update to persistent nodemap
574
592
575 This containst the data for an append-only update of the data provided
593 This containst the data for an append-only update of the data provided
576 in the last call to `update_nodemap_data`.
594 in the last call to `update_nodemap_data`.
577 """
595 """
578 if self._nm_root is None:
596 if self._nm_root is None:
579 return None
597 return None
580 docket = self._nm_docket
598 docket = self._nm_docket
581 changed, data = nodemaputil.update_persistent_data(
599 changed, data = nodemaputil.update_persistent_data(
582 self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
600 self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
583 )
601 )
584
602
585 self._nm_root = self._nm_max_idx = self._nm_docket = None
603 self._nm_root = self._nm_max_idx = self._nm_docket = None
586 return docket, changed, data
604 return docket, changed, data
587
605
588 def update_nodemap_data(self, docket, nm_data):
606 def update_nodemap_data(self, docket, nm_data):
589 """provide full block of persisted binary data for a nodemap
607 """provide full block of persisted binary data for a nodemap
590
608
591 The data are expected to come from disk. See `nodemap_data_all` for a
609 The data are expected to come from disk. See `nodemap_data_all` for a
592 produceur of such data."""
610 produceur of such data."""
593 if nm_data is not None:
611 if nm_data is not None:
594 self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
612 self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
595 if self._nm_root:
613 if self._nm_root:
596 self._nm_docket = docket
614 self._nm_docket = docket
597 else:
615 else:
598 self._nm_root = self._nm_max_idx = self._nm_docket = None
616 self._nm_root = self._nm_max_idx = self._nm_docket = None
599
617
600
618
601 class InlinedIndexObject(BaseIndexObject):
619 class InlinedIndexObject(BaseIndexObject):
602 def __init__(self, data, inline=0):
620 def __init__(self, data, inline=0):
603 self._data = data
621 self._data = data
604 self._lgt = self._inline_scan(None)
622 self._lgt = self._inline_scan(None)
605 self._inline_scan(self._lgt)
623 self._inline_scan(self._lgt)
606 self._extra = []
624 self._extra = []
607
625
608 def _inline_scan(self, lgt):
626 def _inline_scan(self, lgt):
609 off = 0
627 off = 0
610 if lgt is not None:
628 if lgt is not None:
611 self._offsets = [0] * lgt
629 self._offsets = [0] * lgt
612 count = 0
630 count = 0
613 while off <= len(self._data) - self.entry_size:
631 while off <= len(self._data) - self.entry_size:
614 start = off + self.big_int_size
632 start = off + self.big_int_size
615 (s,) = struct.unpack(
633 (s,) = struct.unpack(
616 b'>i',
634 b'>i',
617 self._data[start : start + self.int_size],
635 self._data[start : start + self.int_size],
618 )
636 )
619 if lgt is not None:
637 if lgt is not None:
620 self._offsets[count] = off
638 self._offsets[count] = off
621 count += 1
639 count += 1
622 off += self.entry_size + s
640 off += self.entry_size + s
623 if off != len(self._data):
641 if off != len(self._data):
624 raise ValueError(b"corrupted data")
642 raise ValueError(b"corrupted data")
625 return count
643 return count
626
644
627 def __delitem__(self, i):
645 def __delitem__(self, i):
628 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
646 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
629 raise ValueError(b"deleting slices only supports a:-1 with step 1")
647 raise ValueError(b"deleting slices only supports a:-1 with step 1")
630 i = i.start
648 i = i.start
631 self._check_index(i)
649 self._check_index(i)
632 self._stripnodes(i)
650 self._stripnodes(i)
633 if i < self._lgt:
651 if i < self._lgt:
634 self._offsets = self._offsets[:i]
652 self._offsets = self._offsets[:i]
635 self._lgt = i
653 self._lgt = i
636 self._extra = []
654 self._extra = []
637 else:
655 else:
638 self._extra = self._extra[: i - self._lgt]
656 self._extra = self._extra[: i - self._lgt]
639
657
640 def _calculate_index(self, i):
658 def _calculate_index(self, i):
641 return self._offsets[i]
659 return self._offsets[i]
642
660
643
661
644 def parse_index2(data, inline, revlogv2=False):
662 def parse_index2(data, inline, revlogv2=False):
645 if not inline:
663 if not inline:
646 cls = IndexObject2 if revlogv2 else IndexObject
664 cls = IndexObject2 if revlogv2 else IndexObject
647 return cls(data), None
665 return cls(data), None
648 cls = InlinedIndexObject
666 cls = InlinedIndexObject
649 return cls(data, inline), (0, data)
667 return cls(data, inline), (0, data)
650
668
651
669
652 def parse_index_cl_v2(data):
670 def parse_index_cl_v2(data):
653 return IndexChangelogV2(data), None
671 return IndexChangelogV2(data), None
654
672
655
673
656 class IndexObject2(IndexObject):
674 class IndexObject2(IndexObject):
657 index_format = revlog_constants.INDEX_ENTRY_V2
675 index_format = revlog_constants.INDEX_ENTRY_V2
658
676
659 def replace_sidedata_info(
677 def replace_sidedata_info(
660 self,
678 self,
661 rev,
679 rev,
662 sidedata_offset,
680 sidedata_offset,
663 sidedata_length,
681 sidedata_length,
664 offset_flags,
682 offset_flags,
665 compression_mode,
683 compression_mode,
666 ):
684 ):
667 """
685 """
668 Replace an existing index entry's sidedata offset and length with new
686 Replace an existing index entry's sidedata offset and length with new
669 ones.
687 ones.
670 This cannot be used outside of the context of sidedata rewriting,
688 This cannot be used outside of the context of sidedata rewriting,
671 inside the transaction that creates the revision `rev`.
689 inside the transaction that creates the revision `rev`.
672 """
690 """
673 if rev < 0:
691 if rev < 0:
674 raise KeyError
692 raise KeyError
675 self._check_index(rev)
693 self._check_index(rev)
676 if rev < self._lgt:
694 if rev < self._lgt:
677 msg = b"cannot rewrite entries outside of this transaction"
695 msg = b"cannot rewrite entries outside of this transaction"
678 raise KeyError(msg)
696 raise KeyError(msg)
679 else:
697 else:
680 entry = list(self[rev])
698 entry = list(self[rev])
681 entry[0] = offset_flags
699 entry[0] = offset_flags
682 entry[8] = sidedata_offset
700 entry[8] = sidedata_offset
683 entry[9] = sidedata_length
701 entry[9] = sidedata_length
684 entry[11] = compression_mode
702 entry[11] = compression_mode
685 entry = tuple(entry)
703 entry = tuple(entry)
686 new = self._pack_entry(rev, entry)
704 new = self._pack_entry(rev, entry)
687 self._extra[rev - self._lgt] = new
705 self._extra[rev - self._lgt] = new
688
706
689 def _unpack_entry(self, rev, data):
707 def _unpack_entry(self, rev, data):
690 data = self.index_format.unpack(data)
708 data = self.index_format.unpack(data)
691 entry = data[:10]
709 entry = data[:10]
692 data_comp = data[10] & 3
710 data_comp = data[10] & 3
693 sidedata_comp = (data[10] & (3 << 2)) >> 2
711 sidedata_comp = (data[10] & (3 << 2)) >> 2
694 return entry + (data_comp, sidedata_comp)
712 return entry + (data_comp, sidedata_comp)
695
713
696 def _pack_entry(self, rev, entry):
714 def _pack_entry(self, rev, entry):
697 data = entry[:10]
715 data = entry[:10]
698 data_comp = entry[10] & 3
716 data_comp = entry[10] & 3
699 sidedata_comp = (entry[11] & 3) << 2
717 sidedata_comp = (entry[11] & 3) << 2
700 data += (data_comp | sidedata_comp,)
718 data += (data_comp | sidedata_comp,)
701
719
702 return self.index_format.pack(*data)
720 return self.index_format.pack(*data)
703
721
704 def entry_binary(self, rev):
722 def entry_binary(self, rev):
705 """return the raw binary string representing a revision"""
723 """return the raw binary string representing a revision"""
706 entry = self[rev]
724 entry = self[rev]
707 return self._pack_entry(rev, entry)
725 return self._pack_entry(rev, entry)
708
726
709 def pack_header(self, header):
727 def pack_header(self, header):
710 """pack header information as binary"""
728 """pack header information as binary"""
711 msg = 'version header should go in the docket, not the index: %d'
729 msg = 'version header should go in the docket, not the index: %d'
712 msg %= header
730 msg %= header
713 raise error.ProgrammingError(msg)
731 raise error.ProgrammingError(msg)
714
732
715
733
716 class IndexChangelogV2(IndexObject2):
734 class IndexChangelogV2(IndexObject2):
717 index_format = revlog_constants.INDEX_ENTRY_CL_V2
735 index_format = revlog_constants.INDEX_ENTRY_CL_V2
718
736
719 def _unpack_entry(self, rev, data, r=True):
737 def _unpack_entry(self, rev, data, r=True):
720 items = self.index_format.unpack(data)
738 items = self.index_format.unpack(data)
721 entry = items[:3] + (rev, rev) + items[3:8]
739 entry = items[:3] + (rev, rev) + items[3:8]
722 data_comp = items[8] & 3
740 data_comp = items[8] & 3
723 sidedata_comp = (items[8] >> 2) & 3
741 sidedata_comp = (items[8] >> 2) & 3
724 return entry + (data_comp, sidedata_comp)
742 return entry + (data_comp, sidedata_comp)
725
743
726 def _pack_entry(self, rev, entry):
744 def _pack_entry(self, rev, entry):
727 assert entry[3] == rev, entry[3]
745 assert entry[3] == rev, entry[3]
728 assert entry[4] == rev, entry[4]
746 assert entry[4] == rev, entry[4]
729 data = entry[:3] + entry[5:10]
747 data = entry[:3] + entry[5:10]
730 data_comp = entry[10] & 3
748 data_comp = entry[10] & 3
731 sidedata_comp = (entry[11] & 3) << 2
749 sidedata_comp = (entry[11] & 3) << 2
732 data += (data_comp | sidedata_comp,)
750 data += (data_comp | sidedata_comp,)
733 return self.index_format.pack(*data)
751 return self.index_format.pack(*data)
734
752
735
753
736 def parse_index_devel_nodemap(data, inline):
754 def parse_index_devel_nodemap(data, inline):
737 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
755 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
738 return PersistentNodeMapIndexObject(data), None
756 return PersistentNodeMapIndexObject(data), None
739
757
740
758
741 def parse_dirstate(dmap, copymap, st):
759 def parse_dirstate(dmap, copymap, st):
742 parents = [st[:20], st[20:40]]
760 parents = [st[:20], st[20:40]]
743 # dereference fields so they will be local in loop
761 # dereference fields so they will be local in loop
744 format = b">cllll"
762 format = b">cllll"
745 e_size = struct.calcsize(format)
763 e_size = struct.calcsize(format)
746 pos1 = 40
764 pos1 = 40
747 l = len(st)
765 l = len(st)
748
766
749 # the inner loop
767 # the inner loop
750 while pos1 < l:
768 while pos1 < l:
751 pos2 = pos1 + e_size
769 pos2 = pos1 + e_size
752 e = _unpack(b">cllll", st[pos1:pos2]) # a literal here is faster
770 e = _unpack(b">cllll", st[pos1:pos2]) # a literal here is faster
753 pos1 = pos2 + e[4]
771 pos1 = pos2 + e[4]
754 f = st[pos2:pos1]
772 f = st[pos2:pos1]
755 if b'\0' in f:
773 if b'\0' in f:
756 f, c = f.split(b'\0')
774 f, c = f.split(b'\0')
757 copymap[f] = c
775 copymap[f] = c
758 dmap[f] = DirstateItem.from_v1_data(*e[:4])
776 dmap[f] = DirstateItem.from_v1_data(*e[:4])
759 return parents
777 return parents
760
778
761
779
762 def pack_dirstate(dmap, copymap, pl, now):
780 def pack_dirstate(dmap, copymap, pl, now):
763 now = int(now)
781 now = int(now)
764 cs = stringio()
782 cs = stringio()
765 write = cs.write
783 write = cs.write
766 write(b"".join(pl))
784 write(b"".join(pl))
767 for f, e in pycompat.iteritems(dmap):
785 for f, e in pycompat.iteritems(dmap):
768 if e.need_delay(now):
786 if e.need_delay(now):
769 # The file was last modified "simultaneously" with the current
787 # The file was last modified "simultaneously" with the current
770 # write to dirstate (i.e. within the same second for file-
788 # write to dirstate (i.e. within the same second for file-
771 # systems with a granularity of 1 sec). This commonly happens
789 # systems with a granularity of 1 sec). This commonly happens
772 # for at least a couple of files on 'update'.
790 # for at least a couple of files on 'update'.
773 # The user could change the file without changing its size
791 # The user could change the file without changing its size
774 # within the same second. Invalidate the file's mtime in
792 # within the same second. Invalidate the file's mtime in
775 # dirstate, forcing future 'status' calls to compare the
793 # dirstate, forcing future 'status' calls to compare the
776 # contents of the file if the size is the same. This prevents
794 # contents of the file if the size is the same. This prevents
777 # mistakenly treating such files as clean.
795 # mistakenly treating such files as clean.
778 e.set_possibly_dirty()
796 e.set_possibly_dirty()
779
797
780 if f in copymap:
798 if f in copymap:
781 f = b"%s\0%s" % (f, copymap[f])
799 f = b"%s\0%s" % (f, copymap[f])
782 e = _pack(
800 e = _pack(
783 b">cllll",
801 b">cllll",
784 e.v1_state(),
802 e.v1_state(),
785 e.v1_mode(),
803 e.v1_mode(),
786 e.v1_size(),
804 e.v1_size(),
787 e.v1_mtime(),
805 e.v1_mtime(),
788 len(f),
806 len(f),
789 )
807 )
790 write(e)
808 write(e)
791 write(f)
809 write(f)
792 return cs.getvalue()
810 return cs.getvalue()
@@ -1,1080 +1,1078 b''
1 # This file is automatically @generated by Cargo.
1 # This file is automatically @generated by Cargo.
2 # It is not intended for manual editing.
2 # It is not intended for manual editing.
3 version = 3
4
5 [[package]]
3 [[package]]
6 name = "adler"
4 name = "adler"
7 version = "0.2.3"
5 version = "0.2.3"
8 source = "registry+https://github.com/rust-lang/crates.io-index"
6 source = "registry+https://github.com/rust-lang/crates.io-index"
9 checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
7 checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
10
8
11 [[package]]
9 [[package]]
12 name = "aho-corasick"
10 name = "aho-corasick"
13 version = "0.7.15"
11 version = "0.7.15"
14 source = "registry+https://github.com/rust-lang/crates.io-index"
12 source = "registry+https://github.com/rust-lang/crates.io-index"
15 checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
13 checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
16 dependencies = [
14 dependencies = [
17 "memchr",
15 "memchr",
18 ]
16 ]
19
17
20 [[package]]
18 [[package]]
21 name = "ansi_term"
19 name = "ansi_term"
22 version = "0.11.0"
20 version = "0.11.0"
23 source = "registry+https://github.com/rust-lang/crates.io-index"
21 source = "registry+https://github.com/rust-lang/crates.io-index"
24 checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
22 checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
25 dependencies = [
23 dependencies = [
26 "winapi",
24 "winapi",
27 ]
25 ]
28
26
29 [[package]]
27 [[package]]
30 name = "atty"
28 name = "atty"
31 version = "0.2.14"
29 version = "0.2.14"
32 source = "registry+https://github.com/rust-lang/crates.io-index"
30 source = "registry+https://github.com/rust-lang/crates.io-index"
33 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
31 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
34 dependencies = [
32 dependencies = [
35 "hermit-abi",
33 "hermit-abi",
36 "libc",
34 "libc",
37 "winapi",
35 "winapi",
38 ]
36 ]
39
37
40 [[package]]
38 [[package]]
41 name = "autocfg"
39 name = "autocfg"
42 version = "1.0.1"
40 version = "1.0.1"
43 source = "registry+https://github.com/rust-lang/crates.io-index"
41 source = "registry+https://github.com/rust-lang/crates.io-index"
44 checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
42 checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
45
43
46 [[package]]
44 [[package]]
47 name = "bitflags"
45 name = "bitflags"
48 version = "1.2.1"
46 version = "1.2.1"
49 source = "registry+https://github.com/rust-lang/crates.io-index"
47 source = "registry+https://github.com/rust-lang/crates.io-index"
50 checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
48 checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
51
49
52 [[package]]
50 [[package]]
53 name = "bitmaps"
51 name = "bitmaps"
54 version = "2.1.0"
52 version = "2.1.0"
55 source = "registry+https://github.com/rust-lang/crates.io-index"
53 source = "registry+https://github.com/rust-lang/crates.io-index"
56 checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
54 checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
57 dependencies = [
55 dependencies = [
58 "typenum",
56 "typenum",
59 ]
57 ]
60
58
61 [[package]]
59 [[package]]
62 name = "block-buffer"
60 name = "block-buffer"
63 version = "0.9.0"
61 version = "0.9.0"
64 source = "registry+https://github.com/rust-lang/crates.io-index"
62 source = "registry+https://github.com/rust-lang/crates.io-index"
65 checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
63 checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
66 dependencies = [
64 dependencies = [
67 "generic-array",
65 "generic-array",
68 ]
66 ]
69
67
70 [[package]]
68 [[package]]
71 name = "byteorder"
69 name = "byteorder"
72 version = "1.3.4"
70 version = "1.3.4"
73 source = "registry+https://github.com/rust-lang/crates.io-index"
71 source = "registry+https://github.com/rust-lang/crates.io-index"
74 checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
72 checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
75
73
76 [[package]]
74 [[package]]
77 name = "bytes-cast"
75 name = "bytes-cast"
78 version = "0.2.0"
76 version = "0.2.0"
79 source = "registry+https://github.com/rust-lang/crates.io-index"
77 source = "registry+https://github.com/rust-lang/crates.io-index"
80 checksum = "0d434f9a4ecbe987e7ccfda7274b6f82ea52c9b63742565a65cb5e8ba0f2c452"
78 checksum = "0d434f9a4ecbe987e7ccfda7274b6f82ea52c9b63742565a65cb5e8ba0f2c452"
81 dependencies = [
79 dependencies = [
82 "bytes-cast-derive",
80 "bytes-cast-derive",
83 ]
81 ]
84
82
85 [[package]]
83 [[package]]
86 name = "bytes-cast-derive"
84 name = "bytes-cast-derive"
87 version = "0.1.0"
85 version = "0.1.0"
88 source = "registry+https://github.com/rust-lang/crates.io-index"
86 source = "registry+https://github.com/rust-lang/crates.io-index"
89 checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb"
87 checksum = "cb936af9de38476664d6b58e529aff30d482e4ce1c5e150293d00730b0d81fdb"
90 dependencies = [
88 dependencies = [
91 "proc-macro2",
89 "proc-macro2",
92 "quote",
90 "quote",
93 "syn",
91 "syn",
94 ]
92 ]
95
93
96 [[package]]
94 [[package]]
97 name = "cc"
95 name = "cc"
98 version = "1.0.66"
96 version = "1.0.66"
99 source = "registry+https://github.com/rust-lang/crates.io-index"
97 source = "registry+https://github.com/rust-lang/crates.io-index"
100 checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
98 checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
101 dependencies = [
99 dependencies = [
102 "jobserver",
100 "jobserver",
103 ]
101 ]
104
102
105 [[package]]
103 [[package]]
106 name = "cfg-if"
104 name = "cfg-if"
107 version = "0.1.10"
105 version = "0.1.10"
108 source = "registry+https://github.com/rust-lang/crates.io-index"
106 source = "registry+https://github.com/rust-lang/crates.io-index"
109 checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
107 checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
110
108
111 [[package]]
109 [[package]]
112 name = "cfg-if"
110 name = "cfg-if"
113 version = "1.0.0"
111 version = "1.0.0"
114 source = "registry+https://github.com/rust-lang/crates.io-index"
112 source = "registry+https://github.com/rust-lang/crates.io-index"
115 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
113 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
116
114
117 [[package]]
115 [[package]]
118 name = "chrono"
116 name = "chrono"
119 version = "0.4.19"
117 version = "0.4.19"
120 source = "registry+https://github.com/rust-lang/crates.io-index"
118 source = "registry+https://github.com/rust-lang/crates.io-index"
121 checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
119 checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
122 dependencies = [
120 dependencies = [
123 "libc",
121 "libc",
124 "num-integer",
122 "num-integer",
125 "num-traits",
123 "num-traits",
126 "time",
124 "time",
127 "winapi",
125 "winapi",
128 ]
126 ]
129
127
130 [[package]]
128 [[package]]
131 name = "clap"
129 name = "clap"
132 version = "2.33.3"
130 version = "2.33.3"
133 source = "registry+https://github.com/rust-lang/crates.io-index"
131 source = "registry+https://github.com/rust-lang/crates.io-index"
134 checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
132 checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
135 dependencies = [
133 dependencies = [
136 "ansi_term",
134 "ansi_term",
137 "atty",
135 "atty",
138 "bitflags",
136 "bitflags",
139 "strsim",
137 "strsim",
140 "textwrap",
138 "textwrap",
141 "unicode-width",
139 "unicode-width",
142 "vec_map",
140 "vec_map",
143 ]
141 ]
144
142
145 [[package]]
143 [[package]]
146 name = "const_fn"
144 name = "const_fn"
147 version = "0.4.4"
145 version = "0.4.4"
148 source = "registry+https://github.com/rust-lang/crates.io-index"
146 source = "registry+https://github.com/rust-lang/crates.io-index"
149 checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
147 checksum = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
150
148
151 [[package]]
149 [[package]]
152 name = "cpufeatures"
150 name = "cpufeatures"
153 version = "0.1.4"
151 version = "0.1.4"
154 source = "registry+https://github.com/rust-lang/crates.io-index"
152 source = "registry+https://github.com/rust-lang/crates.io-index"
155 checksum = "ed00c67cb5d0a7d64a44f6ad2668db7e7530311dd53ea79bcd4fb022c64911c8"
153 checksum = "ed00c67cb5d0a7d64a44f6ad2668db7e7530311dd53ea79bcd4fb022c64911c8"
156 dependencies = [
154 dependencies = [
157 "libc",
155 "libc",
158 ]
156 ]
159
157
160 [[package]]
158 [[package]]
161 name = "cpython"
159 name = "cpython"
162 version = "0.6.0"
160 version = "0.6.0"
163 source = "registry+https://github.com/rust-lang/crates.io-index"
161 source = "registry+https://github.com/rust-lang/crates.io-index"
164 checksum = "8094679a4e9bfc8035572162624bc800eda35b5f9eff2537b9cd9aacc3d9782e"
162 checksum = "8094679a4e9bfc8035572162624bc800eda35b5f9eff2537b9cd9aacc3d9782e"
165 dependencies = [
163 dependencies = [
166 "libc",
164 "libc",
167 "num-traits",
165 "num-traits",
168 "paste",
166 "paste",
169 "python27-sys",
167 "python27-sys",
170 "python3-sys",
168 "python3-sys",
171 ]
169 ]
172
170
173 [[package]]
171 [[package]]
174 name = "crc32fast"
172 name = "crc32fast"
175 version = "1.2.1"
173 version = "1.2.1"
176 source = "registry+https://github.com/rust-lang/crates.io-index"
174 source = "registry+https://github.com/rust-lang/crates.io-index"
177 checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
175 checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
178 dependencies = [
176 dependencies = [
179 "cfg-if 1.0.0",
177 "cfg-if 1.0.0",
180 ]
178 ]
181
179
182 [[package]]
180 [[package]]
183 name = "crossbeam-channel"
181 name = "crossbeam-channel"
184 version = "0.4.4"
182 version = "0.4.4"
185 source = "registry+https://github.com/rust-lang/crates.io-index"
183 source = "registry+https://github.com/rust-lang/crates.io-index"
186 checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
184 checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
187 dependencies = [
185 dependencies = [
188 "crossbeam-utils 0.7.2",
186 "crossbeam-utils 0.7.2",
189 "maybe-uninit",
187 "maybe-uninit",
190 ]
188 ]
191
189
192 [[package]]
190 [[package]]
193 name = "crossbeam-channel"
191 name = "crossbeam-channel"
194 version = "0.5.0"
192 version = "0.5.0"
195 source = "registry+https://github.com/rust-lang/crates.io-index"
193 source = "registry+https://github.com/rust-lang/crates.io-index"
196 checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
194 checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
197 dependencies = [
195 dependencies = [
198 "cfg-if 1.0.0",
196 "cfg-if 1.0.0",
199 "crossbeam-utils 0.8.1",
197 "crossbeam-utils 0.8.1",
200 ]
198 ]
201
199
202 [[package]]
200 [[package]]
203 name = "crossbeam-deque"
201 name = "crossbeam-deque"
204 version = "0.8.0"
202 version = "0.8.0"
205 source = "registry+https://github.com/rust-lang/crates.io-index"
203 source = "registry+https://github.com/rust-lang/crates.io-index"
206 checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
204 checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
207 dependencies = [
205 dependencies = [
208 "cfg-if 1.0.0",
206 "cfg-if 1.0.0",
209 "crossbeam-epoch",
207 "crossbeam-epoch",
210 "crossbeam-utils 0.8.1",
208 "crossbeam-utils 0.8.1",
211 ]
209 ]
212
210
213 [[package]]
211 [[package]]
214 name = "crossbeam-epoch"
212 name = "crossbeam-epoch"
215 version = "0.9.1"
213 version = "0.9.1"
216 source = "registry+https://github.com/rust-lang/crates.io-index"
214 source = "registry+https://github.com/rust-lang/crates.io-index"
217 checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
215 checksum = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
218 dependencies = [
216 dependencies = [
219 "cfg-if 1.0.0",
217 "cfg-if 1.0.0",
220 "const_fn",
218 "const_fn",
221 "crossbeam-utils 0.8.1",
219 "crossbeam-utils 0.8.1",
222 "lazy_static",
220 "lazy_static",
223 "memoffset",
221 "memoffset",
224 "scopeguard",
222 "scopeguard",
225 ]
223 ]
226
224
227 [[package]]
225 [[package]]
228 name = "crossbeam-utils"
226 name = "crossbeam-utils"
229 version = "0.7.2"
227 version = "0.7.2"
230 source = "registry+https://github.com/rust-lang/crates.io-index"
228 source = "registry+https://github.com/rust-lang/crates.io-index"
231 checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
229 checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
232 dependencies = [
230 dependencies = [
233 "autocfg",
231 "autocfg",
234 "cfg-if 0.1.10",
232 "cfg-if 0.1.10",
235 "lazy_static",
233 "lazy_static",
236 ]
234 ]
237
235
238 [[package]]
236 [[package]]
239 name = "crossbeam-utils"
237 name = "crossbeam-utils"
240 version = "0.8.1"
238 version = "0.8.1"
241 source = "registry+https://github.com/rust-lang/crates.io-index"
239 source = "registry+https://github.com/rust-lang/crates.io-index"
242 checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
240 checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
243 dependencies = [
241 dependencies = [
244 "autocfg",
242 "autocfg",
245 "cfg-if 1.0.0",
243 "cfg-if 1.0.0",
246 "lazy_static",
244 "lazy_static",
247 ]
245 ]
248
246
249 [[package]]
247 [[package]]
250 name = "ctor"
248 name = "ctor"
251 version = "0.1.16"
249 version = "0.1.16"
252 source = "registry+https://github.com/rust-lang/crates.io-index"
250 source = "registry+https://github.com/rust-lang/crates.io-index"
253 checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
251 checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
254 dependencies = [
252 dependencies = [
255 "quote",
253 "quote",
256 "syn",
254 "syn",
257 ]
255 ]
258
256
259 [[package]]
257 [[package]]
260 name = "derive_more"
258 name = "derive_more"
261 version = "0.99.11"
259 version = "0.99.11"
262 source = "registry+https://github.com/rust-lang/crates.io-index"
260 source = "registry+https://github.com/rust-lang/crates.io-index"
263 checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
261 checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c"
264 dependencies = [
262 dependencies = [
265 "proc-macro2",
263 "proc-macro2",
266 "quote",
264 "quote",
267 "syn",
265 "syn",
268 ]
266 ]
269
267
270 [[package]]
268 [[package]]
271 name = "difference"
269 name = "difference"
272 version = "2.0.0"
270 version = "2.0.0"
273 source = "registry+https://github.com/rust-lang/crates.io-index"
271 source = "registry+https://github.com/rust-lang/crates.io-index"
274 checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
272 checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
275
273
276 [[package]]
274 [[package]]
277 name = "digest"
275 name = "digest"
278 version = "0.9.0"
276 version = "0.9.0"
279 source = "registry+https://github.com/rust-lang/crates.io-index"
277 source = "registry+https://github.com/rust-lang/crates.io-index"
280 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
278 checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
281 dependencies = [
279 dependencies = [
282 "generic-array",
280 "generic-array",
283 ]
281 ]
284
282
285 [[package]]
283 [[package]]
286 name = "either"
284 name = "either"
287 version = "1.6.1"
285 version = "1.6.1"
288 source = "registry+https://github.com/rust-lang/crates.io-index"
286 source = "registry+https://github.com/rust-lang/crates.io-index"
289 checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
287 checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
290
288
291 [[package]]
289 [[package]]
292 name = "env_logger"
290 name = "env_logger"
293 version = "0.7.1"
291 version = "0.7.1"
294 source = "registry+https://github.com/rust-lang/crates.io-index"
292 source = "registry+https://github.com/rust-lang/crates.io-index"
295 checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
293 checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
296 dependencies = [
294 dependencies = [
297 "atty",
295 "atty",
298 "humantime",
296 "humantime",
299 "log",
297 "log",
300 "regex",
298 "regex",
301 "termcolor",
299 "termcolor",
302 ]
300 ]
303
301
304 [[package]]
302 [[package]]
305 name = "flate2"
303 name = "flate2"
306 version = "1.0.19"
304 version = "1.0.19"
307 source = "registry+https://github.com/rust-lang/crates.io-index"
305 source = "registry+https://github.com/rust-lang/crates.io-index"
308 checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
306 checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
309 dependencies = [
307 dependencies = [
310 "cfg-if 1.0.0",
308 "cfg-if 1.0.0",
311 "crc32fast",
309 "crc32fast",
312 "libc",
310 "libc",
313 "libz-sys",
311 "libz-sys",
314 "miniz_oxide",
312 "miniz_oxide",
315 ]
313 ]
316
314
317 [[package]]
315 [[package]]
318 name = "format-bytes"
316 name = "format-bytes"
319 version = "0.2.2"
317 version = "0.2.2"
320 source = "registry+https://github.com/rust-lang/crates.io-index"
318 source = "registry+https://github.com/rust-lang/crates.io-index"
321 checksum = "1c4e89040c7fd7b4e6ba2820ac705a45def8a0c098ec78d170ae88f1ef1d5762"
319 checksum = "1c4e89040c7fd7b4e6ba2820ac705a45def8a0c098ec78d170ae88f1ef1d5762"
322 dependencies = [
320 dependencies = [
323 "format-bytes-macros",
321 "format-bytes-macros",
324 "proc-macro-hack",
322 "proc-macro-hack",
325 ]
323 ]
326
324
327 [[package]]
325 [[package]]
328 name = "format-bytes-macros"
326 name = "format-bytes-macros"
329 version = "0.3.0"
327 version = "0.3.0"
330 source = "registry+https://github.com/rust-lang/crates.io-index"
328 source = "registry+https://github.com/rust-lang/crates.io-index"
331 checksum = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d"
329 checksum = "b05089e341a0460449e2210c3bf7b61597860b07f0deae58da38dbed0a4c6b6d"
332 dependencies = [
330 dependencies = [
333 "proc-macro-hack",
331 "proc-macro-hack",
334 "proc-macro2",
332 "proc-macro2",
335 "quote",
333 "quote",
336 "syn",
334 "syn",
337 ]
335 ]
338
336
339 [[package]]
337 [[package]]
340 name = "generic-array"
338 name = "generic-array"
341 version = "0.14.4"
339 version = "0.14.4"
342 source = "registry+https://github.com/rust-lang/crates.io-index"
340 source = "registry+https://github.com/rust-lang/crates.io-index"
343 checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
341 checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
344 dependencies = [
342 dependencies = [
345 "typenum",
343 "typenum",
346 "version_check",
344 "version_check",
347 ]
345 ]
348
346
349 [[package]]
347 [[package]]
350 name = "getrandom"
348 name = "getrandom"
351 version = "0.1.15"
349 version = "0.1.15"
352 source = "registry+https://github.com/rust-lang/crates.io-index"
350 source = "registry+https://github.com/rust-lang/crates.io-index"
353 checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
351 checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
354 dependencies = [
352 dependencies = [
355 "cfg-if 0.1.10",
353 "cfg-if 0.1.10",
356 "libc",
354 "libc",
357 "wasi 0.9.0+wasi-snapshot-preview1",
355 "wasi 0.9.0+wasi-snapshot-preview1",
358 ]
356 ]
359
357
360 [[package]]
358 [[package]]
361 name = "glob"
359 name = "glob"
362 version = "0.3.0"
360 version = "0.3.0"
363 source = "registry+https://github.com/rust-lang/crates.io-index"
361 source = "registry+https://github.com/rust-lang/crates.io-index"
364 checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
362 checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
365
363
366 [[package]]
364 [[package]]
367 name = "hermit-abi"
365 name = "hermit-abi"
368 version = "0.1.17"
366 version = "0.1.17"
369 source = "registry+https://github.com/rust-lang/crates.io-index"
367 source = "registry+https://github.com/rust-lang/crates.io-index"
370 checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
368 checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
371 dependencies = [
369 dependencies = [
372 "libc",
370 "libc",
373 ]
371 ]
374
372
375 [[package]]
373 [[package]]
376 name = "hg-core"
374 name = "hg-core"
377 version = "0.1.0"
375 version = "0.1.0"
378 dependencies = [
376 dependencies = [
379 "byteorder",
377 "byteorder",
380 "bytes-cast",
378 "bytes-cast",
381 "clap",
379 "clap",
382 "crossbeam-channel 0.4.4",
380 "crossbeam-channel 0.4.4",
383 "derive_more",
381 "derive_more",
384 "flate2",
382 "flate2",
385 "format-bytes",
383 "format-bytes",
386 "home",
384 "home",
387 "im-rc",
385 "im-rc",
388 "itertools",
386 "itertools",
389 "lazy_static",
387 "lazy_static",
390 "log",
388 "log",
391 "memmap2",
389 "memmap2",
392 "micro-timer",
390 "micro-timer",
393 "pretty_assertions",
391 "pretty_assertions",
394 "rand",
392 "rand",
395 "rand_distr",
393 "rand_distr",
396 "rand_pcg",
394 "rand_pcg",
397 "rayon",
395 "rayon",
398 "regex",
396 "regex",
399 "same-file",
397 "same-file",
400 "sha-1",
398 "sha-1",
401 "stable_deref_trait",
399 "stable_deref_trait",
402 "tempfile",
400 "tempfile",
403 "twox-hash",
401 "twox-hash",
404 "zstd",
402 "zstd",
405 ]
403 ]
406
404
407 [[package]]
405 [[package]]
408 name = "hg-cpython"
406 name = "hg-cpython"
409 version = "0.1.0"
407 version = "0.1.0"
410 dependencies = [
408 dependencies = [
411 "cpython",
409 "cpython",
412 "crossbeam-channel 0.4.4",
410 "crossbeam-channel 0.4.4",
413 "env_logger",
411 "env_logger",
414 "hg-core",
412 "hg-core",
415 "libc",
413 "libc",
416 "log",
414 "log",
417 "stable_deref_trait",
415 "stable_deref_trait",
418 ]
416 ]
419
417
420 [[package]]
418 [[package]]
421 name = "home"
419 name = "home"
422 version = "0.5.3"
420 version = "0.5.3"
423 source = "registry+https://github.com/rust-lang/crates.io-index"
421 source = "registry+https://github.com/rust-lang/crates.io-index"
424 checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
422 checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
425 dependencies = [
423 dependencies = [
426 "winapi",
424 "winapi",
427 ]
425 ]
428
426
429 [[package]]
427 [[package]]
430 name = "humantime"
428 name = "humantime"
431 version = "1.3.0"
429 version = "1.3.0"
432 source = "registry+https://github.com/rust-lang/crates.io-index"
430 source = "registry+https://github.com/rust-lang/crates.io-index"
433 checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
431 checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
434 dependencies = [
432 dependencies = [
435 "quick-error",
433 "quick-error",
436 ]
434 ]
437
435
438 [[package]]
436 [[package]]
439 name = "im-rc"
437 name = "im-rc"
440 version = "15.0.0"
438 version = "15.0.0"
441 source = "registry+https://github.com/rust-lang/crates.io-index"
439 source = "registry+https://github.com/rust-lang/crates.io-index"
442 checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
440 checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
443 dependencies = [
441 dependencies = [
444 "bitmaps",
442 "bitmaps",
445 "rand_core",
443 "rand_core",
446 "rand_xoshiro",
444 "rand_xoshiro",
447 "sized-chunks",
445 "sized-chunks",
448 "typenum",
446 "typenum",
449 "version_check",
447 "version_check",
450 ]
448 ]
451
449
452 [[package]]
450 [[package]]
453 name = "itertools"
451 name = "itertools"
454 version = "0.9.0"
452 version = "0.9.0"
455 source = "registry+https://github.com/rust-lang/crates.io-index"
453 source = "registry+https://github.com/rust-lang/crates.io-index"
456 checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
454 checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
457 dependencies = [
455 dependencies = [
458 "either",
456 "either",
459 ]
457 ]
460
458
461 [[package]]
459 [[package]]
462 name = "jobserver"
460 name = "jobserver"
463 version = "0.1.21"
461 version = "0.1.21"
464 source = "registry+https://github.com/rust-lang/crates.io-index"
462 source = "registry+https://github.com/rust-lang/crates.io-index"
465 checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
463 checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
466 dependencies = [
464 dependencies = [
467 "libc",
465 "libc",
468 ]
466 ]
469
467
470 [[package]]
468 [[package]]
471 name = "lazy_static"
469 name = "lazy_static"
472 version = "1.4.0"
470 version = "1.4.0"
473 source = "registry+https://github.com/rust-lang/crates.io-index"
471 source = "registry+https://github.com/rust-lang/crates.io-index"
474 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
472 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
475
473
476 [[package]]
474 [[package]]
477 name = "libc"
475 name = "libc"
478 version = "0.2.81"
476 version = "0.2.81"
479 source = "registry+https://github.com/rust-lang/crates.io-index"
477 source = "registry+https://github.com/rust-lang/crates.io-index"
480 checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
478 checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
481
479
482 [[package]]
480 [[package]]
483 name = "libz-sys"
481 name = "libz-sys"
484 version = "1.1.2"
482 version = "1.1.2"
485 source = "registry+https://github.com/rust-lang/crates.io-index"
483 source = "registry+https://github.com/rust-lang/crates.io-index"
486 checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
484 checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
487 dependencies = [
485 dependencies = [
488 "cc",
486 "cc",
489 "pkg-config",
487 "pkg-config",
490 "vcpkg",
488 "vcpkg",
491 ]
489 ]
492
490
493 [[package]]
491 [[package]]
494 name = "log"
492 name = "log"
495 version = "0.4.11"
493 version = "0.4.11"
496 source = "registry+https://github.com/rust-lang/crates.io-index"
494 source = "registry+https://github.com/rust-lang/crates.io-index"
497 checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
495 checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
498 dependencies = [
496 dependencies = [
499 "cfg-if 0.1.10",
497 "cfg-if 0.1.10",
500 ]
498 ]
501
499
502 [[package]]
500 [[package]]
503 name = "maybe-uninit"
501 name = "maybe-uninit"
504 version = "2.0.0"
502 version = "2.0.0"
505 source = "registry+https://github.com/rust-lang/crates.io-index"
503 source = "registry+https://github.com/rust-lang/crates.io-index"
506 checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
504 checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
507
505
508 [[package]]
506 [[package]]
509 name = "memchr"
507 name = "memchr"
510 version = "2.3.4"
508 version = "2.3.4"
511 source = "registry+https://github.com/rust-lang/crates.io-index"
509 source = "registry+https://github.com/rust-lang/crates.io-index"
512 checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
510 checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
513
511
514 [[package]]
512 [[package]]
515 name = "memmap2"
513 name = "memmap2"
516 version = "0.3.1"
514 version = "0.3.1"
517 source = "registry+https://github.com/rust-lang/crates.io-index"
515 source = "registry+https://github.com/rust-lang/crates.io-index"
518 checksum = "00b6c2ebff6180198788f5db08d7ce3bc1d0b617176678831a7510825973e357"
516 checksum = "00b6c2ebff6180198788f5db08d7ce3bc1d0b617176678831a7510825973e357"
519 dependencies = [
517 dependencies = [
520 "libc",
518 "libc",
521 ]
519 ]
522
520
523 [[package]]
521 [[package]]
524 name = "memoffset"
522 name = "memoffset"
525 version = "0.6.1"
523 version = "0.6.1"
526 source = "registry+https://github.com/rust-lang/crates.io-index"
524 source = "registry+https://github.com/rust-lang/crates.io-index"
527 checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
525 checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
528 dependencies = [
526 dependencies = [
529 "autocfg",
527 "autocfg",
530 ]
528 ]
531
529
532 [[package]]
530 [[package]]
533 name = "micro-timer"
531 name = "micro-timer"
534 version = "0.3.1"
532 version = "0.3.1"
535 source = "registry+https://github.com/rust-lang/crates.io-index"
533 source = "registry+https://github.com/rust-lang/crates.io-index"
536 checksum = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c"
534 checksum = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c"
537 dependencies = [
535 dependencies = [
538 "micro-timer-macros",
536 "micro-timer-macros",
539 "scopeguard",
537 "scopeguard",
540 ]
538 ]
541
539
542 [[package]]
540 [[package]]
543 name = "micro-timer-macros"
541 name = "micro-timer-macros"
544 version = "0.3.1"
542 version = "0.3.1"
545 source = "registry+https://github.com/rust-lang/crates.io-index"
543 source = "registry+https://github.com/rust-lang/crates.io-index"
546 checksum = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d"
544 checksum = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d"
547 dependencies = [
545 dependencies = [
548 "proc-macro2",
546 "proc-macro2",
549 "quote",
547 "quote",
550 "scopeguard",
548 "scopeguard",
551 "syn",
549 "syn",
552 ]
550 ]
553
551
554 [[package]]
552 [[package]]
555 name = "miniz_oxide"
553 name = "miniz_oxide"
556 version = "0.4.3"
554 version = "0.4.3"
557 source = "registry+https://github.com/rust-lang/crates.io-index"
555 source = "registry+https://github.com/rust-lang/crates.io-index"
558 checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
556 checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
559 dependencies = [
557 dependencies = [
560 "adler",
558 "adler",
561 "autocfg",
559 "autocfg",
562 ]
560 ]
563
561
564 [[package]]
562 [[package]]
565 name = "num-integer"
563 name = "num-integer"
566 version = "0.1.44"
564 version = "0.1.44"
567 source = "registry+https://github.com/rust-lang/crates.io-index"
565 source = "registry+https://github.com/rust-lang/crates.io-index"
568 checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
566 checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
569 dependencies = [
567 dependencies = [
570 "autocfg",
568 "autocfg",
571 "num-traits",
569 "num-traits",
572 ]
570 ]
573
571
574 [[package]]
572 [[package]]
575 name = "num-traits"
573 name = "num-traits"
576 version = "0.2.14"
574 version = "0.2.14"
577 source = "registry+https://github.com/rust-lang/crates.io-index"
575 source = "registry+https://github.com/rust-lang/crates.io-index"
578 checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
576 checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
579 dependencies = [
577 dependencies = [
580 "autocfg",
578 "autocfg",
581 ]
579 ]
582
580
583 [[package]]
581 [[package]]
584 name = "num_cpus"
582 name = "num_cpus"
585 version = "1.13.0"
583 version = "1.13.0"
586 source = "registry+https://github.com/rust-lang/crates.io-index"
584 source = "registry+https://github.com/rust-lang/crates.io-index"
587 checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
585 checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
588 dependencies = [
586 dependencies = [
589 "hermit-abi",
587 "hermit-abi",
590 "libc",
588 "libc",
591 ]
589 ]
592
590
593 [[package]]
591 [[package]]
594 name = "opaque-debug"
592 name = "opaque-debug"
595 version = "0.3.0"
593 version = "0.3.0"
596 source = "registry+https://github.com/rust-lang/crates.io-index"
594 source = "registry+https://github.com/rust-lang/crates.io-index"
597 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
595 checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
598
596
599 [[package]]
597 [[package]]
600 name = "output_vt100"
598 name = "output_vt100"
601 version = "0.1.2"
599 version = "0.1.2"
602 source = "registry+https://github.com/rust-lang/crates.io-index"
600 source = "registry+https://github.com/rust-lang/crates.io-index"
603 checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
601 checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
604 dependencies = [
602 dependencies = [
605 "winapi",
603 "winapi",
606 ]
604 ]
607
605
608 [[package]]
606 [[package]]
609 name = "paste"
607 name = "paste"
610 version = "1.0.5"
608 version = "1.0.5"
611 source = "registry+https://github.com/rust-lang/crates.io-index"
609 source = "registry+https://github.com/rust-lang/crates.io-index"
612 checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58"
610 checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58"
613
611
614 [[package]]
612 [[package]]
615 name = "pkg-config"
613 name = "pkg-config"
616 version = "0.3.19"
614 version = "0.3.19"
617 source = "registry+https://github.com/rust-lang/crates.io-index"
615 source = "registry+https://github.com/rust-lang/crates.io-index"
618 checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
616 checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
619
617
620 [[package]]
618 [[package]]
621 name = "ppv-lite86"
619 name = "ppv-lite86"
622 version = "0.2.10"
620 version = "0.2.10"
623 source = "registry+https://github.com/rust-lang/crates.io-index"
621 source = "registry+https://github.com/rust-lang/crates.io-index"
624 checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
622 checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
625
623
626 [[package]]
624 [[package]]
627 name = "pretty_assertions"
625 name = "pretty_assertions"
628 version = "0.6.1"
626 version = "0.6.1"
629 source = "registry+https://github.com/rust-lang/crates.io-index"
627 source = "registry+https://github.com/rust-lang/crates.io-index"
630 checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
628 checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
631 dependencies = [
629 dependencies = [
632 "ansi_term",
630 "ansi_term",
633 "ctor",
631 "ctor",
634 "difference",
632 "difference",
635 "output_vt100",
633 "output_vt100",
636 ]
634 ]
637
635
638 [[package]]
636 [[package]]
639 name = "proc-macro-hack"
637 name = "proc-macro-hack"
640 version = "0.5.19"
638 version = "0.5.19"
641 source = "registry+https://github.com/rust-lang/crates.io-index"
639 source = "registry+https://github.com/rust-lang/crates.io-index"
642 checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
640 checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
643
641
644 [[package]]
642 [[package]]
645 name = "proc-macro2"
643 name = "proc-macro2"
646 version = "1.0.24"
644 version = "1.0.24"
647 source = "registry+https://github.com/rust-lang/crates.io-index"
645 source = "registry+https://github.com/rust-lang/crates.io-index"
648 checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
646 checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
649 dependencies = [
647 dependencies = [
650 "unicode-xid",
648 "unicode-xid",
651 ]
649 ]
652
650
653 [[package]]
651 [[package]]
654 name = "python27-sys"
652 name = "python27-sys"
655 version = "0.6.0"
653 version = "0.6.0"
656 source = "registry+https://github.com/rust-lang/crates.io-index"
654 source = "registry+https://github.com/rust-lang/crates.io-index"
657 checksum = "5826ddbc5366eb0b0492040fdc25bf50bb49092c192bd45e80fb7a24dc6832ab"
655 checksum = "5826ddbc5366eb0b0492040fdc25bf50bb49092c192bd45e80fb7a24dc6832ab"
658 dependencies = [
656 dependencies = [
659 "libc",
657 "libc",
660 "regex",
658 "regex",
661 ]
659 ]
662
660
663 [[package]]
661 [[package]]
664 name = "python3-sys"
662 name = "python3-sys"
665 version = "0.6.0"
663 version = "0.6.0"
666 source = "registry+https://github.com/rust-lang/crates.io-index"
664 source = "registry+https://github.com/rust-lang/crates.io-index"
667 checksum = "b78af21b29594951a47fc3dac9b9eff0a3f077dec2f780ee943ae16a668f3b6a"
665 checksum = "b78af21b29594951a47fc3dac9b9eff0a3f077dec2f780ee943ae16a668f3b6a"
668 dependencies = [
666 dependencies = [
669 "libc",
667 "libc",
670 "regex",
668 "regex",
671 ]
669 ]
672
670
673 [[package]]
671 [[package]]
674 name = "quick-error"
672 name = "quick-error"
675 version = "1.2.3"
673 version = "1.2.3"
676 source = "registry+https://github.com/rust-lang/crates.io-index"
674 source = "registry+https://github.com/rust-lang/crates.io-index"
677 checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
675 checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
678
676
679 [[package]]
677 [[package]]
680 name = "quote"
678 name = "quote"
681 version = "1.0.7"
679 version = "1.0.7"
682 source = "registry+https://github.com/rust-lang/crates.io-index"
680 source = "registry+https://github.com/rust-lang/crates.io-index"
683 checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
681 checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
684 dependencies = [
682 dependencies = [
685 "proc-macro2",
683 "proc-macro2",
686 ]
684 ]
687
685
688 [[package]]
686 [[package]]
689 name = "rand"
687 name = "rand"
690 version = "0.7.3"
688 version = "0.7.3"
691 source = "registry+https://github.com/rust-lang/crates.io-index"
689 source = "registry+https://github.com/rust-lang/crates.io-index"
692 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
690 checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
693 dependencies = [
691 dependencies = [
694 "getrandom",
692 "getrandom",
695 "libc",
693 "libc",
696 "rand_chacha",
694 "rand_chacha",
697 "rand_core",
695 "rand_core",
698 "rand_hc",
696 "rand_hc",
699 ]
697 ]
700
698
701 [[package]]
699 [[package]]
702 name = "rand_chacha"
700 name = "rand_chacha"
703 version = "0.2.2"
701 version = "0.2.2"
704 source = "registry+https://github.com/rust-lang/crates.io-index"
702 source = "registry+https://github.com/rust-lang/crates.io-index"
705 checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
703 checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
706 dependencies = [
704 dependencies = [
707 "ppv-lite86",
705 "ppv-lite86",
708 "rand_core",
706 "rand_core",
709 ]
707 ]
710
708
711 [[package]]
709 [[package]]
712 name = "rand_core"
710 name = "rand_core"
713 version = "0.5.1"
711 version = "0.5.1"
714 source = "registry+https://github.com/rust-lang/crates.io-index"
712 source = "registry+https://github.com/rust-lang/crates.io-index"
715 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
713 checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
716 dependencies = [
714 dependencies = [
717 "getrandom",
715 "getrandom",
718 ]
716 ]
719
717
720 [[package]]
718 [[package]]
721 name = "rand_distr"
719 name = "rand_distr"
722 version = "0.2.2"
720 version = "0.2.2"
723 source = "registry+https://github.com/rust-lang/crates.io-index"
721 source = "registry+https://github.com/rust-lang/crates.io-index"
724 checksum = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2"
722 checksum = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2"
725 dependencies = [
723 dependencies = [
726 "rand",
724 "rand",
727 ]
725 ]
728
726
729 [[package]]
727 [[package]]
730 name = "rand_hc"
728 name = "rand_hc"
731 version = "0.2.0"
729 version = "0.2.0"
732 source = "registry+https://github.com/rust-lang/crates.io-index"
730 source = "registry+https://github.com/rust-lang/crates.io-index"
733 checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
731 checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
734 dependencies = [
732 dependencies = [
735 "rand_core",
733 "rand_core",
736 ]
734 ]
737
735
738 [[package]]
736 [[package]]
739 name = "rand_pcg"
737 name = "rand_pcg"
740 version = "0.2.1"
738 version = "0.2.1"
741 source = "registry+https://github.com/rust-lang/crates.io-index"
739 source = "registry+https://github.com/rust-lang/crates.io-index"
742 checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
740 checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
743 dependencies = [
741 dependencies = [
744 "rand_core",
742 "rand_core",
745 ]
743 ]
746
744
747 [[package]]
745 [[package]]
748 name = "rand_xoshiro"
746 name = "rand_xoshiro"
749 version = "0.4.0"
747 version = "0.4.0"
750 source = "registry+https://github.com/rust-lang/crates.io-index"
748 source = "registry+https://github.com/rust-lang/crates.io-index"
751 checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
749 checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
752 dependencies = [
750 dependencies = [
753 "rand_core",
751 "rand_core",
754 ]
752 ]
755
753
756 [[package]]
754 [[package]]
757 name = "rayon"
755 name = "rayon"
758 version = "1.5.0"
756 version = "1.5.0"
759 source = "registry+https://github.com/rust-lang/crates.io-index"
757 source = "registry+https://github.com/rust-lang/crates.io-index"
760 checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
758 checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
761 dependencies = [
759 dependencies = [
762 "autocfg",
760 "autocfg",
763 "crossbeam-deque",
761 "crossbeam-deque",
764 "either",
762 "either",
765 "rayon-core",
763 "rayon-core",
766 ]
764 ]
767
765
768 [[package]]
766 [[package]]
769 name = "rayon-core"
767 name = "rayon-core"
770 version = "1.9.0"
768 version = "1.9.0"
771 source = "registry+https://github.com/rust-lang/crates.io-index"
769 source = "registry+https://github.com/rust-lang/crates.io-index"
772 checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
770 checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
773 dependencies = [
771 dependencies = [
774 "crossbeam-channel 0.5.0",
772 "crossbeam-channel 0.5.0",
775 "crossbeam-deque",
773 "crossbeam-deque",
776 "crossbeam-utils 0.8.1",
774 "crossbeam-utils 0.8.1",
777 "lazy_static",
775 "lazy_static",
778 "num_cpus",
776 "num_cpus",
779 ]
777 ]
780
778
781 [[package]]
779 [[package]]
782 name = "redox_syscall"
780 name = "redox_syscall"
783 version = "0.1.57"
781 version = "0.1.57"
784 source = "registry+https://github.com/rust-lang/crates.io-index"
782 source = "registry+https://github.com/rust-lang/crates.io-index"
785 checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
783 checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
786
784
787 [[package]]
785 [[package]]
788 name = "regex"
786 name = "regex"
789 version = "1.4.2"
787 version = "1.4.2"
790 source = "registry+https://github.com/rust-lang/crates.io-index"
788 source = "registry+https://github.com/rust-lang/crates.io-index"
791 checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
789 checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
792 dependencies = [
790 dependencies = [
793 "aho-corasick",
791 "aho-corasick",
794 "memchr",
792 "memchr",
795 "regex-syntax",
793 "regex-syntax",
796 "thread_local",
794 "thread_local",
797 ]
795 ]
798
796
799 [[package]]
797 [[package]]
800 name = "regex-syntax"
798 name = "regex-syntax"
801 version = "0.6.21"
799 version = "0.6.21"
802 source = "registry+https://github.com/rust-lang/crates.io-index"
800 source = "registry+https://github.com/rust-lang/crates.io-index"
803 checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
801 checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
804
802
805 [[package]]
803 [[package]]
806 name = "remove_dir_all"
804 name = "remove_dir_all"
807 version = "0.5.3"
805 version = "0.5.3"
808 source = "registry+https://github.com/rust-lang/crates.io-index"
806 source = "registry+https://github.com/rust-lang/crates.io-index"
809 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
807 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
810 dependencies = [
808 dependencies = [
811 "winapi",
809 "winapi",
812 ]
810 ]
813
811
814 [[package]]
812 [[package]]
815 name = "rhg"
813 name = "rhg"
816 version = "0.1.0"
814 version = "0.1.0"
817 dependencies = [
815 dependencies = [
818 "chrono",
816 "chrono",
819 "clap",
817 "clap",
820 "derive_more",
818 "derive_more",
821 "env_logger",
819 "env_logger",
822 "format-bytes",
820 "format-bytes",
823 "hg-core",
821 "hg-core",
824 "home",
822 "home",
825 "lazy_static",
823 "lazy_static",
826 "log",
824 "log",
827 "micro-timer",
825 "micro-timer",
828 "regex",
826 "regex",
829 "users",
827 "users",
830 ]
828 ]
831
829
832 [[package]]
830 [[package]]
833 name = "same-file"
831 name = "same-file"
834 version = "1.0.6"
832 version = "1.0.6"
835 source = "registry+https://github.com/rust-lang/crates.io-index"
833 source = "registry+https://github.com/rust-lang/crates.io-index"
836 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
834 checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
837 dependencies = [
835 dependencies = [
838 "winapi-util",
836 "winapi-util",
839 ]
837 ]
840
838
841 [[package]]
839 [[package]]
842 name = "scopeguard"
840 name = "scopeguard"
843 version = "1.1.0"
841 version = "1.1.0"
844 source = "registry+https://github.com/rust-lang/crates.io-index"
842 source = "registry+https://github.com/rust-lang/crates.io-index"
845 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
843 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
846
844
847 [[package]]
845 [[package]]
848 name = "sha-1"
846 name = "sha-1"
849 version = "0.9.6"
847 version = "0.9.6"
850 source = "registry+https://github.com/rust-lang/crates.io-index"
848 source = "registry+https://github.com/rust-lang/crates.io-index"
851 checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
849 checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16"
852 dependencies = [
850 dependencies = [
853 "block-buffer",
851 "block-buffer",
854 "cfg-if 1.0.0",
852 "cfg-if 1.0.0",
855 "cpufeatures",
853 "cpufeatures",
856 "digest",
854 "digest",
857 "opaque-debug",
855 "opaque-debug",
858 ]
856 ]
859
857
860 [[package]]
858 [[package]]
861 name = "sized-chunks"
859 name = "sized-chunks"
862 version = "0.6.2"
860 version = "0.6.2"
863 source = "registry+https://github.com/rust-lang/crates.io-index"
861 source = "registry+https://github.com/rust-lang/crates.io-index"
864 checksum = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f"
862 checksum = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f"
865 dependencies = [
863 dependencies = [
866 "bitmaps",
864 "bitmaps",
867 "typenum",
865 "typenum",
868 ]
866 ]
869
867
870 [[package]]
868 [[package]]
871 name = "stable_deref_trait"
869 name = "stable_deref_trait"
872 version = "1.2.0"
870 version = "1.2.0"
873 source = "registry+https://github.com/rust-lang/crates.io-index"
871 source = "registry+https://github.com/rust-lang/crates.io-index"
874 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
872 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
875
873
876 [[package]]
874 [[package]]
877 name = "static_assertions"
875 name = "static_assertions"
878 version = "1.1.0"
876 version = "1.1.0"
879 source = "registry+https://github.com/rust-lang/crates.io-index"
877 source = "registry+https://github.com/rust-lang/crates.io-index"
880 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
878 checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
881
879
882 [[package]]
880 [[package]]
883 name = "strsim"
881 name = "strsim"
884 version = "0.8.0"
882 version = "0.8.0"
885 source = "registry+https://github.com/rust-lang/crates.io-index"
883 source = "registry+https://github.com/rust-lang/crates.io-index"
886 checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
884 checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
887
885
888 [[package]]
886 [[package]]
889 name = "syn"
887 name = "syn"
890 version = "1.0.54"
888 version = "1.0.54"
891 source = "registry+https://github.com/rust-lang/crates.io-index"
889 source = "registry+https://github.com/rust-lang/crates.io-index"
892 checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
890 checksum = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
893 dependencies = [
891 dependencies = [
894 "proc-macro2",
892 "proc-macro2",
895 "quote",
893 "quote",
896 "unicode-xid",
894 "unicode-xid",
897 ]
895 ]
898
896
899 [[package]]
897 [[package]]
900 name = "tempfile"
898 name = "tempfile"
901 version = "3.1.0"
899 version = "3.1.0"
902 source = "registry+https://github.com/rust-lang/crates.io-index"
900 source = "registry+https://github.com/rust-lang/crates.io-index"
903 checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
901 checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
904 dependencies = [
902 dependencies = [
905 "cfg-if 0.1.10",
903 "cfg-if 0.1.10",
906 "libc",
904 "libc",
907 "rand",
905 "rand",
908 "redox_syscall",
906 "redox_syscall",
909 "remove_dir_all",
907 "remove_dir_all",
910 "winapi",
908 "winapi",
911 ]
909 ]
912
910
913 [[package]]
911 [[package]]
914 name = "termcolor"
912 name = "termcolor"
915 version = "1.1.2"
913 version = "1.1.2"
916 source = "registry+https://github.com/rust-lang/crates.io-index"
914 source = "registry+https://github.com/rust-lang/crates.io-index"
917 checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
915 checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
918 dependencies = [
916 dependencies = [
919 "winapi-util",
917 "winapi-util",
920 ]
918 ]
921
919
922 [[package]]
920 [[package]]
923 name = "textwrap"
921 name = "textwrap"
924 version = "0.11.0"
922 version = "0.11.0"
925 source = "registry+https://github.com/rust-lang/crates.io-index"
923 source = "registry+https://github.com/rust-lang/crates.io-index"
926 checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
924 checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
927 dependencies = [
925 dependencies = [
928 "unicode-width",
926 "unicode-width",
929 ]
927 ]
930
928
931 [[package]]
929 [[package]]
932 name = "thread_local"
930 name = "thread_local"
933 version = "1.0.1"
931 version = "1.0.1"
934 source = "registry+https://github.com/rust-lang/crates.io-index"
932 source = "registry+https://github.com/rust-lang/crates.io-index"
935 checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
933 checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
936 dependencies = [
934 dependencies = [
937 "lazy_static",
935 "lazy_static",
938 ]
936 ]
939
937
940 [[package]]
938 [[package]]
941 name = "time"
939 name = "time"
942 version = "0.1.44"
940 version = "0.1.44"
943 source = "registry+https://github.com/rust-lang/crates.io-index"
941 source = "registry+https://github.com/rust-lang/crates.io-index"
944 checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
942 checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
945 dependencies = [
943 dependencies = [
946 "libc",
944 "libc",
947 "wasi 0.10.0+wasi-snapshot-preview1",
945 "wasi 0.10.0+wasi-snapshot-preview1",
948 "winapi",
946 "winapi",
949 ]
947 ]
950
948
951 [[package]]
949 [[package]]
952 name = "twox-hash"
950 name = "twox-hash"
953 version = "1.6.0"
951 version = "1.6.0"
954 source = "registry+https://github.com/rust-lang/crates.io-index"
952 source = "registry+https://github.com/rust-lang/crates.io-index"
955 checksum = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59"
953 checksum = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59"
956 dependencies = [
954 dependencies = [
957 "cfg-if 0.1.10",
955 "cfg-if 0.1.10",
958 "rand",
956 "rand",
959 "static_assertions",
957 "static_assertions",
960 ]
958 ]
961
959
962 [[package]]
960 [[package]]
963 name = "typenum"
961 name = "typenum"
964 version = "1.12.0"
962 version = "1.12.0"
965 source = "registry+https://github.com/rust-lang/crates.io-index"
963 source = "registry+https://github.com/rust-lang/crates.io-index"
966 checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
964 checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
967
965
968 [[package]]
966 [[package]]
969 name = "unicode-width"
967 name = "unicode-width"
970 version = "0.1.8"
968 version = "0.1.8"
971 source = "registry+https://github.com/rust-lang/crates.io-index"
969 source = "registry+https://github.com/rust-lang/crates.io-index"
972 checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
970 checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
973
971
974 [[package]]
972 [[package]]
975 name = "unicode-xid"
973 name = "unicode-xid"
976 version = "0.2.1"
974 version = "0.2.1"
977 source = "registry+https://github.com/rust-lang/crates.io-index"
975 source = "registry+https://github.com/rust-lang/crates.io-index"
978 checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
976 checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
979
977
980 [[package]]
978 [[package]]
981 name = "users"
979 name = "users"
982 version = "0.11.0"
980 version = "0.11.0"
983 source = "registry+https://github.com/rust-lang/crates.io-index"
981 source = "registry+https://github.com/rust-lang/crates.io-index"
984 checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032"
982 checksum = "24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032"
985 dependencies = [
983 dependencies = [
986 "libc",
984 "libc",
987 "log",
985 "log",
988 ]
986 ]
989
987
990 [[package]]
988 [[package]]
991 name = "vcpkg"
989 name = "vcpkg"
992 version = "0.2.11"
990 version = "0.2.11"
993 source = "registry+https://github.com/rust-lang/crates.io-index"
991 source = "registry+https://github.com/rust-lang/crates.io-index"
994 checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
992 checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
995
993
996 [[package]]
994 [[package]]
997 name = "vec_map"
995 name = "vec_map"
998 version = "0.8.2"
996 version = "0.8.2"
999 source = "registry+https://github.com/rust-lang/crates.io-index"
997 source = "registry+https://github.com/rust-lang/crates.io-index"
1000 checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
998 checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
1001
999
1002 [[package]]
1000 [[package]]
1003 name = "version_check"
1001 name = "version_check"
1004 version = "0.9.2"
1002 version = "0.9.2"
1005 source = "registry+https://github.com/rust-lang/crates.io-index"
1003 source = "registry+https://github.com/rust-lang/crates.io-index"
1006 checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
1004 checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
1007
1005
1008 [[package]]
1006 [[package]]
1009 name = "wasi"
1007 name = "wasi"
1010 version = "0.9.0+wasi-snapshot-preview1"
1008 version = "0.9.0+wasi-snapshot-preview1"
1011 source = "registry+https://github.com/rust-lang/crates.io-index"
1009 source = "registry+https://github.com/rust-lang/crates.io-index"
1012 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1010 checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
1013
1011
1014 [[package]]
1012 [[package]]
1015 name = "wasi"
1013 name = "wasi"
1016 version = "0.10.0+wasi-snapshot-preview1"
1014 version = "0.10.0+wasi-snapshot-preview1"
1017 source = "registry+https://github.com/rust-lang/crates.io-index"
1015 source = "registry+https://github.com/rust-lang/crates.io-index"
1018 checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
1016 checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
1019
1017
1020 [[package]]
1018 [[package]]
1021 name = "winapi"
1019 name = "winapi"
1022 version = "0.3.9"
1020 version = "0.3.9"
1023 source = "registry+https://github.com/rust-lang/crates.io-index"
1021 source = "registry+https://github.com/rust-lang/crates.io-index"
1024 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1022 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
1025 dependencies = [
1023 dependencies = [
1026 "winapi-i686-pc-windows-gnu",
1024 "winapi-i686-pc-windows-gnu",
1027 "winapi-x86_64-pc-windows-gnu",
1025 "winapi-x86_64-pc-windows-gnu",
1028 ]
1026 ]
1029
1027
1030 [[package]]
1028 [[package]]
1031 name = "winapi-i686-pc-windows-gnu"
1029 name = "winapi-i686-pc-windows-gnu"
1032 version = "0.4.0"
1030 version = "0.4.0"
1033 source = "registry+https://github.com/rust-lang/crates.io-index"
1031 source = "registry+https://github.com/rust-lang/crates.io-index"
1034 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1032 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
1035
1033
1036 [[package]]
1034 [[package]]
1037 name = "winapi-util"
1035 name = "winapi-util"
1038 version = "0.1.5"
1036 version = "0.1.5"
1039 source = "registry+https://github.com/rust-lang/crates.io-index"
1037 source = "registry+https://github.com/rust-lang/crates.io-index"
1040 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1038 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
1041 dependencies = [
1039 dependencies = [
1042 "winapi",
1040 "winapi",
1043 ]
1041 ]
1044
1042
1045 [[package]]
1043 [[package]]
1046 name = "winapi-x86_64-pc-windows-gnu"
1044 name = "winapi-x86_64-pc-windows-gnu"
1047 version = "0.4.0"
1045 version = "0.4.0"
1048 source = "registry+https://github.com/rust-lang/crates.io-index"
1046 source = "registry+https://github.com/rust-lang/crates.io-index"
1049 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1047 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
1050
1048
1051 [[package]]
1049 [[package]]
1052 name = "zstd"
1050 name = "zstd"
1053 version = "0.5.3+zstd.1.4.5"
1051 version = "0.5.3+zstd.1.4.5"
1054 source = "registry+https://github.com/rust-lang/crates.io-index"
1052 source = "registry+https://github.com/rust-lang/crates.io-index"
1055 checksum = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8"
1053 checksum = "01b32eaf771efa709e8308605bbf9319bf485dc1503179ec0469b611937c0cd8"
1056 dependencies = [
1054 dependencies = [
1057 "zstd-safe",
1055 "zstd-safe",
1058 ]
1056 ]
1059
1057
1060 [[package]]
1058 [[package]]
1061 name = "zstd-safe"
1059 name = "zstd-safe"
1062 version = "2.0.5+zstd.1.4.5"
1060 version = "2.0.5+zstd.1.4.5"
1063 source = "registry+https://github.com/rust-lang/crates.io-index"
1061 source = "registry+https://github.com/rust-lang/crates.io-index"
1064 checksum = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055"
1062 checksum = "1cfb642e0d27f64729a639c52db457e0ae906e7bc6f5fe8f5c453230400f1055"
1065 dependencies = [
1063 dependencies = [
1066 "libc",
1064 "libc",
1067 "zstd-sys",
1065 "zstd-sys",
1068 ]
1066 ]
1069
1067
1070 [[package]]
1068 [[package]]
1071 name = "zstd-sys"
1069 name = "zstd-sys"
1072 version = "1.4.17+zstd.1.4.5"
1070 version = "1.4.17+zstd.1.4.5"
1073 source = "registry+https://github.com/rust-lang/crates.io-index"
1071 source = "registry+https://github.com/rust-lang/crates.io-index"
1074 checksum = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b"
1072 checksum = "b89249644df056b522696b1bb9e7c18c87e8ffa3e2f0dc3b0155875d6498f01b"
1075 dependencies = [
1073 dependencies = [
1076 "cc",
1074 "cc",
1077 "glob",
1075 "glob",
1078 "itertools",
1076 "itertools",
1079 "libc",
1077 "libc",
1080 ]
1078 ]
General Comments 0
You need to be logged in to leave comments. Login now