##// END OF EJS Templates
dirstate: Use the Rust implementation of DirstateItem when Rust is enabled...
Simon Sapin -
r48858:d5528ac9 default
parent child Browse files
Show More
@@ -1,1323 +1,1316 b''
1 /*
1 /*
2 parsers.c - efficient content parsing
2 parsers.c - efficient content parsing
3
3
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
5
5
6 This software may be used and distributed according to the terms of
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
7 the GNU General Public License, incorporated herein by reference.
8 */
8 */
9
9
10 #define PY_SSIZE_T_CLEAN
10 #define PY_SSIZE_T_CLEAN
11 #include <Python.h>
11 #include <Python.h>
12 #include <ctype.h>
12 #include <ctype.h>
13 #include <stddef.h>
13 #include <stddef.h>
14 #include <string.h>
14 #include <string.h>
15
15
16 #include "bitmanipulation.h"
16 #include "bitmanipulation.h"
17 #include "charencode.h"
17 #include "charencode.h"
18 #include "util.h"
18 #include "util.h"
19
19
20 #ifdef IS_PY3K
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 support is fully
22 * 3 to compile. We should remove this once Python 3 support is fully
23 * supported and proper types are used in the extensions themselves. */
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
27 #define PyInt_AsLong PyLong_AsLong
28 #endif
28 #endif
29
29
30 static const char *const versionerrortext = "Python minor version mismatch";
30 static const char *const versionerrortext = "Python minor version mismatch";
31
31
32 static const int dirstate_v1_from_p2 = -2;
32 static const int dirstate_v1_from_p2 = -2;
33 static const int dirstate_v1_nonnormal = -1;
33 static const int dirstate_v1_nonnormal = -1;
34 static const int ambiguous_time = -1;
34 static const int ambiguous_time = -1;
35
35
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
37 {
37 {
38 Py_ssize_t expected_size;
38 Py_ssize_t expected_size;
39
39
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
41 return NULL;
41 return NULL;
42 }
42 }
43
43
44 return _dict_new_presized(expected_size);
44 return _dict_new_presized(expected_size);
45 }
45 }
46
46
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
48 PyObject *kwds)
48 PyObject *kwds)
49 {
49 {
50 /* We do all the initialization here and not a tp_init function because
50 /* We do all the initialization here and not a tp_init function because
51 * dirstate_item is immutable. */
51 * dirstate_item is immutable. */
52 dirstateItemObject *t;
52 dirstateItemObject *t;
53 int wc_tracked;
53 int wc_tracked;
54 int p1_tracked;
54 int p1_tracked;
55 int p2_tracked;
55 int p2_tracked;
56 int merged;
56 int merged;
57 int clean_p1;
57 int clean_p1;
58 int clean_p2;
58 int clean_p2;
59 int possibly_dirty;
59 int possibly_dirty;
60 PyObject *parentfiledata;
60 PyObject *parentfiledata;
61 static char *keywords_name[] = {
61 static char *keywords_name[] = {
62 "wc_tracked", "p1_tracked", "p2_tracked",
62 "wc_tracked", "p1_tracked", "p2_tracked",
63 "merged", "clean_p1", "clean_p2",
63 "merged", "clean_p1", "clean_p2",
64 "possibly_dirty", "parentfiledata", NULL,
64 "possibly_dirty", "parentfiledata", NULL,
65 };
65 };
66 wc_tracked = 0;
66 wc_tracked = 0;
67 p1_tracked = 0;
67 p1_tracked = 0;
68 p2_tracked = 0;
68 p2_tracked = 0;
69 merged = 0;
69 merged = 0;
70 clean_p1 = 0;
70 clean_p1 = 0;
71 clean_p2 = 0;
71 clean_p2 = 0;
72 possibly_dirty = 0;
72 possibly_dirty = 0;
73 parentfiledata = Py_None;
73 parentfiledata = Py_None;
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
75 &wc_tracked, &p1_tracked, &p2_tracked,
75 &wc_tracked, &p1_tracked, &p2_tracked,
76 &merged, &clean_p1, &clean_p2,
76 &merged, &clean_p1, &clean_p2,
77 &possibly_dirty, &parentfiledata
77 &possibly_dirty, &parentfiledata
78
78
79 )) {
79 )) {
80 return NULL;
80 return NULL;
81 }
81 }
82 if (merged && (clean_p1 || clean_p2)) {
82 if (merged && (clean_p1 || clean_p2)) {
83 PyErr_SetString(PyExc_RuntimeError,
83 PyErr_SetString(PyExc_RuntimeError,
84 "`merged` argument incompatible with "
84 "`merged` argument incompatible with "
85 "`clean_p1`/`clean_p2`");
85 "`clean_p1`/`clean_p2`");
86 return NULL;
86 return NULL;
87 }
87 }
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
89 if (!t) {
89 if (!t) {
90 return NULL;
90 return NULL;
91 }
91 }
92
92
93 t->flags = 0;
93 t->flags = 0;
94 if (wc_tracked) {
94 if (wc_tracked) {
95 t->flags |= dirstate_flag_wc_tracked;
95 t->flags |= dirstate_flag_wc_tracked;
96 }
96 }
97 if (p1_tracked) {
97 if (p1_tracked) {
98 t->flags |= dirstate_flag_p1_tracked;
98 t->flags |= dirstate_flag_p1_tracked;
99 }
99 }
100 if (p2_tracked) {
100 if (p2_tracked) {
101 t->flags |= dirstate_flag_p2_tracked;
101 t->flags |= dirstate_flag_p2_tracked;
102 }
102 }
103 if (possibly_dirty) {
103 if (possibly_dirty) {
104 t->flags |= dirstate_flag_possibly_dirty;
104 t->flags |= dirstate_flag_possibly_dirty;
105 }
105 }
106 if (merged) {
106 if (merged) {
107 t->flags |= dirstate_flag_merged;
107 t->flags |= dirstate_flag_merged;
108 }
108 }
109 if (clean_p1) {
109 if (clean_p1) {
110 t->flags |= dirstate_flag_clean_p1;
110 t->flags |= dirstate_flag_clean_p1;
111 }
111 }
112 if (clean_p2) {
112 if (clean_p2) {
113 t->flags |= dirstate_flag_clean_p2;
113 t->flags |= dirstate_flag_clean_p2;
114 }
114 }
115 t->mode = 0;
115 t->mode = 0;
116 t->size = dirstate_v1_nonnormal;
116 t->size = dirstate_v1_nonnormal;
117 t->mtime = ambiguous_time;
117 t->mtime = ambiguous_time;
118 if (parentfiledata != Py_None) {
118 if (parentfiledata != Py_None) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
120 PyErr_SetString(
120 PyErr_SetString(
121 PyExc_TypeError,
121 PyExc_TypeError,
122 "parentfiledata should be a Tuple or None");
122 "parentfiledata should be a Tuple or None");
123 return NULL;
123 return NULL;
124 }
124 }
125 t->mode =
125 t->mode =
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
127 t->size =
127 t->size =
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
129 t->mtime =
129 t->mtime =
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
131 }
131 }
132 return (PyObject *)t;
132 return (PyObject *)t;
133 }
133 }
134
134
135 static void dirstate_item_dealloc(PyObject *o)
135 static void dirstate_item_dealloc(PyObject *o)
136 {
136 {
137 PyObject_Del(o);
137 PyObject_Del(o);
138 }
138 }
139
139
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
141 {
141 {
142 return (self->flags & dirstate_flag_wc_tracked);
142 return (self->flags & dirstate_flag_wc_tracked);
143 }
143 }
144
144
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
146 {
146 {
147 unsigned char mask =
147 unsigned char mask =
148 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
148 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
149 dirstate_flag_p2_tracked);
149 dirstate_flag_p2_tracked);
150 unsigned char target = dirstate_flag_wc_tracked;
150 unsigned char target = dirstate_flag_wc_tracked;
151 return (self->flags & mask) == target;
151 return (self->flags & mask) == target;
152 }
152 }
153
153
154 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
154 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
155 {
155 {
156 if (self->flags & dirstate_flag_wc_tracked) {
156 if (self->flags & dirstate_flag_wc_tracked) {
157 return false;
157 return false;
158 }
158 }
159 return (self->flags &
159 return (self->flags &
160 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
160 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
161 }
161 }
162
162
163 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
163 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
164 {
164 {
165 return ((self->flags & dirstate_flag_wc_tracked) &&
165 return ((self->flags & dirstate_flag_wc_tracked) &&
166 (self->flags & dirstate_flag_merged));
166 (self->flags & dirstate_flag_merged));
167 }
167 }
168
168
169 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
169 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
170 {
170 {
171 if (!dirstate_item_c_removed(self)) {
171 if (!dirstate_item_c_removed(self)) {
172 return false;
172 return false;
173 }
173 }
174 return (self->flags & dirstate_flag_merged);
174 return (self->flags & dirstate_flag_merged);
175 }
175 }
176
176
177 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
177 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
178 {
178 {
179 if (!dirstate_item_c_tracked(self)) {
179 if (!dirstate_item_c_tracked(self)) {
180 return false;
180 return false;
181 }
181 }
182 return (self->flags & dirstate_flag_clean_p2);
182 return (self->flags & dirstate_flag_clean_p2);
183 }
183 }
184
184
185 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
185 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
186 {
186 {
187 if (!dirstate_item_c_removed(self)) {
187 if (!dirstate_item_c_removed(self)) {
188 return false;
188 return false;
189 }
189 }
190 return (self->flags & dirstate_flag_clean_p2);
190 return (self->flags & dirstate_flag_clean_p2);
191 }
191 }
192
192
193 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
193 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
194 {
194 {
195 if (dirstate_item_c_removed(self)) {
195 if (dirstate_item_c_removed(self)) {
196 return 'r';
196 return 'r';
197 } else if (dirstate_item_c_merged(self)) {
197 } else if (dirstate_item_c_merged(self)) {
198 return 'm';
198 return 'm';
199 } else if (dirstate_item_c_added(self)) {
199 } else if (dirstate_item_c_added(self)) {
200 return 'a';
200 return 'a';
201 } else {
201 } else {
202 return 'n';
202 return 'n';
203 }
203 }
204 }
204 }
205
205
206 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
206 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
207 {
207 {
208 return self->mode;
208 return self->mode;
209 }
209 }
210
210
211 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
211 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
212 {
212 {
213 if (dirstate_item_c_merged_removed(self)) {
213 if (dirstate_item_c_merged_removed(self)) {
214 return dirstate_v1_nonnormal;
214 return dirstate_v1_nonnormal;
215 } else if (dirstate_item_c_from_p2_removed(self)) {
215 } else if (dirstate_item_c_from_p2_removed(self)) {
216 return dirstate_v1_from_p2;
216 return dirstate_v1_from_p2;
217 } else if (dirstate_item_c_removed(self)) {
217 } else if (dirstate_item_c_removed(self)) {
218 return 0;
218 return 0;
219 } else if (dirstate_item_c_merged(self)) {
219 } else if (dirstate_item_c_merged(self)) {
220 return dirstate_v1_from_p2;
220 return dirstate_v1_from_p2;
221 } else if (dirstate_item_c_added(self)) {
221 } else if (dirstate_item_c_added(self)) {
222 return dirstate_v1_nonnormal;
222 return dirstate_v1_nonnormal;
223 } else if (dirstate_item_c_from_p2(self)) {
223 } else if (dirstate_item_c_from_p2(self)) {
224 return dirstate_v1_from_p2;
224 return dirstate_v1_from_p2;
225 } else if (self->flags & dirstate_flag_possibly_dirty) {
225 } else if (self->flags & dirstate_flag_possibly_dirty) {
226 return self->size; /* NON NORMAL ? */
226 return self->size; /* NON NORMAL ? */
227 } else {
227 } else {
228 return self->size;
228 return self->size;
229 }
229 }
230 }
230 }
231
231
232 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
232 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
233 {
233 {
234 if (dirstate_item_c_removed(self)) {
234 if (dirstate_item_c_removed(self)) {
235 return 0;
235 return 0;
236 } else if (self->flags & dirstate_flag_possibly_dirty) {
236 } else if (self->flags & dirstate_flag_possibly_dirty) {
237 return ambiguous_time;
237 return ambiguous_time;
238 } else if (dirstate_item_c_merged(self)) {
238 } else if (dirstate_item_c_merged(self)) {
239 return ambiguous_time;
239 return ambiguous_time;
240 } else if (dirstate_item_c_added(self)) {
240 } else if (dirstate_item_c_added(self)) {
241 return ambiguous_time;
241 return ambiguous_time;
242 } else if (dirstate_item_c_from_p2(self)) {
242 } else if (dirstate_item_c_from_p2(self)) {
243 return ambiguous_time;
243 return ambiguous_time;
244 } else {
244 } else {
245 return self->mtime;
245 return self->mtime;
246 }
246 }
247 }
247 }
248
248
249 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
249 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
250 {
250 {
251 char state = dirstate_item_c_v1_state(self);
251 char state = dirstate_item_c_v1_state(self);
252 return PyBytes_FromStringAndSize(&state, 1);
252 return PyBytes_FromStringAndSize(&state, 1);
253 };
253 };
254
254
255 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
255 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
256 {
256 {
257 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
257 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
258 };
258 };
259
259
260 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
260 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
261 {
261 {
262 return PyInt_FromLong(dirstate_item_c_v1_size(self));
262 return PyInt_FromLong(dirstate_item_c_v1_size(self));
263 };
263 };
264
264
265 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
265 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
266 {
266 {
267 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
267 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
268 };
268 };
269
269
270 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
270 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
271 PyObject *value)
271 PyObject *value)
272 {
272 {
273 long now;
273 long now;
274 if (!pylong_to_long(value, &now)) {
274 if (!pylong_to_long(value, &now)) {
275 return NULL;
275 return NULL;
276 }
276 }
277 if (dirstate_item_c_v1_state(self) == 'n' &&
277 if (dirstate_item_c_v1_state(self) == 'n' &&
278 dirstate_item_c_v1_mtime(self) == now) {
278 dirstate_item_c_v1_mtime(self) == now) {
279 Py_RETURN_TRUE;
279 Py_RETURN_TRUE;
280 } else {
280 } else {
281 Py_RETURN_FALSE;
281 Py_RETURN_FALSE;
282 }
282 }
283 };
283 };
284
284
285 /* This will never change since it's bound to V1
285 /* This will never change since it's bound to V1
286 */
286 */
287 static inline dirstateItemObject *
287 static inline dirstateItemObject *
288 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
288 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
289 {
289 {
290 dirstateItemObject *t =
290 dirstateItemObject *t =
291 PyObject_New(dirstateItemObject, &dirstateItemType);
291 PyObject_New(dirstateItemObject, &dirstateItemType);
292 if (!t) {
292 if (!t) {
293 return NULL;
293 return NULL;
294 }
294 }
295
295
296 if (state == 'm') {
296 if (state == 'm') {
297 t->flags =
297 t->flags =
298 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
298 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
299 dirstate_flag_p2_tracked | dirstate_flag_merged);
299 dirstate_flag_p2_tracked | dirstate_flag_merged);
300 t->mode = 0;
300 t->mode = 0;
301 t->size = dirstate_v1_from_p2;
301 t->size = dirstate_v1_from_p2;
302 t->mtime = ambiguous_time;
302 t->mtime = ambiguous_time;
303 } else if (state == 'a') {
303 } else if (state == 'a') {
304 t->flags = dirstate_flag_wc_tracked;
304 t->flags = dirstate_flag_wc_tracked;
305 t->mode = 0;
305 t->mode = 0;
306 t->size = dirstate_v1_nonnormal;
306 t->size = dirstate_v1_nonnormal;
307 t->mtime = ambiguous_time;
307 t->mtime = ambiguous_time;
308 } else if (state == 'r') {
308 } else if (state == 'r') {
309 t->mode = 0;
309 t->mode = 0;
310 t->size = 0;
310 t->size = 0;
311 t->mtime = 0;
311 t->mtime = 0;
312 if (size == dirstate_v1_nonnormal) {
312 if (size == dirstate_v1_nonnormal) {
313 t->flags =
313 t->flags =
314 (dirstate_flag_p1_tracked |
314 (dirstate_flag_p1_tracked |
315 dirstate_flag_p2_tracked | dirstate_flag_merged);
315 dirstate_flag_p2_tracked | dirstate_flag_merged);
316 } else if (size == dirstate_v1_from_p2) {
316 } else if (size == dirstate_v1_from_p2) {
317 t->flags =
317 t->flags =
318 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
318 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
319 } else {
319 } else {
320 t->flags = dirstate_flag_p1_tracked;
320 t->flags = dirstate_flag_p1_tracked;
321 }
321 }
322 } else if (state == 'n') {
322 } else if (state == 'n') {
323 if (size == dirstate_v1_from_p2) {
323 if (size == dirstate_v1_from_p2) {
324 t->flags =
324 t->flags =
325 (dirstate_flag_wc_tracked |
325 (dirstate_flag_wc_tracked |
326 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
326 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
327 t->mode = 0;
327 t->mode = 0;
328 t->size = dirstate_v1_from_p2;
328 t->size = dirstate_v1_from_p2;
329 t->mtime = ambiguous_time;
329 t->mtime = ambiguous_time;
330 } else if (size == dirstate_v1_nonnormal) {
330 } else if (size == dirstate_v1_nonnormal) {
331 t->flags = (dirstate_flag_wc_tracked |
331 t->flags = (dirstate_flag_wc_tracked |
332 dirstate_flag_p1_tracked |
332 dirstate_flag_p1_tracked |
333 dirstate_flag_possibly_dirty);
333 dirstate_flag_possibly_dirty);
334 t->mode = 0;
334 t->mode = 0;
335 t->size = dirstate_v1_nonnormal;
335 t->size = dirstate_v1_nonnormal;
336 t->mtime = ambiguous_time;
336 t->mtime = ambiguous_time;
337 } else if (mtime == ambiguous_time) {
337 } else if (mtime == ambiguous_time) {
338 t->flags = (dirstate_flag_wc_tracked |
338 t->flags = (dirstate_flag_wc_tracked |
339 dirstate_flag_p1_tracked |
339 dirstate_flag_p1_tracked |
340 dirstate_flag_possibly_dirty);
340 dirstate_flag_possibly_dirty);
341 t->mode = mode;
341 t->mode = mode;
342 t->size = size;
342 t->size = size;
343 t->mtime = 0;
343 t->mtime = 0;
344 } else {
344 } else {
345 t->flags = (dirstate_flag_wc_tracked |
345 t->flags = (dirstate_flag_wc_tracked |
346 dirstate_flag_p1_tracked);
346 dirstate_flag_p1_tracked);
347 t->mode = mode;
347 t->mode = mode;
348 t->size = size;
348 t->size = size;
349 t->mtime = mtime;
349 t->mtime = mtime;
350 }
350 }
351 } else {
351 } else {
352 PyErr_Format(PyExc_RuntimeError,
352 PyErr_Format(PyExc_RuntimeError,
353 "unknown state: `%c` (%d, %d, %d)", state, mode,
353 "unknown state: `%c` (%d, %d, %d)", state, mode,
354 size, mtime, NULL);
354 size, mtime, NULL);
355 Py_DECREF(t);
355 Py_DECREF(t);
356 return NULL;
356 return NULL;
357 }
357 }
358
358
359 return t;
359 return t;
360 }
360 }
361
361
362 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
362 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
363 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
363 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
364 PyObject *args)
364 PyObject *args)
365 {
365 {
366 /* We do all the initialization here and not a tp_init function because
366 /* We do all the initialization here and not a tp_init function because
367 * dirstate_item is immutable. */
367 * dirstate_item is immutable. */
368 char state;
368 char state;
369 int size, mode, mtime;
369 int size, mode, mtime;
370 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
370 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
371 return NULL;
371 return NULL;
372 }
372 }
373 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
373 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
374 };
374 };
375
375
376 /* constructor to help legacy API to build a new "added" item
376 /* constructor to help legacy API to build a new "added" item
377
377
378 Should eventually be removed */
378 Should eventually be removed */
379 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
379 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
380 {
380 {
381 dirstateItemObject *t;
381 dirstateItemObject *t;
382 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
382 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
383 if (!t) {
383 if (!t) {
384 return NULL;
384 return NULL;
385 }
385 }
386 t->flags = dirstate_flag_wc_tracked;
386 t->flags = dirstate_flag_wc_tracked;
387 t->mode = 0;
387 t->mode = 0;
388 t->size = dirstate_v1_nonnormal;
388 t->size = dirstate_v1_nonnormal;
389 t->mtime = ambiguous_time;
389 t->mtime = ambiguous_time;
390 return (PyObject *)t;
390 return (PyObject *)t;
391 };
391 };
392
392
393 /* constructor to help legacy API to build a new "merged" item
393 /* constructor to help legacy API to build a new "merged" item
394
394
395 Should eventually be removed */
395 Should eventually be removed */
396 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
396 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
397 {
397 {
398 dirstateItemObject *t;
398 dirstateItemObject *t;
399 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
399 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
400 if (!t) {
400 if (!t) {
401 return NULL;
401 return NULL;
402 }
402 }
403 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
403 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
404 dirstate_flag_p2_tracked | dirstate_flag_merged);
404 dirstate_flag_p2_tracked | dirstate_flag_merged);
405 t->mode = 0;
405 t->mode = 0;
406 t->size = dirstate_v1_from_p2;
406 t->size = dirstate_v1_from_p2;
407 t->mtime = ambiguous_time;
407 t->mtime = ambiguous_time;
408 return (PyObject *)t;
408 return (PyObject *)t;
409 };
409 };
410
410
411 /* constructor to help legacy API to build a new "from_p2" item
411 /* constructor to help legacy API to build a new "from_p2" item
412
412
413 Should eventually be removed */
413 Should eventually be removed */
414 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
414 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
415 {
415 {
416 /* We do all the initialization here and not a tp_init function because
416 /* We do all the initialization here and not a tp_init function because
417 * dirstate_item is immutable. */
417 * dirstate_item is immutable. */
418 dirstateItemObject *t;
418 dirstateItemObject *t;
419 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
419 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
420 if (!t) {
420 if (!t) {
421 return NULL;
421 return NULL;
422 }
422 }
423 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
423 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
424 dirstate_flag_clean_p2);
424 dirstate_flag_clean_p2);
425 t->mode = 0;
425 t->mode = 0;
426 t->size = dirstate_v1_from_p2;
426 t->size = dirstate_v1_from_p2;
427 t->mtime = ambiguous_time;
427 t->mtime = ambiguous_time;
428 return (PyObject *)t;
428 return (PyObject *)t;
429 };
429 };
430
430
431 /* constructor to help legacy API to build a new "possibly" item
431 /* constructor to help legacy API to build a new "possibly" item
432
432
433 Should eventually be removed */
433 Should eventually be removed */
434 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
434 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
435 {
435 {
436 /* We do all the initialization here and not a tp_init function because
436 /* We do all the initialization here and not a tp_init function because
437 * dirstate_item is immutable. */
437 * dirstate_item is immutable. */
438 dirstateItemObject *t;
438 dirstateItemObject *t;
439 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
439 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
440 if (!t) {
440 if (!t) {
441 return NULL;
441 return NULL;
442 }
442 }
443 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
443 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
444 dirstate_flag_possibly_dirty);
444 dirstate_flag_possibly_dirty);
445 t->mode = 0;
445 t->mode = 0;
446 t->size = dirstate_v1_nonnormal;
446 t->size = dirstate_v1_nonnormal;
447 t->mtime = ambiguous_time;
447 t->mtime = ambiguous_time;
448 return (PyObject *)t;
448 return (PyObject *)t;
449 };
449 };
450
450
451 /* constructor to help legacy API to build a new "normal" item
451 /* constructor to help legacy API to build a new "normal" item
452
452
453 Should eventually be removed */
453 Should eventually be removed */
454 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
454 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
455 {
455 {
456 /* We do all the initialization here and not a tp_init function because
456 /* We do all the initialization here and not a tp_init function because
457 * dirstate_item is immutable. */
457 * dirstate_item is immutable. */
458 dirstateItemObject *t;
458 dirstateItemObject *t;
459 int size, mode, mtime;
459 int size, mode, mtime;
460 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
460 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
461 return NULL;
461 return NULL;
462 }
462 }
463
463
464 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
464 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
465 if (!t) {
465 if (!t) {
466 return NULL;
466 return NULL;
467 }
467 }
468 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
468 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
469 t->mode = mode;
469 t->mode = mode;
470 t->size = size;
470 t->size = size;
471 t->mtime = mtime;
471 t->mtime = mtime;
472 return (PyObject *)t;
472 return (PyObject *)t;
473 };
473 };
474
474
475 /* This means the next status call will have to actually check its content
475 /* This means the next status call will have to actually check its content
476 to make sure it is correct. */
476 to make sure it is correct. */
477 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
477 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
478 {
478 {
479 self->flags |= dirstate_flag_possibly_dirty;
479 self->flags |= dirstate_flag_possibly_dirty;
480 Py_RETURN_NONE;
480 Py_RETURN_NONE;
481 }
481 }
482
482
483 /* See docstring of the python implementation for details */
483 /* See docstring of the python implementation for details */
484 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
484 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
485 PyObject *args)
485 PyObject *args)
486 {
486 {
487 int size, mode, mtime;
487 int size, mode, mtime;
488 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
488 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
489 return NULL;
489 return NULL;
490 }
490 }
491 self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked;
491 self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked;
492 self->mode = mode;
492 self->mode = mode;
493 self->size = size;
493 self->size = size;
494 self->mtime = mtime;
494 self->mtime = mtime;
495 Py_RETURN_NONE;
495 Py_RETURN_NONE;
496 }
496 }
497
497
498 static PyObject *dirstate_item_set_tracked(dirstateItemObject *self)
498 static PyObject *dirstate_item_set_tracked(dirstateItemObject *self)
499 {
499 {
500 self->flags |= dirstate_flag_wc_tracked;
500 self->flags |= dirstate_flag_wc_tracked;
501 self->flags |= dirstate_flag_possibly_dirty;
501 self->flags |= dirstate_flag_possibly_dirty;
502 /* size = None on the python size turn into size = NON_NORMAL when
502 /* size = None on the python size turn into size = NON_NORMAL when
503 * accessed. So the next line is currently required, but a some future
503 * accessed. So the next line is currently required, but a some future
504 * clean up would be welcome. */
504 * clean up would be welcome. */
505 self->size = dirstate_v1_nonnormal;
505 self->size = dirstate_v1_nonnormal;
506 Py_RETURN_NONE;
506 Py_RETURN_NONE;
507 }
507 }
508
508
509 static PyObject *dirstate_item_set_untracked(dirstateItemObject *self)
509 static PyObject *dirstate_item_set_untracked(dirstateItemObject *self)
510 {
510 {
511 self->flags &= ~dirstate_flag_wc_tracked;
511 self->flags &= ~dirstate_flag_wc_tracked;
512 self->mode = 0;
512 self->mode = 0;
513 self->mtime = 0;
513 self->mtime = 0;
514 self->size = 0;
514 self->size = 0;
515 Py_RETURN_NONE;
515 Py_RETURN_NONE;
516 }
516 }
517
517
518 static PyMethodDef dirstate_item_methods[] = {
518 static PyMethodDef dirstate_item_methods[] = {
519 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
519 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
520 "return a \"state\" suitable for v1 serialization"},
520 "return a \"state\" suitable for v1 serialization"},
521 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
521 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
522 "return a \"mode\" suitable for v1 serialization"},
522 "return a \"mode\" suitable for v1 serialization"},
523 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
523 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
524 "return a \"size\" suitable for v1 serialization"},
524 "return a \"size\" suitable for v1 serialization"},
525 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
525 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
526 "return a \"mtime\" suitable for v1 serialization"},
526 "return a \"mtime\" suitable for v1 serialization"},
527 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
527 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
528 "True if the stored mtime would be ambiguous with the current time"},
528 "True if the stored mtime would be ambiguous with the current time"},
529 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
529 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
530 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
530 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
531 {"new_added", (PyCFunction)dirstate_item_new_added,
531 {"new_added", (PyCFunction)dirstate_item_new_added,
532 METH_NOARGS | METH_CLASS,
532 METH_NOARGS | METH_CLASS,
533 "constructor to help legacy API to build a new \"added\" item"},
533 "constructor to help legacy API to build a new \"added\" item"},
534 {"new_merged", (PyCFunction)dirstate_item_new_merged,
534 {"new_merged", (PyCFunction)dirstate_item_new_merged,
535 METH_NOARGS | METH_CLASS,
535 METH_NOARGS | METH_CLASS,
536 "constructor to help legacy API to build a new \"merged\" item"},
536 "constructor to help legacy API to build a new \"merged\" item"},
537 {"new_from_p2", (PyCFunction)dirstate_item_new_from_p2,
537 {"new_from_p2", (PyCFunction)dirstate_item_new_from_p2,
538 METH_NOARGS | METH_CLASS,
538 METH_NOARGS | METH_CLASS,
539 "constructor to help legacy API to build a new \"from_p2\" item"},
539 "constructor to help legacy API to build a new \"from_p2\" item"},
540 {"new_possibly_dirty", (PyCFunction)dirstate_item_new_possibly_dirty,
540 {"new_possibly_dirty", (PyCFunction)dirstate_item_new_possibly_dirty,
541 METH_NOARGS | METH_CLASS,
541 METH_NOARGS | METH_CLASS,
542 "constructor to help legacy API to build a new \"possibly_dirty\" item"},
542 "constructor to help legacy API to build a new \"possibly_dirty\" item"},
543 {"new_normal", (PyCFunction)dirstate_item_new_normal,
543 {"new_normal", (PyCFunction)dirstate_item_new_normal,
544 METH_VARARGS | METH_CLASS,
544 METH_VARARGS | METH_CLASS,
545 "constructor to help legacy API to build a new \"normal\" item"},
545 "constructor to help legacy API to build a new \"normal\" item"},
546 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
546 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
547 METH_NOARGS, "mark a file as \"possibly dirty\""},
547 METH_NOARGS, "mark a file as \"possibly dirty\""},
548 {"set_clean", (PyCFunction)dirstate_item_set_clean, METH_VARARGS,
548 {"set_clean", (PyCFunction)dirstate_item_set_clean, METH_VARARGS,
549 "mark a file as \"clean\""},
549 "mark a file as \"clean\""},
550 {"set_tracked", (PyCFunction)dirstate_item_set_tracked, METH_NOARGS,
550 {"set_tracked", (PyCFunction)dirstate_item_set_tracked, METH_NOARGS,
551 "mark a file as \"tracked\""},
551 "mark a file as \"tracked\""},
552 {"set_untracked", (PyCFunction)dirstate_item_set_untracked, METH_NOARGS,
552 {"set_untracked", (PyCFunction)dirstate_item_set_untracked, METH_NOARGS,
553 "mark a file as \"untracked\""},
553 "mark a file as \"untracked\""},
554 {NULL} /* Sentinel */
554 {NULL} /* Sentinel */
555 };
555 };
556
556
557 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
557 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
558 {
558 {
559 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
559 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
560 };
560 };
561
561
562 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
562 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
563 {
563 {
564 return PyInt_FromLong(dirstate_item_c_v1_size(self));
564 return PyInt_FromLong(dirstate_item_c_v1_size(self));
565 };
565 };
566
566
567 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
567 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
568 {
568 {
569 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
569 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
570 };
570 };
571
571
572 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
572 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
573 {
573 {
574 char state = dirstate_item_c_v1_state(self);
574 char state = dirstate_item_c_v1_state(self);
575 return PyBytes_FromStringAndSize(&state, 1);
575 return PyBytes_FromStringAndSize(&state, 1);
576 };
576 };
577
577
578 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
578 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
579 {
579 {
580 if (dirstate_item_c_tracked(self)) {
580 if (dirstate_item_c_tracked(self)) {
581 Py_RETURN_TRUE;
581 Py_RETURN_TRUE;
582 } else {
582 } else {
583 Py_RETURN_FALSE;
583 Py_RETURN_FALSE;
584 }
584 }
585 };
585 };
586
586
587 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
587 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
588 {
588 {
589 if (dirstate_item_c_added(self)) {
589 if (dirstate_item_c_added(self)) {
590 Py_RETURN_TRUE;
590 Py_RETURN_TRUE;
591 } else {
591 } else {
592 Py_RETURN_FALSE;
592 Py_RETURN_FALSE;
593 }
593 }
594 };
594 };
595
595
596 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
596 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
597 {
597 {
598 if (dirstate_item_c_merged(self)) {
598 if (dirstate_item_c_merged(self)) {
599 Py_RETURN_TRUE;
599 Py_RETURN_TRUE;
600 } else {
600 } else {
601 Py_RETURN_FALSE;
601 Py_RETURN_FALSE;
602 }
602 }
603 };
603 };
604
604
605 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
605 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
606 {
606 {
607 if (dirstate_item_c_merged_removed(self)) {
607 if (dirstate_item_c_merged_removed(self)) {
608 Py_RETURN_TRUE;
608 Py_RETURN_TRUE;
609 } else {
609 } else {
610 Py_RETURN_FALSE;
610 Py_RETURN_FALSE;
611 }
611 }
612 };
612 };
613
613
614 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
614 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
615 {
615 {
616 if (dirstate_item_c_from_p2(self)) {
616 if (dirstate_item_c_from_p2(self)) {
617 Py_RETURN_TRUE;
617 Py_RETURN_TRUE;
618 } else {
618 } else {
619 Py_RETURN_FALSE;
619 Py_RETURN_FALSE;
620 }
620 }
621 };
621 };
622
622
623 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
623 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
624 {
624 {
625 if (dirstate_item_c_from_p2_removed(self)) {
625 if (dirstate_item_c_from_p2_removed(self)) {
626 Py_RETURN_TRUE;
626 Py_RETURN_TRUE;
627 } else {
627 } else {
628 Py_RETURN_FALSE;
628 Py_RETURN_FALSE;
629 }
629 }
630 };
630 };
631
631
632 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
632 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
633 {
633 {
634 if (dirstate_item_c_removed(self)) {
634 if (dirstate_item_c_removed(self)) {
635 Py_RETURN_TRUE;
635 Py_RETURN_TRUE;
636 } else {
636 } else {
637 Py_RETURN_FALSE;
637 Py_RETURN_FALSE;
638 }
638 }
639 };
639 };
640
640
641 static PyObject *dm_nonnormal(dirstateItemObject *self)
641 static PyObject *dm_nonnormal(dirstateItemObject *self)
642 {
642 {
643 if ((dirstate_item_c_v1_state(self) != 'n') ||
643 if ((dirstate_item_c_v1_state(self) != 'n') ||
644 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
644 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
645 Py_RETURN_TRUE;
645 Py_RETURN_TRUE;
646 } else {
646 } else {
647 Py_RETURN_FALSE;
647 Py_RETURN_FALSE;
648 }
648 }
649 };
649 };
650 static PyObject *dm_otherparent(dirstateItemObject *self)
650 static PyObject *dm_otherparent(dirstateItemObject *self)
651 {
651 {
652 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
652 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
653 Py_RETURN_TRUE;
653 Py_RETURN_TRUE;
654 } else {
654 } else {
655 Py_RETURN_FALSE;
655 Py_RETURN_FALSE;
656 }
656 }
657 };
657 };
658
658
659 static PyGetSetDef dirstate_item_getset[] = {
659 static PyGetSetDef dirstate_item_getset[] = {
660 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
660 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
661 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
661 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
662 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
662 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
663 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
663 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
664 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
664 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
665 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
665 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
666 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
666 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
667 "merged_removed", NULL},
667 "merged_removed", NULL},
668 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
668 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
669 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
669 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
670 "from_p2_removed", NULL},
670 "from_p2_removed", NULL},
671 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
671 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
672 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
672 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
673 {"dm_nonnormal", (getter)dm_nonnormal, NULL, "dm_nonnormal", NULL},
673 {"dm_nonnormal", (getter)dm_nonnormal, NULL, "dm_nonnormal", NULL},
674 {"dm_otherparent", (getter)dm_otherparent, NULL, "dm_otherparent", NULL},
674 {"dm_otherparent", (getter)dm_otherparent, NULL, "dm_otherparent", NULL},
675 {NULL} /* Sentinel */
675 {NULL} /* Sentinel */
676 };
676 };
677
677
678 PyTypeObject dirstateItemType = {
678 PyTypeObject dirstateItemType = {
679 PyVarObject_HEAD_INIT(NULL, 0) /* header */
679 PyVarObject_HEAD_INIT(NULL, 0) /* header */
680 "dirstate_tuple", /* tp_name */
680 "dirstate_tuple", /* tp_name */
681 sizeof(dirstateItemObject), /* tp_basicsize */
681 sizeof(dirstateItemObject), /* tp_basicsize */
682 0, /* tp_itemsize */
682 0, /* tp_itemsize */
683 (destructor)dirstate_item_dealloc, /* tp_dealloc */
683 (destructor)dirstate_item_dealloc, /* tp_dealloc */
684 0, /* tp_print */
684 0, /* tp_print */
685 0, /* tp_getattr */
685 0, /* tp_getattr */
686 0, /* tp_setattr */
686 0, /* tp_setattr */
687 0, /* tp_compare */
687 0, /* tp_compare */
688 0, /* tp_repr */
688 0, /* tp_repr */
689 0, /* tp_as_number */
689 0, /* tp_as_number */
690 0, /* tp_as_sequence */
690 0, /* tp_as_sequence */
691 0, /* tp_as_mapping */
691 0, /* tp_as_mapping */
692 0, /* tp_hash */
692 0, /* tp_hash */
693 0, /* tp_call */
693 0, /* tp_call */
694 0, /* tp_str */
694 0, /* tp_str */
695 0, /* tp_getattro */
695 0, /* tp_getattro */
696 0, /* tp_setattro */
696 0, /* tp_setattro */
697 0, /* tp_as_buffer */
697 0, /* tp_as_buffer */
698 Py_TPFLAGS_DEFAULT, /* tp_flags */
698 Py_TPFLAGS_DEFAULT, /* tp_flags */
699 "dirstate tuple", /* tp_doc */
699 "dirstate tuple", /* tp_doc */
700 0, /* tp_traverse */
700 0, /* tp_traverse */
701 0, /* tp_clear */
701 0, /* tp_clear */
702 0, /* tp_richcompare */
702 0, /* tp_richcompare */
703 0, /* tp_weaklistoffset */
703 0, /* tp_weaklistoffset */
704 0, /* tp_iter */
704 0, /* tp_iter */
705 0, /* tp_iternext */
705 0, /* tp_iternext */
706 dirstate_item_methods, /* tp_methods */
706 dirstate_item_methods, /* tp_methods */
707 0, /* tp_members */
707 0, /* tp_members */
708 dirstate_item_getset, /* tp_getset */
708 dirstate_item_getset, /* tp_getset */
709 0, /* tp_base */
709 0, /* tp_base */
710 0, /* tp_dict */
710 0, /* tp_dict */
711 0, /* tp_descr_get */
711 0, /* tp_descr_get */
712 0, /* tp_descr_set */
712 0, /* tp_descr_set */
713 0, /* tp_dictoffset */
713 0, /* tp_dictoffset */
714 0, /* tp_init */
714 0, /* tp_init */
715 0, /* tp_alloc */
715 0, /* tp_alloc */
716 dirstate_item_new, /* tp_new */
716 dirstate_item_new, /* tp_new */
717 };
717 };
718
718
719 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
719 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
720 {
720 {
721 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
721 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
722 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
722 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
723 char state, *cur, *str, *cpos;
723 char state, *cur, *str, *cpos;
724 int mode, size, mtime;
724 int mode, size, mtime;
725 unsigned int flen, pos = 40;
725 unsigned int flen, pos = 40;
726 Py_ssize_t len = 40;
726 Py_ssize_t len = 40;
727 Py_ssize_t readlen;
727 Py_ssize_t readlen;
728
728
729 if (!PyArg_ParseTuple(
729 if (!PyArg_ParseTuple(
730 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
730 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
731 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
731 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
732 goto quit;
732 goto quit;
733 }
733 }
734
734
735 len = readlen;
735 len = readlen;
736
736
737 /* read parents */
737 /* read parents */
738 if (len < 40) {
738 if (len < 40) {
739 PyErr_SetString(PyExc_ValueError,
739 PyErr_SetString(PyExc_ValueError,
740 "too little data for parents");
740 "too little data for parents");
741 goto quit;
741 goto quit;
742 }
742 }
743
743
744 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
744 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
745 str + 20, (Py_ssize_t)20);
745 str + 20, (Py_ssize_t)20);
746 if (!parents) {
746 if (!parents) {
747 goto quit;
747 goto quit;
748 }
748 }
749
749
750 /* read filenames */
750 /* read filenames */
751 while (pos >= 40 && pos < len) {
751 while (pos >= 40 && pos < len) {
752 if (pos + 17 > len) {
752 if (pos + 17 > len) {
753 PyErr_SetString(PyExc_ValueError,
753 PyErr_SetString(PyExc_ValueError,
754 "overflow in dirstate");
754 "overflow in dirstate");
755 goto quit;
755 goto quit;
756 }
756 }
757 cur = str + pos;
757 cur = str + pos;
758 /* unpack header */
758 /* unpack header */
759 state = *cur;
759 state = *cur;
760 mode = getbe32(cur + 1);
760 mode = getbe32(cur + 1);
761 size = getbe32(cur + 5);
761 size = getbe32(cur + 5);
762 mtime = getbe32(cur + 9);
762 mtime = getbe32(cur + 9);
763 flen = getbe32(cur + 13);
763 flen = getbe32(cur + 13);
764 pos += 17;
764 pos += 17;
765 cur += 17;
765 cur += 17;
766 if (flen > len - pos) {
766 if (flen > len - pos) {
767 PyErr_SetString(PyExc_ValueError,
767 PyErr_SetString(PyExc_ValueError,
768 "overflow in dirstate");
768 "overflow in dirstate");
769 goto quit;
769 goto quit;
770 }
770 }
771
771
772 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
772 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
773 size, mtime);
773 size, mtime);
774 if (!entry)
774 if (!entry)
775 goto quit;
775 goto quit;
776 cpos = memchr(cur, 0, flen);
776 cpos = memchr(cur, 0, flen);
777 if (cpos) {
777 if (cpos) {
778 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
778 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
779 cname = PyBytes_FromStringAndSize(
779 cname = PyBytes_FromStringAndSize(
780 cpos + 1, flen - (cpos - cur) - 1);
780 cpos + 1, flen - (cpos - cur) - 1);
781 if (!fname || !cname ||
781 if (!fname || !cname ||
782 PyDict_SetItem(cmap, fname, cname) == -1 ||
782 PyDict_SetItem(cmap, fname, cname) == -1 ||
783 PyDict_SetItem(dmap, fname, entry) == -1) {
783 PyDict_SetItem(dmap, fname, entry) == -1) {
784 goto quit;
784 goto quit;
785 }
785 }
786 Py_DECREF(cname);
786 Py_DECREF(cname);
787 } else {
787 } else {
788 fname = PyBytes_FromStringAndSize(cur, flen);
788 fname = PyBytes_FromStringAndSize(cur, flen);
789 if (!fname ||
789 if (!fname ||
790 PyDict_SetItem(dmap, fname, entry) == -1) {
790 PyDict_SetItem(dmap, fname, entry) == -1) {
791 goto quit;
791 goto quit;
792 }
792 }
793 }
793 }
794 Py_DECREF(fname);
794 Py_DECREF(fname);
795 Py_DECREF(entry);
795 Py_DECREF(entry);
796 fname = cname = entry = NULL;
796 fname = cname = entry = NULL;
797 pos += flen;
797 pos += flen;
798 }
798 }
799
799
800 ret = parents;
800 ret = parents;
801 Py_INCREF(ret);
801 Py_INCREF(ret);
802 quit:
802 quit:
803 Py_XDECREF(fname);
803 Py_XDECREF(fname);
804 Py_XDECREF(cname);
804 Py_XDECREF(cname);
805 Py_XDECREF(entry);
805 Py_XDECREF(entry);
806 Py_XDECREF(parents);
806 Py_XDECREF(parents);
807 return ret;
807 return ret;
808 }
808 }
809
809
810 /*
810 /*
811 * Build a set of non-normal and other parent entries from the dirstate dmap
811 * Build a set of non-normal and other parent entries from the dirstate dmap
812 */
812 */
813 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
813 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
814 {
814 {
815 PyObject *dmap, *fname, *v;
815 PyObject *dmap, *fname, *v;
816 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
816 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
817 Py_ssize_t pos;
817 Py_ssize_t pos;
818
818
819 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
819 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
820 &dmap)) {
820 &dmap)) {
821 goto bail;
821 goto bail;
822 }
822 }
823
823
824 nonnset = PySet_New(NULL);
824 nonnset = PySet_New(NULL);
825 if (nonnset == NULL) {
825 if (nonnset == NULL) {
826 goto bail;
826 goto bail;
827 }
827 }
828
828
829 otherpset = PySet_New(NULL);
829 otherpset = PySet_New(NULL);
830 if (otherpset == NULL) {
830 if (otherpset == NULL) {
831 goto bail;
831 goto bail;
832 }
832 }
833
833
834 pos = 0;
834 pos = 0;
835 while (PyDict_Next(dmap, &pos, &fname, &v)) {
835 while (PyDict_Next(dmap, &pos, &fname, &v)) {
836 dirstateItemObject *t;
836 dirstateItemObject *t;
837 if (!dirstate_tuple_check(v)) {
837 if (!dirstate_tuple_check(v)) {
838 PyErr_SetString(PyExc_TypeError,
838 PyErr_SetString(PyExc_TypeError,
839 "expected a dirstate tuple");
839 "expected a dirstate tuple");
840 goto bail;
840 goto bail;
841 }
841 }
842 t = (dirstateItemObject *)v;
842 t = (dirstateItemObject *)v;
843
843
844 if (dirstate_item_c_from_p2(t)) {
844 if (dirstate_item_c_from_p2(t)) {
845 if (PySet_Add(otherpset, fname) == -1) {
845 if (PySet_Add(otherpset, fname) == -1) {
846 goto bail;
846 goto bail;
847 }
847 }
848 }
848 }
849 if (!(t->flags & dirstate_flag_wc_tracked) ||
849 if (!(t->flags & dirstate_flag_wc_tracked) ||
850 !(t->flags &
850 !(t->flags &
851 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked)) ||
851 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked)) ||
852 (t->flags &
852 (t->flags &
853 (dirstate_flag_possibly_dirty | dirstate_flag_merged))) {
853 (dirstate_flag_possibly_dirty | dirstate_flag_merged))) {
854 if (PySet_Add(nonnset, fname) == -1) {
854 if (PySet_Add(nonnset, fname) == -1) {
855 goto bail;
855 goto bail;
856 }
856 }
857 }
857 }
858 }
858 }
859
859
860 result = Py_BuildValue("(OO)", nonnset, otherpset);
860 result = Py_BuildValue("(OO)", nonnset, otherpset);
861 if (result == NULL) {
861 if (result == NULL) {
862 goto bail;
862 goto bail;
863 }
863 }
864 Py_DECREF(nonnset);
864 Py_DECREF(nonnset);
865 Py_DECREF(otherpset);
865 Py_DECREF(otherpset);
866 return result;
866 return result;
867 bail:
867 bail:
868 Py_XDECREF(nonnset);
868 Py_XDECREF(nonnset);
869 Py_XDECREF(otherpset);
869 Py_XDECREF(otherpset);
870 Py_XDECREF(result);
870 Py_XDECREF(result);
871 return NULL;
871 return NULL;
872 }
872 }
873
873
874 /*
874 /*
875 * Efficiently pack a dirstate object into its on-disk format.
875 * Efficiently pack a dirstate object into its on-disk format.
876 */
876 */
877 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
877 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
878 {
878 {
879 PyObject *packobj = NULL;
879 PyObject *packobj = NULL;
880 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
880 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
881 Py_ssize_t nbytes, pos, l;
881 Py_ssize_t nbytes, pos, l;
882 PyObject *k, *v = NULL, *pn;
882 PyObject *k, *v = NULL, *pn;
883 char *p, *s;
883 char *p, *s;
884 int now;
884 int now;
885
885
886 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
886 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
887 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
887 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
888 &now)) {
888 &now)) {
889 return NULL;
889 return NULL;
890 }
890 }
891
891
892 if (PyTuple_Size(pl) != 2) {
892 if (PyTuple_Size(pl) != 2) {
893 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
893 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
894 return NULL;
894 return NULL;
895 }
895 }
896
896
897 /* Figure out how much we need to allocate. */
897 /* Figure out how much we need to allocate. */
898 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
898 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
899 PyObject *c;
899 PyObject *c;
900 if (!PyBytes_Check(k)) {
900 if (!PyBytes_Check(k)) {
901 PyErr_SetString(PyExc_TypeError, "expected string key");
901 PyErr_SetString(PyExc_TypeError, "expected string key");
902 goto bail;
902 goto bail;
903 }
903 }
904 nbytes += PyBytes_GET_SIZE(k) + 17;
904 nbytes += PyBytes_GET_SIZE(k) + 17;
905 c = PyDict_GetItem(copymap, k);
905 c = PyDict_GetItem(copymap, k);
906 if (c) {
906 if (c) {
907 if (!PyBytes_Check(c)) {
907 if (!PyBytes_Check(c)) {
908 PyErr_SetString(PyExc_TypeError,
908 PyErr_SetString(PyExc_TypeError,
909 "expected string key");
909 "expected string key");
910 goto bail;
910 goto bail;
911 }
911 }
912 nbytes += PyBytes_GET_SIZE(c) + 1;
912 nbytes += PyBytes_GET_SIZE(c) + 1;
913 }
913 }
914 }
914 }
915
915
916 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
916 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
917 if (packobj == NULL) {
917 if (packobj == NULL) {
918 goto bail;
918 goto bail;
919 }
919 }
920
920
921 p = PyBytes_AS_STRING(packobj);
921 p = PyBytes_AS_STRING(packobj);
922
922
923 pn = PyTuple_GET_ITEM(pl, 0);
923 pn = PyTuple_GET_ITEM(pl, 0);
924 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
924 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
925 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
925 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
926 goto bail;
926 goto bail;
927 }
927 }
928 memcpy(p, s, l);
928 memcpy(p, s, l);
929 p += 20;
929 p += 20;
930 pn = PyTuple_GET_ITEM(pl, 1);
930 pn = PyTuple_GET_ITEM(pl, 1);
931 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
931 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
932 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
932 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
933 goto bail;
933 goto bail;
934 }
934 }
935 memcpy(p, s, l);
935 memcpy(p, s, l);
936 p += 20;
936 p += 20;
937
937
938 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
938 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
939 dirstateItemObject *tuple;
939 dirstateItemObject *tuple;
940 char state;
940 char state;
941 int mode, size, mtime;
941 int mode, size, mtime;
942 Py_ssize_t len, l;
942 Py_ssize_t len, l;
943 PyObject *o;
943 PyObject *o;
944 char *t;
944 char *t;
945
945
946 if (!dirstate_tuple_check(v)) {
946 if (!dirstate_tuple_check(v)) {
947 PyErr_SetString(PyExc_TypeError,
947 PyErr_SetString(PyExc_TypeError,
948 "expected a dirstate tuple");
948 "expected a dirstate tuple");
949 goto bail;
949 goto bail;
950 }
950 }
951 tuple = (dirstateItemObject *)v;
951 tuple = (dirstateItemObject *)v;
952
952
953 state = dirstate_item_c_v1_state(tuple);
953 state = dirstate_item_c_v1_state(tuple);
954 mode = dirstate_item_c_v1_mode(tuple);
954 mode = dirstate_item_c_v1_mode(tuple);
955 size = dirstate_item_c_v1_size(tuple);
955 size = dirstate_item_c_v1_size(tuple);
956 mtime = dirstate_item_c_v1_mtime(tuple);
956 mtime = dirstate_item_c_v1_mtime(tuple);
957 if (state == 'n' && mtime == now) {
957 if (state == 'n' && mtime == now) {
958 /* See pure/parsers.py:pack_dirstate for why we do
958 /* See pure/parsers.py:pack_dirstate for why we do
959 * this. */
959 * this. */
960 mtime = -1;
960 mtime = -1;
961 mtime_unset = (PyObject *)dirstate_item_from_v1_data(
961 mtime_unset = (PyObject *)dirstate_item_from_v1_data(
962 state, mode, size, mtime);
962 state, mode, size, mtime);
963 if (!mtime_unset) {
963 if (!mtime_unset) {
964 goto bail;
964 goto bail;
965 }
965 }
966 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
966 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
967 goto bail;
967 goto bail;
968 }
968 }
969 Py_DECREF(mtime_unset);
969 Py_DECREF(mtime_unset);
970 mtime_unset = NULL;
970 mtime_unset = NULL;
971 }
971 }
972 *p++ = state;
972 *p++ = state;
973 putbe32((uint32_t)mode, p);
973 putbe32((uint32_t)mode, p);
974 putbe32((uint32_t)size, p + 4);
974 putbe32((uint32_t)size, p + 4);
975 putbe32((uint32_t)mtime, p + 8);
975 putbe32((uint32_t)mtime, p + 8);
976 t = p + 12;
976 t = p + 12;
977 p += 16;
977 p += 16;
978 len = PyBytes_GET_SIZE(k);
978 len = PyBytes_GET_SIZE(k);
979 memcpy(p, PyBytes_AS_STRING(k), len);
979 memcpy(p, PyBytes_AS_STRING(k), len);
980 p += len;
980 p += len;
981 o = PyDict_GetItem(copymap, k);
981 o = PyDict_GetItem(copymap, k);
982 if (o) {
982 if (o) {
983 *p++ = '\0';
983 *p++ = '\0';
984 l = PyBytes_GET_SIZE(o);
984 l = PyBytes_GET_SIZE(o);
985 memcpy(p, PyBytes_AS_STRING(o), l);
985 memcpy(p, PyBytes_AS_STRING(o), l);
986 p += l;
986 p += l;
987 len += l + 1;
987 len += l + 1;
988 }
988 }
989 putbe32((uint32_t)len, t);
989 putbe32((uint32_t)len, t);
990 }
990 }
991
991
992 pos = p - PyBytes_AS_STRING(packobj);
992 pos = p - PyBytes_AS_STRING(packobj);
993 if (pos != nbytes) {
993 if (pos != nbytes) {
994 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
994 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
995 (long)pos, (long)nbytes);
995 (long)pos, (long)nbytes);
996 goto bail;
996 goto bail;
997 }
997 }
998
998
999 return packobj;
999 return packobj;
1000 bail:
1000 bail:
1001 Py_XDECREF(mtime_unset);
1001 Py_XDECREF(mtime_unset);
1002 Py_XDECREF(packobj);
1002 Py_XDECREF(packobj);
1003 Py_XDECREF(v);
1003 Py_XDECREF(v);
1004 return NULL;
1004 return NULL;
1005 }
1005 }
1006
1006
/* Obsolescence-marker (format 1) flag bits and fixed header size. */
#define BUMPED_FIX 1
#define USING_SHA_256 2
#define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
1011 static PyObject *readshas(const char *source, unsigned char num,
1011 static PyObject *readshas(const char *source, unsigned char num,
1012 Py_ssize_t hashwidth)
1012 Py_ssize_t hashwidth)
1013 {
1013 {
1014 int i;
1014 int i;
1015 PyObject *list = PyTuple_New(num);
1015 PyObject *list = PyTuple_New(num);
1016 if (list == NULL) {
1016 if (list == NULL) {
1017 return NULL;
1017 return NULL;
1018 }
1018 }
1019 for (i = 0; i < num; i++) {
1019 for (i = 0; i < num; i++) {
1020 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1020 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1021 if (hash == NULL) {
1021 if (hash == NULL) {
1022 Py_DECREF(list);
1022 Py_DECREF(list);
1023 return NULL;
1023 return NULL;
1024 }
1024 }
1025 PyTuple_SET_ITEM(list, i, hash);
1025 PyTuple_SET_ITEM(list, i, hash);
1026 source += hashwidth;
1026 source += hashwidth;
1027 }
1027 }
1028 return list;
1028 return list;
1029 }
1029 }
1030
1030
1031 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
1031 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
1032 uint32_t *msize)
1032 uint32_t *msize)
1033 {
1033 {
1034 const char *data = databegin;
1034 const char *data = databegin;
1035 const char *meta;
1035 const char *meta;
1036
1036
1037 double mtime;
1037 double mtime;
1038 int16_t tz;
1038 int16_t tz;
1039 uint16_t flags;
1039 uint16_t flags;
1040 unsigned char nsuccs, nparents, nmetadata;
1040 unsigned char nsuccs, nparents, nmetadata;
1041 Py_ssize_t hashwidth = 20;
1041 Py_ssize_t hashwidth = 20;
1042
1042
1043 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
1043 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
1044 PyObject *metadata = NULL, *ret = NULL;
1044 PyObject *metadata = NULL, *ret = NULL;
1045 int i;
1045 int i;
1046
1046
1047 if (data + FM1_HEADER_SIZE > dataend) {
1047 if (data + FM1_HEADER_SIZE > dataend) {
1048 goto overflow;
1048 goto overflow;
1049 }
1049 }
1050
1050
1051 *msize = getbe32(data);
1051 *msize = getbe32(data);
1052 data += 4;
1052 data += 4;
1053 mtime = getbefloat64(data);
1053 mtime = getbefloat64(data);
1054 data += 8;
1054 data += 8;
1055 tz = getbeint16(data);
1055 tz = getbeint16(data);
1056 data += 2;
1056 data += 2;
1057 flags = getbeuint16(data);
1057 flags = getbeuint16(data);
1058 data += 2;
1058 data += 2;
1059
1059
1060 if (flags & USING_SHA_256) {
1060 if (flags & USING_SHA_256) {
1061 hashwidth = 32;
1061 hashwidth = 32;
1062 }
1062 }
1063
1063
1064 nsuccs = (unsigned char)(*data++);
1064 nsuccs = (unsigned char)(*data++);
1065 nparents = (unsigned char)(*data++);
1065 nparents = (unsigned char)(*data++);
1066 nmetadata = (unsigned char)(*data++);
1066 nmetadata = (unsigned char)(*data++);
1067
1067
1068 if (databegin + *msize > dataend) {
1068 if (databegin + *msize > dataend) {
1069 goto overflow;
1069 goto overflow;
1070 }
1070 }
1071 dataend = databegin + *msize; /* narrow down to marker size */
1071 dataend = databegin + *msize; /* narrow down to marker size */
1072
1072
1073 if (data + hashwidth > dataend) {
1073 if (data + hashwidth > dataend) {
1074 goto overflow;
1074 goto overflow;
1075 }
1075 }
1076 prec = PyBytes_FromStringAndSize(data, hashwidth);
1076 prec = PyBytes_FromStringAndSize(data, hashwidth);
1077 data += hashwidth;
1077 data += hashwidth;
1078 if (prec == NULL) {
1078 if (prec == NULL) {
1079 goto bail;
1079 goto bail;
1080 }
1080 }
1081
1081
1082 if (data + nsuccs * hashwidth > dataend) {
1082 if (data + nsuccs * hashwidth > dataend) {
1083 goto overflow;
1083 goto overflow;
1084 }
1084 }
1085 succs = readshas(data, nsuccs, hashwidth);
1085 succs = readshas(data, nsuccs, hashwidth);
1086 if (succs == NULL) {
1086 if (succs == NULL) {
1087 goto bail;
1087 goto bail;
1088 }
1088 }
1089 data += nsuccs * hashwidth;
1089 data += nsuccs * hashwidth;
1090
1090
1091 if (nparents == 1 || nparents == 2) {
1091 if (nparents == 1 || nparents == 2) {
1092 if (data + nparents * hashwidth > dataend) {
1092 if (data + nparents * hashwidth > dataend) {
1093 goto overflow;
1093 goto overflow;
1094 }
1094 }
1095 parents = readshas(data, nparents, hashwidth);
1095 parents = readshas(data, nparents, hashwidth);
1096 if (parents == NULL) {
1096 if (parents == NULL) {
1097 goto bail;
1097 goto bail;
1098 }
1098 }
1099 data += nparents * hashwidth;
1099 data += nparents * hashwidth;
1100 } else {
1100 } else {
1101 parents = Py_None;
1101 parents = Py_None;
1102 Py_INCREF(parents);
1102 Py_INCREF(parents);
1103 }
1103 }
1104
1104
1105 if (data + 2 * nmetadata > dataend) {
1105 if (data + 2 * nmetadata > dataend) {
1106 goto overflow;
1106 goto overflow;
1107 }
1107 }
1108 meta = data + (2 * nmetadata);
1108 meta = data + (2 * nmetadata);
1109 metadata = PyTuple_New(nmetadata);
1109 metadata = PyTuple_New(nmetadata);
1110 if (metadata == NULL) {
1110 if (metadata == NULL) {
1111 goto bail;
1111 goto bail;
1112 }
1112 }
1113 for (i = 0; i < nmetadata; i++) {
1113 for (i = 0; i < nmetadata; i++) {
1114 PyObject *tmp, *left = NULL, *right = NULL;
1114 PyObject *tmp, *left = NULL, *right = NULL;
1115 Py_ssize_t leftsize = (unsigned char)(*data++);
1115 Py_ssize_t leftsize = (unsigned char)(*data++);
1116 Py_ssize_t rightsize = (unsigned char)(*data++);
1116 Py_ssize_t rightsize = (unsigned char)(*data++);
1117 if (meta + leftsize + rightsize > dataend) {
1117 if (meta + leftsize + rightsize > dataend) {
1118 goto overflow;
1118 goto overflow;
1119 }
1119 }
1120 left = PyBytes_FromStringAndSize(meta, leftsize);
1120 left = PyBytes_FromStringAndSize(meta, leftsize);
1121 meta += leftsize;
1121 meta += leftsize;
1122 right = PyBytes_FromStringAndSize(meta, rightsize);
1122 right = PyBytes_FromStringAndSize(meta, rightsize);
1123 meta += rightsize;
1123 meta += rightsize;
1124 tmp = PyTuple_New(2);
1124 tmp = PyTuple_New(2);
1125 if (!left || !right || !tmp) {
1125 if (!left || !right || !tmp) {
1126 Py_XDECREF(left);
1126 Py_XDECREF(left);
1127 Py_XDECREF(right);
1127 Py_XDECREF(right);
1128 Py_XDECREF(tmp);
1128 Py_XDECREF(tmp);
1129 goto bail;
1129 goto bail;
1130 }
1130 }
1131 PyTuple_SET_ITEM(tmp, 0, left);
1131 PyTuple_SET_ITEM(tmp, 0, left);
1132 PyTuple_SET_ITEM(tmp, 1, right);
1132 PyTuple_SET_ITEM(tmp, 1, right);
1133 PyTuple_SET_ITEM(metadata, i, tmp);
1133 PyTuple_SET_ITEM(metadata, i, tmp);
1134 }
1134 }
1135 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
1135 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
1136 (int)tz * 60, parents);
1136 (int)tz * 60, parents);
1137 goto bail; /* return successfully */
1137 goto bail; /* return successfully */
1138
1138
1139 overflow:
1139 overflow:
1140 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
1140 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
1141 bail:
1141 bail:
1142 Py_XDECREF(prec);
1142 Py_XDECREF(prec);
1143 Py_XDECREF(succs);
1143 Py_XDECREF(succs);
1144 Py_XDECREF(metadata);
1144 Py_XDECREF(metadata);
1145 Py_XDECREF(parents);
1145 Py_XDECREF(parents);
1146 return ret;
1146 return ret;
1147 }
1147 }
1148
1148
1149 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1149 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1150 {
1150 {
1151 const char *data, *dataend;
1151 const char *data, *dataend;
1152 Py_ssize_t datalen, offset, stop;
1152 Py_ssize_t datalen, offset, stop;
1153 PyObject *markers = NULL;
1153 PyObject *markers = NULL;
1154
1154
1155 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1155 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1156 &offset, &stop)) {
1156 &offset, &stop)) {
1157 return NULL;
1157 return NULL;
1158 }
1158 }
1159 if (offset < 0) {
1159 if (offset < 0) {
1160 PyErr_SetString(PyExc_ValueError,
1160 PyErr_SetString(PyExc_ValueError,
1161 "invalid negative offset in fm1readmarkers");
1161 "invalid negative offset in fm1readmarkers");
1162 return NULL;
1162 return NULL;
1163 }
1163 }
1164 if (stop > datalen) {
1164 if (stop > datalen) {
1165 PyErr_SetString(
1165 PyErr_SetString(
1166 PyExc_ValueError,
1166 PyExc_ValueError,
1167 "stop longer than data length in fm1readmarkers");
1167 "stop longer than data length in fm1readmarkers");
1168 return NULL;
1168 return NULL;
1169 }
1169 }
1170 dataend = data + datalen;
1170 dataend = data + datalen;
1171 data += offset;
1171 data += offset;
1172 markers = PyList_New(0);
1172 markers = PyList_New(0);
1173 if (!markers) {
1173 if (!markers) {
1174 return NULL;
1174 return NULL;
1175 }
1175 }
1176 while (offset < stop) {
1176 while (offset < stop) {
1177 uint32_t msize;
1177 uint32_t msize;
1178 int error;
1178 int error;
1179 PyObject *record = fm1readmarker(data, dataend, &msize);
1179 PyObject *record = fm1readmarker(data, dataend, &msize);
1180 if (!record) {
1180 if (!record) {
1181 goto bail;
1181 goto bail;
1182 }
1182 }
1183 error = PyList_Append(markers, record);
1183 error = PyList_Append(markers, record);
1184 Py_DECREF(record);
1184 Py_DECREF(record);
1185 if (error) {
1185 if (error) {
1186 goto bail;
1186 goto bail;
1187 }
1187 }
1188 data += msize;
1188 data += msize;
1189 offset += msize;
1189 offset += msize;
1190 }
1190 }
1191 return markers;
1191 return markers;
1192 bail:
1192 bail:
1193 Py_DECREF(markers);
1193 Py_DECREF(markers);
1194 return NULL;
1194 return NULL;
1195 }
1195 }
1196
1196
1197 static char parsers_doc[] = "Efficient content parsing.";
1197 static char parsers_doc[] = "Efficient content parsing.";
1198
1198
1199 PyObject *encodedir(PyObject *self, PyObject *args);
1199 PyObject *encodedir(PyObject *self, PyObject *args);
1200 PyObject *pathencode(PyObject *self, PyObject *args);
1200 PyObject *pathencode(PyObject *self, PyObject *args);
1201 PyObject *lowerencode(PyObject *self, PyObject *args);
1201 PyObject *lowerencode(PyObject *self, PyObject *args);
1202 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1202 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1203
1203
1204 static PyMethodDef methods[] = {
1204 static PyMethodDef methods[] = {
1205 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1205 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1206 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1206 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1207 "create a set containing non-normal and other parent entries of given "
1207 "create a set containing non-normal and other parent entries of given "
1208 "dirstate\n"},
1208 "dirstate\n"},
1209 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1209 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1210 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1210 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1211 "parse a revlog index\n"},
1211 "parse a revlog index\n"},
1212 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1212 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1213 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1213 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1214 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1214 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1215 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1215 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1216 "construct a dict with an expected size\n"},
1216 "construct a dict with an expected size\n"},
1217 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1217 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1218 "make file foldmap\n"},
1218 "make file foldmap\n"},
1219 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1219 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1220 "escape a UTF-8 byte string to JSON (fast path)\n"},
1220 "escape a UTF-8 byte string to JSON (fast path)\n"},
1221 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1221 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1222 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1222 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1223 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1223 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1224 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1224 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1225 "parse v1 obsolete markers\n"},
1225 "parse v1 obsolete markers\n"},
1226 {NULL, NULL}};
1226 {NULL, NULL}};
1227
1227
1228 void dirs_module_init(PyObject *mod);
1228 void dirs_module_init(PyObject *mod);
1229 void manifest_module_init(PyObject *mod);
1229 void manifest_module_init(PyObject *mod);
1230 void revlog_module_init(PyObject *mod);
1230 void revlog_module_init(PyObject *mod);
1231
1231
1232 static const int version = 20;
1232 static const int version = 20;
1233
1233
1234 static void module_init(PyObject *mod)
1234 static void module_init(PyObject *mod)
1235 {
1235 {
1236 PyObject *capsule = NULL;
1237 PyModule_AddIntConstant(mod, "version", version);
1236 PyModule_AddIntConstant(mod, "version", version);
1238
1237
1239 /* This module constant has two purposes. First, it lets us unit test
1238 /* This module constant has two purposes. First, it lets us unit test
1240 * the ImportError raised without hard-coding any error text. This
1239 * the ImportError raised without hard-coding any error text. This
1241 * means we can change the text in the future without breaking tests,
1240 * means we can change the text in the future without breaking tests,
1242 * even across changesets without a recompile. Second, its presence
1241 * even across changesets without a recompile. Second, its presence
1243 * can be used to determine whether the version-checking logic is
1242 * can be used to determine whether the version-checking logic is
1244 * present, which also helps in testing across changesets without a
1243 * present, which also helps in testing across changesets without a
1245 * recompile. Note that this means the pure-Python version of parsers
1244 * recompile. Note that this means the pure-Python version of parsers
1246 * should not have this module constant. */
1245 * should not have this module constant. */
1247 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1246 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1248
1247
1249 dirs_module_init(mod);
1248 dirs_module_init(mod);
1250 manifest_module_init(mod);
1249 manifest_module_init(mod);
1251 revlog_module_init(mod);
1250 revlog_module_init(mod);
1252
1251
1253 capsule = PyCapsule_New(
1254 dirstate_item_from_v1_data,
1255 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
1256 if (capsule != NULL)
1257 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
1258
1259 if (PyType_Ready(&dirstateItemType) < 0) {
1252 if (PyType_Ready(&dirstateItemType) < 0) {
1260 return;
1253 return;
1261 }
1254 }
1262 Py_INCREF(&dirstateItemType);
1255 Py_INCREF(&dirstateItemType);
1263 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1256 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1264 }
1257 }
1265
1258
1266 static int check_python_version(void)
1259 static int check_python_version(void)
1267 {
1260 {
1268 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1261 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1269 long hexversion;
1262 long hexversion;
1270 if (!sys) {
1263 if (!sys) {
1271 return -1;
1264 return -1;
1272 }
1265 }
1273 ver = PyObject_GetAttrString(sys, "hexversion");
1266 ver = PyObject_GetAttrString(sys, "hexversion");
1274 Py_DECREF(sys);
1267 Py_DECREF(sys);
1275 if (!ver) {
1268 if (!ver) {
1276 return -1;
1269 return -1;
1277 }
1270 }
1278 hexversion = PyInt_AsLong(ver);
1271 hexversion = PyInt_AsLong(ver);
1279 Py_DECREF(ver);
1272 Py_DECREF(ver);
1280 /* sys.hexversion is a 32-bit number by default, so the -1 case
1273 /* sys.hexversion is a 32-bit number by default, so the -1 case
1281 * should only occur in unusual circumstances (e.g. if sys.hexversion
1274 * should only occur in unusual circumstances (e.g. if sys.hexversion
1282 * is manually set to an invalid value). */
1275 * is manually set to an invalid value). */
1283 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1276 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1284 PyErr_Format(PyExc_ImportError,
1277 PyErr_Format(PyExc_ImportError,
1285 "%s: The Mercurial extension "
1278 "%s: The Mercurial extension "
1286 "modules were compiled with Python " PY_VERSION
1279 "modules were compiled with Python " PY_VERSION
1287 ", but "
1280 ", but "
1288 "Mercurial is currently using Python with "
1281 "Mercurial is currently using Python with "
1289 "sys.hexversion=%ld: "
1282 "sys.hexversion=%ld: "
1290 "Python %s\n at: %s",
1283 "Python %s\n at: %s",
1291 versionerrortext, hexversion, Py_GetVersion(),
1284 versionerrortext, hexversion, Py_GetVersion(),
1292 Py_GetProgramFullPath());
1285 Py_GetProgramFullPath());
1293 return -1;
1286 return -1;
1294 }
1287 }
1295 return 0;
1288 return 0;
1296 }
1289 }
1297
1290
1298 #ifdef IS_PY3K
1291 #ifdef IS_PY3K
1299 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1292 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1300 parsers_doc, -1, methods};
1293 parsers_doc, -1, methods};
1301
1294
1302 PyMODINIT_FUNC PyInit_parsers(void)
1295 PyMODINIT_FUNC PyInit_parsers(void)
1303 {
1296 {
1304 PyObject *mod;
1297 PyObject *mod;
1305
1298
1306 if (check_python_version() == -1)
1299 if (check_python_version() == -1)
1307 return NULL;
1300 return NULL;
1308 mod = PyModule_Create(&parsers_module);
1301 mod = PyModule_Create(&parsers_module);
1309 module_init(mod);
1302 module_init(mod);
1310 return mod;
1303 return mod;
1311 }
1304 }
1312 #else
1305 #else
1313 PyMODINIT_FUNC initparsers(void)
1306 PyMODINIT_FUNC initparsers(void)
1314 {
1307 {
1315 PyObject *mod;
1308 PyObject *mod;
1316
1309
1317 if (check_python_version() == -1) {
1310 if (check_python_version() == -1) {
1318 return;
1311 return;
1319 }
1312 }
1320 mod = Py_InitModule3("parsers", methods, parsers_doc);
1313 mod = Py_InitModule3("parsers", methods, parsers_doc);
1321 module_init(mod);
1314 module_init(mod);
1322 }
1315 }
1323 #endif
1316 #endif
@@ -1,1568 +1,1568 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = dirstatemap.DirstateItem
49
49
50
50
51 class repocache(filecache):
51 class repocache(filecache):
52 """filecache for files in .hg/"""
52 """filecache for files in .hg/"""
53
53
54 def join(self, obj, fname):
54 def join(self, obj, fname):
55 return obj._opener.join(fname)
55 return obj._opener.join(fname)
56
56
57
57
58 class rootcache(filecache):
58 class rootcache(filecache):
59 """filecache for files in the repository root"""
59 """filecache for files in the repository root"""
60
60
61 def join(self, obj, fname):
61 def join(self, obj, fname):
62 return obj._join(fname)
62 return obj._join(fname)
63
63
64
64
65 def _getfsnow(vfs):
65 def _getfsnow(vfs):
66 '''Get "now" timestamp on filesystem'''
66 '''Get "now" timestamp on filesystem'''
67 tmpfd, tmpname = vfs.mkstemp()
67 tmpfd, tmpname = vfs.mkstemp()
68 try:
68 try:
69 return os.fstat(tmpfd)[stat.ST_MTIME]
69 return os.fstat(tmpfd)[stat.ST_MTIME]
70 finally:
70 finally:
71 os.close(tmpfd)
71 os.close(tmpfd)
72 vfs.unlink(tmpname)
72 vfs.unlink(tmpname)
73
73
74
74
75 def requires_parents_change(func):
75 def requires_parents_change(func):
76 def wrap(self, *args, **kwargs):
76 def wrap(self, *args, **kwargs):
77 if not self.pendingparentchange():
77 if not self.pendingparentchange():
78 msg = 'calling `%s` outside of a parentchange context'
78 msg = 'calling `%s` outside of a parentchange context'
79 msg %= func.__name__
79 msg %= func.__name__
80 raise error.ProgrammingError(msg)
80 raise error.ProgrammingError(msg)
81 return func(self, *args, **kwargs)
81 return func(self, *args, **kwargs)
82
82
83 return wrap
83 return wrap
84
84
85
85
86 def requires_no_parents_change(func):
86 def requires_no_parents_change(func):
87 def wrap(self, *args, **kwargs):
87 def wrap(self, *args, **kwargs):
88 if self.pendingparentchange():
88 if self.pendingparentchange():
89 msg = 'calling `%s` inside of a parentchange context'
89 msg = 'calling `%s` inside of a parentchange context'
90 msg %= func.__name__
90 msg %= func.__name__
91 raise error.ProgrammingError(msg)
91 raise error.ProgrammingError(msg)
92 return func(self, *args, **kwargs)
92 return func(self, *args, **kwargs)
93
93
94 return wrap
94 return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
99 def __init__(
99 def __init__(
100 self,
100 self,
101 opener,
101 opener,
102 ui,
102 ui,
103 root,
103 root,
104 validate,
104 validate,
105 sparsematchfn,
105 sparsematchfn,
106 nodeconstants,
106 nodeconstants,
107 use_dirstate_v2,
107 use_dirstate_v2,
108 ):
108 ):
109 """Create a new dirstate object.
109 """Create a new dirstate object.
110
110
111 opener is an open()-like callable that can be used to open the
111 opener is an open()-like callable that can be used to open the
112 dirstate file; root is the root of the directory tracked by
112 dirstate file; root is the root of the directory tracked by
113 the dirstate.
113 the dirstate.
114 """
114 """
115 self._use_dirstate_v2 = use_dirstate_v2
115 self._use_dirstate_v2 = use_dirstate_v2
116 self._nodeconstants = nodeconstants
116 self._nodeconstants = nodeconstants
117 self._opener = opener
117 self._opener = opener
118 self._validate = validate
118 self._validate = validate
119 self._root = root
119 self._root = root
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 self._dirty = False
124 self._dirty = False
125 self._lastnormaltime = 0
125 self._lastnormaltime = 0
126 self._ui = ui
126 self._ui = ui
127 self._filecache = {}
127 self._filecache = {}
128 self._parentwriters = 0
128 self._parentwriters = 0
129 self._filename = b'dirstate'
129 self._filename = b'dirstate'
130 self._pendingfilename = b'%s.pending' % self._filename
130 self._pendingfilename = b'%s.pending' % self._filename
131 self._plchangecallbacks = {}
131 self._plchangecallbacks = {}
132 self._origpl = None
132 self._origpl = None
133 self._updatedfiles = set()
133 self._updatedfiles = set()
134 self._mapcls = dirstatemap.dirstatemap
134 self._mapcls = dirstatemap.dirstatemap
135 # Access and cache cwd early, so we don't access it for the first time
135 # Access and cache cwd early, so we don't access it for the first time
136 # after a working-copy update caused it to not exist (accessing it then
136 # after a working-copy update caused it to not exist (accessing it then
137 # raises an exception).
137 # raises an exception).
138 self._cwd
138 self._cwd
139
139
140 def prefetch_parents(self):
140 def prefetch_parents(self):
141 """make sure the parents are loaded
141 """make sure the parents are loaded
142
142
143 Used to avoid a race condition.
143 Used to avoid a race condition.
144 """
144 """
145 self._pl
145 self._pl
146
146
147 @contextlib.contextmanager
147 @contextlib.contextmanager
148 def parentchange(self):
148 def parentchange(self):
149 """Context manager for handling dirstate parents.
149 """Context manager for handling dirstate parents.
150
150
151 If an exception occurs in the scope of the context manager,
151 If an exception occurs in the scope of the context manager,
152 the incoherent dirstate won't be written when wlock is
152 the incoherent dirstate won't be written when wlock is
153 released.
153 released.
154 """
154 """
155 self._parentwriters += 1
155 self._parentwriters += 1
156 yield
156 yield
157 # Typically we want the "undo" step of a context manager in a
157 # Typically we want the "undo" step of a context manager in a
158 # finally block so it happens even when an exception
158 # finally block so it happens even when an exception
159 # occurs. In this case, however, we only want to decrement
159 # occurs. In this case, however, we only want to decrement
160 # parentwriters if the code in the with statement exits
160 # parentwriters if the code in the with statement exits
161 # normally, so we don't have a try/finally here on purpose.
161 # normally, so we don't have a try/finally here on purpose.
162 self._parentwriters -= 1
162 self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
170 @propertycache
170 @propertycache
171 def _map(self):
171 def _map(self):
172 """Return the dirstate contents (see documentation for dirstatemap)."""
172 """Return the dirstate contents (see documentation for dirstatemap)."""
173 self._map = self._mapcls(
173 self._map = self._mapcls(
174 self._ui,
174 self._ui,
175 self._opener,
175 self._opener,
176 self._root,
176 self._root,
177 self._nodeconstants,
177 self._nodeconstants,
178 self._use_dirstate_v2,
178 self._use_dirstate_v2,
179 )
179 )
180 return self._map
180 return self._map
181
181
182 @property
182 @property
183 def _sparsematcher(self):
183 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
184 """The matcher for the sparse checkout.
185
185
186 The working directory may not include every file from a manifest. The
186 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
187 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
188 included in the working directory.
189 """
189 """
190 # TODO there is potential to cache this property. For now, the matcher
190 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
191 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
192 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
193 return self._sparsematchfn()
194
194
195 @repocache(b'branch')
195 @repocache(b'branch')
196 def _branch(self):
196 def _branch(self):
197 try:
197 try:
198 return self._opener.read(b"branch").strip() or b"default"
198 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
199 except IOError as inst:
200 if inst.errno != errno.ENOENT:
200 if inst.errno != errno.ENOENT:
201 raise
201 raise
202 return b"default"
202 return b"default"
203
203
204 @property
204 @property
205 def _pl(self):
205 def _pl(self):
206 return self._map.parents()
206 return self._map.parents()
207
207
208 def hasdir(self, d):
208 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
209 return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
220 @propertycache
220 @propertycache
221 def _slash(self):
221 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
224 @propertycache
224 @propertycache
225 def _checklink(self):
225 def _checklink(self):
226 return util.checklink(self._root)
226 return util.checklink(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkexec(self):
229 def _checkexec(self):
230 return bool(util.checkexec(self._root))
230 return bool(util.checkexec(self._root))
231
231
232 @propertycache
232 @propertycache
233 def _checkcase(self):
233 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
234 return not util.fscasesensitive(self._join(b'.hg'))
235
235
236 def _join(self, f):
236 def _join(self, f):
237 # much faster than os.path.join()
237 # much faster than os.path.join()
238 # it's safe because f is always a relative path
238 # it's safe because f is always a relative path
239 return self._rootdir + f
239 return self._rootdir + f
240
240
241 def flagfunc(self, buildfallback):
241 def flagfunc(self, buildfallback):
242 if self._checklink and self._checkexec:
242 if self._checklink and self._checkexec:
243
243
244 def f(x):
244 def f(x):
245 try:
245 try:
246 st = os.lstat(self._join(x))
246 st = os.lstat(self._join(x))
247 if util.statislink(st):
247 if util.statislink(st):
248 return b'l'
248 return b'l'
249 if util.statisexec(st):
249 if util.statisexec(st):
250 return b'x'
250 return b'x'
251 except OSError:
251 except OSError:
252 pass
252 pass
253 return b''
253 return b''
254
254
255 return f
255 return f
256
256
257 fallback = buildfallback()
257 fallback = buildfallback()
258 if self._checklink:
258 if self._checklink:
259
259
260 def f(x):
260 def f(x):
261 if os.path.islink(self._join(x)):
261 if os.path.islink(self._join(x)):
262 return b'l'
262 return b'l'
263 if b'x' in fallback(x):
263 if b'x' in fallback(x):
264 return b'x'
264 return b'x'
265 return b''
265 return b''
266
266
267 return f
267 return f
268 if self._checkexec:
268 if self._checkexec:
269
269
270 def f(x):
270 def f(x):
271 if b'l' in fallback(x):
271 if b'l' in fallback(x):
272 return b'l'
272 return b'l'
273 if util.isexec(self._join(x)):
273 if util.isexec(self._join(x)):
274 return b'x'
274 return b'x'
275 return b''
275 return b''
276
276
277 return f
277 return f
278 else:
278 else:
279 return fallback
279 return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
309 def pathto(self, f, cwd=None):
309 def pathto(self, f, cwd=None):
310 if cwd is None:
310 if cwd is None:
311 cwd = self.getcwd()
311 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
312 path = util.pathto(self._root, cwd, f)
313 if self._slash:
313 if self._slash:
314 return util.pconvert(path)
314 return util.pconvert(path)
315 return path
315 return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
336 def __contains__(self, key):
336 def __contains__(self, key):
337 return key in self._map
337 return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
342 def items(self):
342 def items(self):
343 return pycompat.iteritems(self._map)
343 return pycompat.iteritems(self._map)
344
344
345 iteritems = items
345 iteritems = items
346
346
347 def parents(self):
347 def parents(self):
348 return [self._validate(p) for p in self._pl]
348 return [self._validate(p) for p in self._pl]
349
349
350 def p1(self):
350 def p1(self):
351 return self._validate(self._pl[0])
351 return self._validate(self._pl[0])
352
352
353 def p2(self):
353 def p2(self):
354 return self._validate(self._pl[1])
354 return self._validate(self._pl[1])
355
355
356 @property
356 @property
357 def in_merge(self):
357 def in_merge(self):
358 """True if a merge is in progress"""
358 """True if a merge is in progress"""
359 return self._pl[1] != self._nodeconstants.nullid
359 return self._pl[1] != self._nodeconstants.nullid
360
360
361 def branch(self):
361 def branch(self):
362 return encoding.tolocal(self._branch)
362 return encoding.tolocal(self._branch)
363
363
364 def setparents(self, p1, p2=None):
364 def setparents(self, p1, p2=None):
365 """Set dirstate parents to p1 and p2.
365 """Set dirstate parents to p1 and p2.
366
366
367 When moving from two parents to one, "merged" entries a
367 When moving from two parents to one, "merged" entries a
368 adjusted to normal and previous copy records discarded and
368 adjusted to normal and previous copy records discarded and
369 returned by the call.
369 returned by the call.
370
370
371 See localrepo.setparents()
371 See localrepo.setparents()
372 """
372 """
373 if p2 is None:
373 if p2 is None:
374 p2 = self._nodeconstants.nullid
374 p2 = self._nodeconstants.nullid
375 if self._parentwriters == 0:
375 if self._parentwriters == 0:
376 raise ValueError(
376 raise ValueError(
377 b"cannot set dirstate parent outside of "
377 b"cannot set dirstate parent outside of "
378 b"dirstate.parentchange context manager"
378 b"dirstate.parentchange context manager"
379 )
379 )
380
380
381 self._dirty = True
381 self._dirty = True
382 oldp2 = self._pl[1]
382 oldp2 = self._pl[1]
383 if self._origpl is None:
383 if self._origpl is None:
384 self._origpl = self._pl
384 self._origpl = self._pl
385 self._map.setparents(p1, p2)
385 self._map.setparents(p1, p2)
386 copies = {}
386 copies = {}
387 nullid = self._nodeconstants.nullid
387 nullid = self._nodeconstants.nullid
388 if oldp2 != nullid and p2 == nullid:
388 if oldp2 != nullid and p2 == nullid:
389 candidatefiles = self._map.non_normal_or_other_parent_paths()
389 candidatefiles = self._map.non_normal_or_other_parent_paths()
390
390
391 for f in candidatefiles:
391 for f in candidatefiles:
392 s = self._map.get(f)
392 s = self._map.get(f)
393 if s is None:
393 if s is None:
394 continue
394 continue
395
395
396 # Discard "merged" markers when moving away from a merge state
396 # Discard "merged" markers when moving away from a merge state
397 if s.merged:
397 if s.merged:
398 source = self._map.copymap.get(f)
398 source = self._map.copymap.get(f)
399 if source:
399 if source:
400 copies[f] = source
400 copies[f] = source
401 self._map.reset_state(
401 self._map.reset_state(
402 f,
402 f,
403 wc_tracked=True,
403 wc_tracked=True,
404 p1_tracked=True,
404 p1_tracked=True,
405 possibly_dirty=True,
405 possibly_dirty=True,
406 )
406 )
407 # Also fix up otherparent markers
407 # Also fix up otherparent markers
408 elif s.from_p2:
408 elif s.from_p2:
409 source = self._map.copymap.get(f)
409 source = self._map.copymap.get(f)
410 if source:
410 if source:
411 copies[f] = source
411 copies[f] = source
412 self._check_new_tracked_filename(f)
412 self._check_new_tracked_filename(f)
413 self._updatedfiles.add(f)
413 self._updatedfiles.add(f)
414 self._map.reset_state(
414 self._map.reset_state(
415 f,
415 f,
416 p1_tracked=False,
416 p1_tracked=False,
417 wc_tracked=True,
417 wc_tracked=True,
418 )
418 )
419 return copies
419 return copies
420
420
421 def setbranch(self, branch):
421 def setbranch(self, branch):
422 self.__class__._branch.set(self, encoding.fromlocal(branch))
422 self.__class__._branch.set(self, encoding.fromlocal(branch))
423 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
423 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
424 try:
424 try:
425 f.write(self._branch + b'\n')
425 f.write(self._branch + b'\n')
426 f.close()
426 f.close()
427
427
428 # make sure filecache has the correct stat info for _branch after
428 # make sure filecache has the correct stat info for _branch after
429 # replacing the underlying file
429 # replacing the underlying file
430 ce = self._filecache[b'_branch']
430 ce = self._filecache[b'_branch']
431 if ce:
431 if ce:
432 ce.refresh()
432 ce.refresh()
433 except: # re-raises
433 except: # re-raises
434 f.discard()
434 f.discard()
435 raise
435 raise
436
436
437 def invalidate(self):
437 def invalidate(self):
438 """Causes the next access to reread the dirstate.
438 """Causes the next access to reread the dirstate.
439
439
440 This is different from localrepo.invalidatedirstate() because it always
440 This is different from localrepo.invalidatedirstate() because it always
441 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
441 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
442 check whether the dirstate has changed before rereading it."""
442 check whether the dirstate has changed before rereading it."""
443
443
444 for a in ("_map", "_branch", "_ignore"):
444 for a in ("_map", "_branch", "_ignore"):
445 if a in self.__dict__:
445 if a in self.__dict__:
446 delattr(self, a)
446 delattr(self, a)
447 self._lastnormaltime = 0
447 self._lastnormaltime = 0
448 self._dirty = False
448 self._dirty = False
449 self._updatedfiles.clear()
449 self._updatedfiles.clear()
450 self._parentwriters = 0
450 self._parentwriters = 0
451 self._origpl = None
451 self._origpl = None
452
452
453 def copy(self, source, dest):
453 def copy(self, source, dest):
454 """Mark dest as a copy of source. Unmark dest if source is None."""
454 """Mark dest as a copy of source. Unmark dest if source is None."""
455 if source == dest:
455 if source == dest:
456 return
456 return
457 self._dirty = True
457 self._dirty = True
458 if source is not None:
458 if source is not None:
459 self._map.copymap[dest] = source
459 self._map.copymap[dest] = source
460 self._updatedfiles.add(source)
460 self._updatedfiles.add(source)
461 self._updatedfiles.add(dest)
461 self._updatedfiles.add(dest)
462 elif self._map.copymap.pop(dest, None):
462 elif self._map.copymap.pop(dest, None):
463 self._updatedfiles.add(dest)
463 self._updatedfiles.add(dest)
464
464
465 def copied(self, file):
465 def copied(self, file):
466 return self._map.copymap.get(file, None)
466 return self._map.copymap.get(file, None)
467
467
468 def copies(self):
468 def copies(self):
469 return self._map.copymap
469 return self._map.copymap
470
470
471 @requires_no_parents_change
471 @requires_no_parents_change
472 def set_tracked(self, filename):
472 def set_tracked(self, filename):
473 """a "public" method for generic code to mark a file as tracked
473 """a "public" method for generic code to mark a file as tracked
474
474
475 This function is to be called outside of "update/merge" case. For
475 This function is to be called outside of "update/merge" case. For
476 example by a command like `hg add X`.
476 example by a command like `hg add X`.
477
477
478 return True the file was previously untracked, False otherwise.
478 return True the file was previously untracked, False otherwise.
479 """
479 """
480 self._dirty = True
480 self._dirty = True
481 self._updatedfiles.add(filename)
481 self._updatedfiles.add(filename)
482 entry = self._map.get(filename)
482 entry = self._map.get(filename)
483 if entry is None or not entry.tracked:
483 if entry is None or not entry.tracked:
484 self._check_new_tracked_filename(filename)
484 self._check_new_tracked_filename(filename)
485 return self._map.set_tracked(filename)
485 return self._map.set_tracked(filename)
486
486
487 @requires_no_parents_change
487 @requires_no_parents_change
488 def set_untracked(self, filename):
488 def set_untracked(self, filename):
489 """a "public" method for generic code to mark a file as untracked
489 """a "public" method for generic code to mark a file as untracked
490
490
491 This function is to be called outside of "update/merge" case. For
491 This function is to be called outside of "update/merge" case. For
492 example by a command like `hg remove X`.
492 example by a command like `hg remove X`.
493
493
494 return True the file was previously tracked, False otherwise.
494 return True the file was previously tracked, False otherwise.
495 """
495 """
496 ret = self._map.set_untracked(filename)
496 ret = self._map.set_untracked(filename)
497 if ret:
497 if ret:
498 self._dirty = True
498 self._dirty = True
499 self._updatedfiles.add(filename)
499 self._updatedfiles.add(filename)
500 return ret
500 return ret
501
501
502 @requires_no_parents_change
502 @requires_no_parents_change
503 def set_clean(self, filename, parentfiledata=None):
503 def set_clean(self, filename, parentfiledata=None):
504 """record that the current state of the file on disk is known to be clean"""
504 """record that the current state of the file on disk is known to be clean"""
505 self._dirty = True
505 self._dirty = True
506 self._updatedfiles.add(filename)
506 self._updatedfiles.add(filename)
507 if parentfiledata:
507 if parentfiledata:
508 (mode, size, mtime) = parentfiledata
508 (mode, size, mtime) = parentfiledata
509 else:
509 else:
510 (mode, size, mtime) = self._get_filedata(filename)
510 (mode, size, mtime) = self._get_filedata(filename)
511 if not self._map[filename].tracked:
511 if not self._map[filename].tracked:
512 self._check_new_tracked_filename(filename)
512 self._check_new_tracked_filename(filename)
513 self._map.set_clean(filename, mode, size, mtime)
513 self._map.set_clean(filename, mode, size, mtime)
514 if mtime > self._lastnormaltime:
514 if mtime > self._lastnormaltime:
515 # Remember the most recent modification timeslot for status(),
515 # Remember the most recent modification timeslot for status(),
516 # to make sure we won't miss future size-preserving file content
516 # to make sure we won't miss future size-preserving file content
517 # modifications that happen within the same timeslot.
517 # modifications that happen within the same timeslot.
518 self._lastnormaltime = mtime
518 self._lastnormaltime = mtime
519
519
520 @requires_no_parents_change
520 @requires_no_parents_change
521 def set_possibly_dirty(self, filename):
521 def set_possibly_dirty(self, filename):
522 """record that the current state of the file on disk is unknown"""
522 """record that the current state of the file on disk is unknown"""
523 self._dirty = True
523 self._dirty = True
524 self._updatedfiles.add(filename)
524 self._updatedfiles.add(filename)
525 self._map.set_possibly_dirty(filename)
525 self._map.set_possibly_dirty(filename)
526
526
527 @requires_parents_change
527 @requires_parents_change
528 def update_file_p1(
528 def update_file_p1(
529 self,
529 self,
530 filename,
530 filename,
531 p1_tracked,
531 p1_tracked,
532 ):
532 ):
533 """Set a file as tracked in the parent (or not)
533 """Set a file as tracked in the parent (or not)
534
534
535 This is to be called when adjust the dirstate to a new parent after an history
535 This is to be called when adjust the dirstate to a new parent after an history
536 rewriting operation.
536 rewriting operation.
537
537
538 It should not be called during a merge (p2 != nullid) and only within
538 It should not be called during a merge (p2 != nullid) and only within
539 a `with dirstate.parentchange():` context.
539 a `with dirstate.parentchange():` context.
540 """
540 """
541 if self.in_merge:
541 if self.in_merge:
542 msg = b'update_file_reference should not be called when merging'
542 msg = b'update_file_reference should not be called when merging'
543 raise error.ProgrammingError(msg)
543 raise error.ProgrammingError(msg)
544 entry = self._map.get(filename)
544 entry = self._map.get(filename)
545 if entry is None:
545 if entry is None:
546 wc_tracked = False
546 wc_tracked = False
547 else:
547 else:
548 wc_tracked = entry.tracked
548 wc_tracked = entry.tracked
549 possibly_dirty = False
549 possibly_dirty = False
550 if p1_tracked and wc_tracked:
550 if p1_tracked and wc_tracked:
551 # the underlying reference might have changed, we will have to
551 # the underlying reference might have changed, we will have to
552 # check it.
552 # check it.
553 possibly_dirty = True
553 possibly_dirty = True
554 elif not (p1_tracked or wc_tracked):
554 elif not (p1_tracked or wc_tracked):
555 # the file is no longer relevant to anyone
555 # the file is no longer relevant to anyone
556 if self._map.get(filename) is not None:
556 if self._map.get(filename) is not None:
557 self._map.reset_state(filename)
557 self._map.reset_state(filename)
558 self._dirty = True
558 self._dirty = True
559 self._updatedfiles.add(filename)
559 self._updatedfiles.add(filename)
560 elif (not p1_tracked) and wc_tracked:
560 elif (not p1_tracked) and wc_tracked:
561 if entry is not None and entry.added:
561 if entry is not None and entry.added:
562 return # avoid dropping copy information (maybe?)
562 return # avoid dropping copy information (maybe?)
563 elif p1_tracked and not wc_tracked:
563 elif p1_tracked and not wc_tracked:
564 pass
564 pass
565 else:
565 else:
566 assert False, 'unreachable'
566 assert False, 'unreachable'
567
567
568 # this mean we are doing call for file we do not really care about the
568 # this mean we are doing call for file we do not really care about the
569 # data (eg: added or removed), however this should be a minor overhead
569 # data (eg: added or removed), however this should be a minor overhead
570 # compared to the overall update process calling this.
570 # compared to the overall update process calling this.
571 parentfiledata = None
571 parentfiledata = None
572 if wc_tracked:
572 if wc_tracked:
573 parentfiledata = self._get_filedata(filename)
573 parentfiledata = self._get_filedata(filename)
574
574
575 self._updatedfiles.add(filename)
575 self._updatedfiles.add(filename)
576 self._map.reset_state(
576 self._map.reset_state(
577 filename,
577 filename,
578 wc_tracked,
578 wc_tracked,
579 p1_tracked,
579 p1_tracked,
580 possibly_dirty=possibly_dirty,
580 possibly_dirty=possibly_dirty,
581 parentfiledata=parentfiledata,
581 parentfiledata=parentfiledata,
582 )
582 )
583 if (
583 if (
584 parentfiledata is not None
584 parentfiledata is not None
585 and parentfiledata[2] > self._lastnormaltime
585 and parentfiledata[2] > self._lastnormaltime
586 ):
586 ):
587 # Remember the most recent modification timeslot for status(),
587 # Remember the most recent modification timeslot for status(),
588 # to make sure we won't miss future size-preserving file content
588 # to make sure we won't miss future size-preserving file content
589 # modifications that happen within the same timeslot.
589 # modifications that happen within the same timeslot.
590 self._lastnormaltime = parentfiledata[2]
590 self._lastnormaltime = parentfiledata[2]
591
591
592 @requires_parents_change
592 @requires_parents_change
593 def update_file(
593 def update_file(
594 self,
594 self,
595 filename,
595 filename,
596 wc_tracked,
596 wc_tracked,
597 p1_tracked,
597 p1_tracked,
598 p2_tracked=False,
598 p2_tracked=False,
599 merged=False,
599 merged=False,
600 clean_p1=False,
600 clean_p1=False,
601 clean_p2=False,
601 clean_p2=False,
602 possibly_dirty=False,
602 possibly_dirty=False,
603 parentfiledata=None,
603 parentfiledata=None,
604 ):
604 ):
605 """update the information about a file in the dirstate
605 """update the information about a file in the dirstate
606
606
607 This is to be called when the direstates parent changes to keep track
607 This is to be called when the direstates parent changes to keep track
608 of what is the file situation in regards to the working copy and its parent.
608 of what is the file situation in regards to the working copy and its parent.
609
609
610 This function must be called within a `dirstate.parentchange` context.
610 This function must be called within a `dirstate.parentchange` context.
611
611
612 note: the API is at an early stage and we might need to adjust it
612 note: the API is at an early stage and we might need to adjust it
613 depending of what information ends up being relevant and useful to
613 depending of what information ends up being relevant and useful to
614 other processing.
614 other processing.
615 """
615 """
616 if merged and (clean_p1 or clean_p2):
616 if merged and (clean_p1 or clean_p2):
617 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
617 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
618 raise error.ProgrammingError(msg)
618 raise error.ProgrammingError(msg)
619
619
620 # note: I do not think we need to double check name clash here since we
620 # note: I do not think we need to double check name clash here since we
621 # are in a update/merge case that should already have taken care of
621 # are in a update/merge case that should already have taken care of
622 # this. The test agrees
622 # this. The test agrees
623
623
624 self._dirty = True
624 self._dirty = True
625 self._updatedfiles.add(filename)
625 self._updatedfiles.add(filename)
626
626
627 need_parent_file_data = (
627 need_parent_file_data = (
628 not (possibly_dirty or clean_p2 or merged)
628 not (possibly_dirty or clean_p2 or merged)
629 and wc_tracked
629 and wc_tracked
630 and p1_tracked
630 and p1_tracked
631 )
631 )
632
632
633 # this mean we are doing call for file we do not really care about the
633 # this mean we are doing call for file we do not really care about the
634 # data (eg: added or removed), however this should be a minor overhead
634 # data (eg: added or removed), however this should be a minor overhead
635 # compared to the overall update process calling this.
635 # compared to the overall update process calling this.
636 if need_parent_file_data:
636 if need_parent_file_data:
637 if parentfiledata is None:
637 if parentfiledata is None:
638 parentfiledata = self._get_filedata(filename)
638 parentfiledata = self._get_filedata(filename)
639 mtime = parentfiledata[2]
639 mtime = parentfiledata[2]
640
640
641 if mtime > self._lastnormaltime:
641 if mtime > self._lastnormaltime:
642 # Remember the most recent modification timeslot for
642 # Remember the most recent modification timeslot for
643 # status(), to make sure we won't miss future
643 # status(), to make sure we won't miss future
644 # size-preserving file content modifications that happen
644 # size-preserving file content modifications that happen
645 # within the same timeslot.
645 # within the same timeslot.
646 self._lastnormaltime = mtime
646 self._lastnormaltime = mtime
647
647
648 self._map.reset_state(
648 self._map.reset_state(
649 filename,
649 filename,
650 wc_tracked,
650 wc_tracked,
651 p1_tracked,
651 p1_tracked,
652 p2_tracked=p2_tracked,
652 p2_tracked=p2_tracked,
653 merged=merged,
653 merged=merged,
654 clean_p1=clean_p1,
654 clean_p1=clean_p1,
655 clean_p2=clean_p2,
655 clean_p2=clean_p2,
656 possibly_dirty=possibly_dirty,
656 possibly_dirty=possibly_dirty,
657 parentfiledata=parentfiledata,
657 parentfiledata=parentfiledata,
658 )
658 )
659 if (
659 if (
660 parentfiledata is not None
660 parentfiledata is not None
661 and parentfiledata[2] > self._lastnormaltime
661 and parentfiledata[2] > self._lastnormaltime
662 ):
662 ):
663 # Remember the most recent modification timeslot for status(),
663 # Remember the most recent modification timeslot for status(),
664 # to make sure we won't miss future size-preserving file content
664 # to make sure we won't miss future size-preserving file content
665 # modifications that happen within the same timeslot.
665 # modifications that happen within the same timeslot.
666 self._lastnormaltime = parentfiledata[2]
666 self._lastnormaltime = parentfiledata[2]
667
667
668 def _check_new_tracked_filename(self, filename):
668 def _check_new_tracked_filename(self, filename):
669 scmutil.checkfilename(filename)
669 scmutil.checkfilename(filename)
670 if self._map.hastrackeddir(filename):
670 if self._map.hastrackeddir(filename):
671 msg = _(b'directory %r already in dirstate')
671 msg = _(b'directory %r already in dirstate')
672 msg %= pycompat.bytestr(filename)
672 msg %= pycompat.bytestr(filename)
673 raise error.Abort(msg)
673 raise error.Abort(msg)
674 # shadows
674 # shadows
675 for d in pathutil.finddirs(filename):
675 for d in pathutil.finddirs(filename):
676 if self._map.hastrackeddir(d):
676 if self._map.hastrackeddir(d):
677 break
677 break
678 entry = self._map.get(d)
678 entry = self._map.get(d)
679 if entry is not None and not entry.removed:
679 if entry is not None and not entry.removed:
680 msg = _(b'file %r in dirstate clashes with %r')
680 msg = _(b'file %r in dirstate clashes with %r')
681 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
681 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
682 raise error.Abort(msg)
682 raise error.Abort(msg)
683
683
684 def _get_filedata(self, filename):
684 def _get_filedata(self, filename):
685 """returns"""
685 """returns"""
686 s = os.lstat(self._join(filename))
686 s = os.lstat(self._join(filename))
687 mode = s.st_mode
687 mode = s.st_mode
688 size = s.st_size
688 size = s.st_size
689 mtime = s[stat.ST_MTIME]
689 mtime = s[stat.ST_MTIME]
690 return (mode, size, mtime)
690 return (mode, size, mtime)
691
691
692 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
692 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
693 if exists is None:
693 if exists is None:
694 exists = os.path.lexists(os.path.join(self._root, path))
694 exists = os.path.lexists(os.path.join(self._root, path))
695 if not exists:
695 if not exists:
696 # Maybe a path component exists
696 # Maybe a path component exists
697 if not ignoremissing and b'/' in path:
697 if not ignoremissing and b'/' in path:
698 d, f = path.rsplit(b'/', 1)
698 d, f = path.rsplit(b'/', 1)
699 d = self._normalize(d, False, ignoremissing, None)
699 d = self._normalize(d, False, ignoremissing, None)
700 folded = d + b"/" + f
700 folded = d + b"/" + f
701 else:
701 else:
702 # No path components, preserve original case
702 # No path components, preserve original case
703 folded = path
703 folded = path
704 else:
704 else:
705 # recursively normalize leading directory components
705 # recursively normalize leading directory components
706 # against dirstate
706 # against dirstate
707 if b'/' in normed:
707 if b'/' in normed:
708 d, f = normed.rsplit(b'/', 1)
708 d, f = normed.rsplit(b'/', 1)
709 d = self._normalize(d, False, ignoremissing, True)
709 d = self._normalize(d, False, ignoremissing, True)
710 r = self._root + b"/" + d
710 r = self._root + b"/" + d
711 folded = d + b"/" + util.fspath(f, r)
711 folded = d + b"/" + util.fspath(f, r)
712 else:
712 else:
713 folded = util.fspath(normed, self._root)
713 folded = util.fspath(normed, self._root)
714 storemap[normed] = folded
714 storemap[normed] = folded
715
715
716 return folded
716 return folded
717
717
718 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
718 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
719 normed = util.normcase(path)
719 normed = util.normcase(path)
720 folded = self._map.filefoldmap.get(normed, None)
720 folded = self._map.filefoldmap.get(normed, None)
721 if folded is None:
721 if folded is None:
722 if isknown:
722 if isknown:
723 folded = path
723 folded = path
724 else:
724 else:
725 folded = self._discoverpath(
725 folded = self._discoverpath(
726 path, normed, ignoremissing, exists, self._map.filefoldmap
726 path, normed, ignoremissing, exists, self._map.filefoldmap
727 )
727 )
728 return folded
728 return folded
729
729
730 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
730 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
731 normed = util.normcase(path)
731 normed = util.normcase(path)
732 folded = self._map.filefoldmap.get(normed, None)
732 folded = self._map.filefoldmap.get(normed, None)
733 if folded is None:
733 if folded is None:
734 folded = self._map.dirfoldmap.get(normed, None)
734 folded = self._map.dirfoldmap.get(normed, None)
735 if folded is None:
735 if folded is None:
736 if isknown:
736 if isknown:
737 folded = path
737 folded = path
738 else:
738 else:
739 # store discovered result in dirfoldmap so that future
739 # store discovered result in dirfoldmap so that future
740 # normalizefile calls don't start matching directories
740 # normalizefile calls don't start matching directories
741 folded = self._discoverpath(
741 folded = self._discoverpath(
742 path, normed, ignoremissing, exists, self._map.dirfoldmap
742 path, normed, ignoremissing, exists, self._map.dirfoldmap
743 )
743 )
744 return folded
744 return folded
745
745
746 def normalize(self, path, isknown=False, ignoremissing=False):
746 def normalize(self, path, isknown=False, ignoremissing=False):
747 """
747 """
748 normalize the case of a pathname when on a casefolding filesystem
748 normalize the case of a pathname when on a casefolding filesystem
749
749
750 isknown specifies whether the filename came from walking the
750 isknown specifies whether the filename came from walking the
751 disk, to avoid extra filesystem access.
751 disk, to avoid extra filesystem access.
752
752
753 If ignoremissing is True, missing path are returned
753 If ignoremissing is True, missing path are returned
754 unchanged. Otherwise, we try harder to normalize possibly
754 unchanged. Otherwise, we try harder to normalize possibly
755 existing path components.
755 existing path components.
756
756
757 The normalized case is determined based on the following precedence:
757 The normalized case is determined based on the following precedence:
758
758
759 - version of name already stored in the dirstate
759 - version of name already stored in the dirstate
760 - version of name stored on disk
760 - version of name stored on disk
761 - version provided via command arguments
761 - version provided via command arguments
762 """
762 """
763
763
764 if self._checkcase:
764 if self._checkcase:
765 return self._normalize(path, isknown, ignoremissing)
765 return self._normalize(path, isknown, ignoremissing)
766 return path
766 return path
767
767
768 def clear(self):
768 def clear(self):
769 self._map.clear()
769 self._map.clear()
770 self._lastnormaltime = 0
770 self._lastnormaltime = 0
771 self._updatedfiles.clear()
771 self._updatedfiles.clear()
772 self._dirty = True
772 self._dirty = True
773
773
774 def rebuild(self, parent, allfiles, changedfiles=None):
774 def rebuild(self, parent, allfiles, changedfiles=None):
775 if changedfiles is None:
775 if changedfiles is None:
776 # Rebuild entire dirstate
776 # Rebuild entire dirstate
777 to_lookup = allfiles
777 to_lookup = allfiles
778 to_drop = []
778 to_drop = []
779 lastnormaltime = self._lastnormaltime
779 lastnormaltime = self._lastnormaltime
780 self.clear()
780 self.clear()
781 self._lastnormaltime = lastnormaltime
781 self._lastnormaltime = lastnormaltime
782 elif len(changedfiles) < 10:
782 elif len(changedfiles) < 10:
783 # Avoid turning allfiles into a set, which can be expensive if it's
783 # Avoid turning allfiles into a set, which can be expensive if it's
784 # large.
784 # large.
785 to_lookup = []
785 to_lookup = []
786 to_drop = []
786 to_drop = []
787 for f in changedfiles:
787 for f in changedfiles:
788 if f in allfiles:
788 if f in allfiles:
789 to_lookup.append(f)
789 to_lookup.append(f)
790 else:
790 else:
791 to_drop.append(f)
791 to_drop.append(f)
792 else:
792 else:
793 changedfilesset = set(changedfiles)
793 changedfilesset = set(changedfiles)
794 to_lookup = changedfilesset & set(allfiles)
794 to_lookup = changedfilesset & set(allfiles)
795 to_drop = changedfilesset - to_lookup
795 to_drop = changedfilesset - to_lookup
796
796
797 if self._origpl is None:
797 if self._origpl is None:
798 self._origpl = self._pl
798 self._origpl = self._pl
799 self._map.setparents(parent, self._nodeconstants.nullid)
799 self._map.setparents(parent, self._nodeconstants.nullid)
800
800
801 for f in to_lookup:
801 for f in to_lookup:
802
802
803 if self.in_merge:
803 if self.in_merge:
804 self.set_tracked(f)
804 self.set_tracked(f)
805 else:
805 else:
806 self._map.reset_state(
806 self._map.reset_state(
807 f,
807 f,
808 wc_tracked=True,
808 wc_tracked=True,
809 p1_tracked=True,
809 p1_tracked=True,
810 possibly_dirty=True,
810 possibly_dirty=True,
811 )
811 )
812 self._updatedfiles.add(f)
812 self._updatedfiles.add(f)
813 for f in to_drop:
813 for f in to_drop:
814 self._map.reset_state(f)
814 self._map.reset_state(f)
815 self._updatedfiles.add(f)
815 self._updatedfiles.add(f)
816
816
817 self._dirty = True
817 self._dirty = True
818
818
819 def identity(self):
819 def identity(self):
820 """Return identity of dirstate itself to detect changing in storage
820 """Return identity of dirstate itself to detect changing in storage
821
821
822 If identity of previous dirstate is equal to this, writing
822 If identity of previous dirstate is equal to this, writing
823 changes based on the former dirstate out can keep consistency.
823 changes based on the former dirstate out can keep consistency.
824 """
824 """
825 return self._map.identity
825 return self._map.identity
826
826
827 def write(self, tr):
827 def write(self, tr):
828 if not self._dirty:
828 if not self._dirty:
829 return
829 return
830
830
831 filename = self._filename
831 filename = self._filename
832 if tr:
832 if tr:
833 # 'dirstate.write()' is not only for writing in-memory
833 # 'dirstate.write()' is not only for writing in-memory
834 # changes out, but also for dropping ambiguous timestamp.
834 # changes out, but also for dropping ambiguous timestamp.
835 # delayed writing re-raise "ambiguous timestamp issue".
835 # delayed writing re-raise "ambiguous timestamp issue".
836 # See also the wiki page below for detail:
836 # See also the wiki page below for detail:
837 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
837 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
838
838
839 # emulate dropping timestamp in 'parsers.pack_dirstate'
839 # emulate dropping timestamp in 'parsers.pack_dirstate'
840 now = _getfsnow(self._opener)
840 now = _getfsnow(self._opener)
841 self._map.clearambiguoustimes(self._updatedfiles, now)
841 self._map.clearambiguoustimes(self._updatedfiles, now)
842
842
843 # emulate that all 'dirstate.normal' results are written out
843 # emulate that all 'dirstate.normal' results are written out
844 self._lastnormaltime = 0
844 self._lastnormaltime = 0
845 self._updatedfiles.clear()
845 self._updatedfiles.clear()
846
846
847 # delay writing in-memory changes out
847 # delay writing in-memory changes out
848 tr.addfilegenerator(
848 tr.addfilegenerator(
849 b'dirstate',
849 b'dirstate',
850 (self._filename,),
850 (self._filename,),
851 lambda f: self._writedirstate(tr, f),
851 lambda f: self._writedirstate(tr, f),
852 location=b'plain',
852 location=b'plain',
853 )
853 )
854 return
854 return
855
855
856 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
856 st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
857 self._writedirstate(tr, st)
857 self._writedirstate(tr, st)
858
858
859 def addparentchangecallback(self, category, callback):
859 def addparentchangecallback(self, category, callback):
860 """add a callback to be called when the wd parents are changed
860 """add a callback to be called when the wd parents are changed
861
861
862 Callback will be called with the following arguments:
862 Callback will be called with the following arguments:
863 dirstate, (oldp1, oldp2), (newp1, newp2)
863 dirstate, (oldp1, oldp2), (newp1, newp2)
864
864
865 Category is a unique identifier to allow overwriting an old callback
865 Category is a unique identifier to allow overwriting an old callback
866 with a newer callback.
866 with a newer callback.
867 """
867 """
868 self._plchangecallbacks[category] = callback
868 self._plchangecallbacks[category] = callback
869
869
870 def _writedirstate(self, tr, st):
870 def _writedirstate(self, tr, st):
871 # notify callbacks about parents change
871 # notify callbacks about parents change
872 if self._origpl is not None and self._origpl != self._pl:
872 if self._origpl is not None and self._origpl != self._pl:
873 for c, callback in sorted(
873 for c, callback in sorted(
874 pycompat.iteritems(self._plchangecallbacks)
874 pycompat.iteritems(self._plchangecallbacks)
875 ):
875 ):
876 callback(self, self._origpl, self._pl)
876 callback(self, self._origpl, self._pl)
877 self._origpl = None
877 self._origpl = None
878 # use the modification time of the newly created temporary file as the
878 # use the modification time of the newly created temporary file as the
879 # filesystem's notion of 'now'
879 # filesystem's notion of 'now'
880 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
880 now = util.fstat(st)[stat.ST_MTIME] & _rangemask
881
881
882 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
882 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
883 # timestamp of each entries in dirstate, because of 'now > mtime'
883 # timestamp of each entries in dirstate, because of 'now > mtime'
884 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
884 delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
885 if delaywrite > 0:
885 if delaywrite > 0:
886 # do we have any files to delay for?
886 # do we have any files to delay for?
887 for f, e in pycompat.iteritems(self._map):
887 for f, e in pycompat.iteritems(self._map):
888 if e.need_delay(now):
888 if e.need_delay(now):
889 import time # to avoid useless import
889 import time # to avoid useless import
890
890
891 # rather than sleep n seconds, sleep until the next
891 # rather than sleep n seconds, sleep until the next
892 # multiple of n seconds
892 # multiple of n seconds
893 clock = time.time()
893 clock = time.time()
894 start = int(clock) - (int(clock) % delaywrite)
894 start = int(clock) - (int(clock) % delaywrite)
895 end = start + delaywrite
895 end = start + delaywrite
896 time.sleep(end - clock)
896 time.sleep(end - clock)
897 now = end # trust our estimate that the end is near now
897 now = end # trust our estimate that the end is near now
898 break
898 break
899
899
900 self._map.write(tr, st, now)
900 self._map.write(tr, st, now)
901 self._lastnormaltime = 0
901 self._lastnormaltime = 0
902 self._dirty = False
902 self._dirty = False
903
903
904 def _dirignore(self, f):
904 def _dirignore(self, f):
905 if self._ignore(f):
905 if self._ignore(f):
906 return True
906 return True
907 for p in pathutil.finddirs(f):
907 for p in pathutil.finddirs(f):
908 if self._ignore(p):
908 if self._ignore(p):
909 return True
909 return True
910 return False
910 return False
911
911
912 def _ignorefiles(self):
912 def _ignorefiles(self):
913 files = []
913 files = []
914 if os.path.exists(self._join(b'.hgignore')):
914 if os.path.exists(self._join(b'.hgignore')):
915 files.append(self._join(b'.hgignore'))
915 files.append(self._join(b'.hgignore'))
916 for name, path in self._ui.configitems(b"ui"):
916 for name, path in self._ui.configitems(b"ui"):
917 if name == b'ignore' or name.startswith(b'ignore.'):
917 if name == b'ignore' or name.startswith(b'ignore.'):
918 # we need to use os.path.join here rather than self._join
918 # we need to use os.path.join here rather than self._join
919 # because path is arbitrary and user-specified
919 # because path is arbitrary and user-specified
920 files.append(os.path.join(self._rootdir, util.expandpath(path)))
920 files.append(os.path.join(self._rootdir, util.expandpath(path)))
921 return files
921 return files
922
922
923 def _ignorefileandline(self, f):
923 def _ignorefileandline(self, f):
924 files = collections.deque(self._ignorefiles())
924 files = collections.deque(self._ignorefiles())
925 visited = set()
925 visited = set()
926 while files:
926 while files:
927 i = files.popleft()
927 i = files.popleft()
928 patterns = matchmod.readpatternfile(
928 patterns = matchmod.readpatternfile(
929 i, self._ui.warn, sourceinfo=True
929 i, self._ui.warn, sourceinfo=True
930 )
930 )
931 for pattern, lineno, line in patterns:
931 for pattern, lineno, line in patterns:
932 kind, p = matchmod._patsplit(pattern, b'glob')
932 kind, p = matchmod._patsplit(pattern, b'glob')
933 if kind == b"subinclude":
933 if kind == b"subinclude":
934 if p not in visited:
934 if p not in visited:
935 files.append(p)
935 files.append(p)
936 continue
936 continue
937 m = matchmod.match(
937 m = matchmod.match(
938 self._root, b'', [], [pattern], warn=self._ui.warn
938 self._root, b'', [], [pattern], warn=self._ui.warn
939 )
939 )
940 if m(f):
940 if m(f):
941 return (i, lineno, line)
941 return (i, lineno, line)
942 visited.add(i)
942 visited.add(i)
943 return (None, -1, b"")
943 return (None, -1, b"")
944
944
945 def _walkexplicit(self, match, subrepos):
945 def _walkexplicit(self, match, subrepos):
946 """Get stat data about the files explicitly specified by match.
946 """Get stat data about the files explicitly specified by match.
947
947
948 Return a triple (results, dirsfound, dirsnotfound).
948 Return a triple (results, dirsfound, dirsnotfound).
949 - results is a mapping from filename to stat result. It also contains
949 - results is a mapping from filename to stat result. It also contains
950 listings mapping subrepos and .hg to None.
950 listings mapping subrepos and .hg to None.
951 - dirsfound is a list of files found to be directories.
951 - dirsfound is a list of files found to be directories.
952 - dirsnotfound is a list of files that the dirstate thinks are
952 - dirsnotfound is a list of files that the dirstate thinks are
953 directories and that were not found."""
953 directories and that were not found."""
954
954
955 def badtype(mode):
955 def badtype(mode):
956 kind = _(b'unknown')
956 kind = _(b'unknown')
957 if stat.S_ISCHR(mode):
957 if stat.S_ISCHR(mode):
958 kind = _(b'character device')
958 kind = _(b'character device')
959 elif stat.S_ISBLK(mode):
959 elif stat.S_ISBLK(mode):
960 kind = _(b'block device')
960 kind = _(b'block device')
961 elif stat.S_ISFIFO(mode):
961 elif stat.S_ISFIFO(mode):
962 kind = _(b'fifo')
962 kind = _(b'fifo')
963 elif stat.S_ISSOCK(mode):
963 elif stat.S_ISSOCK(mode):
964 kind = _(b'socket')
964 kind = _(b'socket')
965 elif stat.S_ISDIR(mode):
965 elif stat.S_ISDIR(mode):
966 kind = _(b'directory')
966 kind = _(b'directory')
967 return _(b'unsupported file type (type is %s)') % kind
967 return _(b'unsupported file type (type is %s)') % kind
968
968
969 badfn = match.bad
969 badfn = match.bad
970 dmap = self._map
970 dmap = self._map
971 lstat = os.lstat
971 lstat = os.lstat
972 getkind = stat.S_IFMT
972 getkind = stat.S_IFMT
973 dirkind = stat.S_IFDIR
973 dirkind = stat.S_IFDIR
974 regkind = stat.S_IFREG
974 regkind = stat.S_IFREG
975 lnkkind = stat.S_IFLNK
975 lnkkind = stat.S_IFLNK
976 join = self._join
976 join = self._join
977 dirsfound = []
977 dirsfound = []
978 foundadd = dirsfound.append
978 foundadd = dirsfound.append
979 dirsnotfound = []
979 dirsnotfound = []
980 notfoundadd = dirsnotfound.append
980 notfoundadd = dirsnotfound.append
981
981
982 if not match.isexact() and self._checkcase:
982 if not match.isexact() and self._checkcase:
983 normalize = self._normalize
983 normalize = self._normalize
984 else:
984 else:
985 normalize = None
985 normalize = None
986
986
987 files = sorted(match.files())
987 files = sorted(match.files())
988 subrepos.sort()
988 subrepos.sort()
989 i, j = 0, 0
989 i, j = 0, 0
990 while i < len(files) and j < len(subrepos):
990 while i < len(files) and j < len(subrepos):
991 subpath = subrepos[j] + b"/"
991 subpath = subrepos[j] + b"/"
992 if files[i] < subpath:
992 if files[i] < subpath:
993 i += 1
993 i += 1
994 continue
994 continue
995 while i < len(files) and files[i].startswith(subpath):
995 while i < len(files) and files[i].startswith(subpath):
996 del files[i]
996 del files[i]
997 j += 1
997 j += 1
998
998
999 if not files or b'' in files:
999 if not files or b'' in files:
1000 files = [b'']
1000 files = [b'']
1001 # constructing the foldmap is expensive, so don't do it for the
1001 # constructing the foldmap is expensive, so don't do it for the
1002 # common case where files is ['']
1002 # common case where files is ['']
1003 normalize = None
1003 normalize = None
1004 results = dict.fromkeys(subrepos)
1004 results = dict.fromkeys(subrepos)
1005 results[b'.hg'] = None
1005 results[b'.hg'] = None
1006
1006
1007 for ff in files:
1007 for ff in files:
1008 if normalize:
1008 if normalize:
1009 nf = normalize(ff, False, True)
1009 nf = normalize(ff, False, True)
1010 else:
1010 else:
1011 nf = ff
1011 nf = ff
1012 if nf in results:
1012 if nf in results:
1013 continue
1013 continue
1014
1014
1015 try:
1015 try:
1016 st = lstat(join(nf))
1016 st = lstat(join(nf))
1017 kind = getkind(st.st_mode)
1017 kind = getkind(st.st_mode)
1018 if kind == dirkind:
1018 if kind == dirkind:
1019 if nf in dmap:
1019 if nf in dmap:
1020 # file replaced by dir on disk but still in dirstate
1020 # file replaced by dir on disk but still in dirstate
1021 results[nf] = None
1021 results[nf] = None
1022 foundadd((nf, ff))
1022 foundadd((nf, ff))
1023 elif kind == regkind or kind == lnkkind:
1023 elif kind == regkind or kind == lnkkind:
1024 results[nf] = st
1024 results[nf] = st
1025 else:
1025 else:
1026 badfn(ff, badtype(kind))
1026 badfn(ff, badtype(kind))
1027 if nf in dmap:
1027 if nf in dmap:
1028 results[nf] = None
1028 results[nf] = None
1029 except OSError as inst: # nf not found on disk - it is dirstate only
1029 except OSError as inst: # nf not found on disk - it is dirstate only
1030 if nf in dmap: # does it exactly match a missing file?
1030 if nf in dmap: # does it exactly match a missing file?
1031 results[nf] = None
1031 results[nf] = None
1032 else: # does it match a missing directory?
1032 else: # does it match a missing directory?
1033 if self._map.hasdir(nf):
1033 if self._map.hasdir(nf):
1034 notfoundadd(nf)
1034 notfoundadd(nf)
1035 else:
1035 else:
1036 badfn(ff, encoding.strtolocal(inst.strerror))
1036 badfn(ff, encoding.strtolocal(inst.strerror))
1037
1037
1038 # match.files() may contain explicitly-specified paths that shouldn't
1038 # match.files() may contain explicitly-specified paths that shouldn't
1039 # be taken; drop them from the list of files found. dirsfound/notfound
1039 # be taken; drop them from the list of files found. dirsfound/notfound
1040 # aren't filtered here because they will be tested later.
1040 # aren't filtered here because they will be tested later.
1041 if match.anypats():
1041 if match.anypats():
1042 for f in list(results):
1042 for f in list(results):
1043 if f == b'.hg' or f in subrepos:
1043 if f == b'.hg' or f in subrepos:
1044 # keep sentinel to disable further out-of-repo walks
1044 # keep sentinel to disable further out-of-repo walks
1045 continue
1045 continue
1046 if not match(f):
1046 if not match(f):
1047 del results[f]
1047 del results[f]
1048
1048
1049 # Case insensitive filesystems cannot rely on lstat() failing to detect
1049 # Case insensitive filesystems cannot rely on lstat() failing to detect
1050 # a case-only rename. Prune the stat object for any file that does not
1050 # a case-only rename. Prune the stat object for any file that does not
1051 # match the case in the filesystem, if there are multiple files that
1051 # match the case in the filesystem, if there are multiple files that
1052 # normalize to the same path.
1052 # normalize to the same path.
1053 if match.isexact() and self._checkcase:
1053 if match.isexact() and self._checkcase:
1054 normed = {}
1054 normed = {}
1055
1055
1056 for f, st in pycompat.iteritems(results):
1056 for f, st in pycompat.iteritems(results):
1057 if st is None:
1057 if st is None:
1058 continue
1058 continue
1059
1059
1060 nc = util.normcase(f)
1060 nc = util.normcase(f)
1061 paths = normed.get(nc)
1061 paths = normed.get(nc)
1062
1062
1063 if paths is None:
1063 if paths is None:
1064 paths = set()
1064 paths = set()
1065 normed[nc] = paths
1065 normed[nc] = paths
1066
1066
1067 paths.add(f)
1067 paths.add(f)
1068
1068
1069 for norm, paths in pycompat.iteritems(normed):
1069 for norm, paths in pycompat.iteritems(normed):
1070 if len(paths) > 1:
1070 if len(paths) > 1:
1071 for path in paths:
1071 for path in paths:
1072 folded = self._discoverpath(
1072 folded = self._discoverpath(
1073 path, norm, True, None, self._map.dirfoldmap
1073 path, norm, True, None, self._map.dirfoldmap
1074 )
1074 )
1075 if path != folded:
1075 if path != folded:
1076 results[path] = None
1076 results[path] = None
1077
1077
1078 return results, dirsfound, dirsnotfound
1078 return results, dirsfound, dirsnotfound
1079
1079
1080 def walk(self, match, subrepos, unknown, ignored, full=True):
1080 def walk(self, match, subrepos, unknown, ignored, full=True):
1081 """
1081 """
1082 Walk recursively through the directory tree, finding all files
1082 Walk recursively through the directory tree, finding all files
1083 matched by match.
1083 matched by match.
1084
1084
1085 If full is False, maybe skip some known-clean files.
1085 If full is False, maybe skip some known-clean files.
1086
1086
1087 Return a dict mapping filename to stat-like object (either
1087 Return a dict mapping filename to stat-like object (either
1088 mercurial.osutil.stat instance or return value of os.stat()).
1088 mercurial.osutil.stat instance or return value of os.stat()).
1089
1089
1090 """
1090 """
1091 # full is a flag that extensions that hook into walk can use -- this
1091 # full is a flag that extensions that hook into walk can use -- this
1092 # implementation doesn't use it at all. This satisfies the contract
1092 # implementation doesn't use it at all. This satisfies the contract
1093 # because we only guarantee a "maybe".
1093 # because we only guarantee a "maybe".
1094
1094
1095 if ignored:
1095 if ignored:
1096 ignore = util.never
1096 ignore = util.never
1097 dirignore = util.never
1097 dirignore = util.never
1098 elif unknown:
1098 elif unknown:
1099 ignore = self._ignore
1099 ignore = self._ignore
1100 dirignore = self._dirignore
1100 dirignore = self._dirignore
1101 else:
1101 else:
1102 # if not unknown and not ignored, drop dir recursion and step 2
1102 # if not unknown and not ignored, drop dir recursion and step 2
1103 ignore = util.always
1103 ignore = util.always
1104 dirignore = util.always
1104 dirignore = util.always
1105
1105
1106 matchfn = match.matchfn
1106 matchfn = match.matchfn
1107 matchalways = match.always()
1107 matchalways = match.always()
1108 matchtdir = match.traversedir
1108 matchtdir = match.traversedir
1109 dmap = self._map
1109 dmap = self._map
1110 listdir = util.listdir
1110 listdir = util.listdir
1111 lstat = os.lstat
1111 lstat = os.lstat
1112 dirkind = stat.S_IFDIR
1112 dirkind = stat.S_IFDIR
1113 regkind = stat.S_IFREG
1113 regkind = stat.S_IFREG
1114 lnkkind = stat.S_IFLNK
1114 lnkkind = stat.S_IFLNK
1115 join = self._join
1115 join = self._join
1116
1116
1117 exact = skipstep3 = False
1117 exact = skipstep3 = False
1118 if match.isexact(): # match.exact
1118 if match.isexact(): # match.exact
1119 exact = True
1119 exact = True
1120 dirignore = util.always # skip step 2
1120 dirignore = util.always # skip step 2
1121 elif match.prefix(): # match.match, no patterns
1121 elif match.prefix(): # match.match, no patterns
1122 skipstep3 = True
1122 skipstep3 = True
1123
1123
1124 if not exact and self._checkcase:
1124 if not exact and self._checkcase:
1125 normalize = self._normalize
1125 normalize = self._normalize
1126 normalizefile = self._normalizefile
1126 normalizefile = self._normalizefile
1127 skipstep3 = False
1127 skipstep3 = False
1128 else:
1128 else:
1129 normalize = self._normalize
1129 normalize = self._normalize
1130 normalizefile = None
1130 normalizefile = None
1131
1131
1132 # step 1: find all explicit files
1132 # step 1: find all explicit files
1133 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1133 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1134 if matchtdir:
1134 if matchtdir:
1135 for d in work:
1135 for d in work:
1136 matchtdir(d[0])
1136 matchtdir(d[0])
1137 for d in dirsnotfound:
1137 for d in dirsnotfound:
1138 matchtdir(d)
1138 matchtdir(d)
1139
1139
1140 skipstep3 = skipstep3 and not (work or dirsnotfound)
1140 skipstep3 = skipstep3 and not (work or dirsnotfound)
1141 work = [d for d in work if not dirignore(d[0])]
1141 work = [d for d in work if not dirignore(d[0])]
1142
1142
1143 # step 2: visit subdirectories
1143 # step 2: visit subdirectories
1144 def traverse(work, alreadynormed):
1144 def traverse(work, alreadynormed):
1145 wadd = work.append
1145 wadd = work.append
1146 while work:
1146 while work:
1147 tracing.counter('dirstate.walk work', len(work))
1147 tracing.counter('dirstate.walk work', len(work))
1148 nd = work.pop()
1148 nd = work.pop()
1149 visitentries = match.visitchildrenset(nd)
1149 visitentries = match.visitchildrenset(nd)
1150 if not visitentries:
1150 if not visitentries:
1151 continue
1151 continue
1152 if visitentries == b'this' or visitentries == b'all':
1152 if visitentries == b'this' or visitentries == b'all':
1153 visitentries = None
1153 visitentries = None
1154 skip = None
1154 skip = None
1155 if nd != b'':
1155 if nd != b'':
1156 skip = b'.hg'
1156 skip = b'.hg'
1157 try:
1157 try:
1158 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1158 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1159 entries = listdir(join(nd), stat=True, skip=skip)
1159 entries = listdir(join(nd), stat=True, skip=skip)
1160 except OSError as inst:
1160 except OSError as inst:
1161 if inst.errno in (errno.EACCES, errno.ENOENT):
1161 if inst.errno in (errno.EACCES, errno.ENOENT):
1162 match.bad(
1162 match.bad(
1163 self.pathto(nd), encoding.strtolocal(inst.strerror)
1163 self.pathto(nd), encoding.strtolocal(inst.strerror)
1164 )
1164 )
1165 continue
1165 continue
1166 raise
1166 raise
1167 for f, kind, st in entries:
1167 for f, kind, st in entries:
1168 # Some matchers may return files in the visitentries set,
1168 # Some matchers may return files in the visitentries set,
1169 # instead of 'this', if the matcher explicitly mentions them
1169 # instead of 'this', if the matcher explicitly mentions them
1170 # and is not an exactmatcher. This is acceptable; we do not
1170 # and is not an exactmatcher. This is acceptable; we do not
1171 # make any hard assumptions about file-or-directory below
1171 # make any hard assumptions about file-or-directory below
1172 # based on the presence of `f` in visitentries. If
1172 # based on the presence of `f` in visitentries. If
1173 # visitchildrenset returned a set, we can always skip the
1173 # visitchildrenset returned a set, we can always skip the
1174 # entries *not* in the set it provided regardless of whether
1174 # entries *not* in the set it provided regardless of whether
1175 # they're actually a file or a directory.
1175 # they're actually a file or a directory.
1176 if visitentries and f not in visitentries:
1176 if visitentries and f not in visitentries:
1177 continue
1177 continue
1178 if normalizefile:
1178 if normalizefile:
1179 # even though f might be a directory, we're only
1179 # even though f might be a directory, we're only
1180 # interested in comparing it to files currently in the
1180 # interested in comparing it to files currently in the
1181 # dmap -- therefore normalizefile is enough
1181 # dmap -- therefore normalizefile is enough
1182 nf = normalizefile(
1182 nf = normalizefile(
1183 nd and (nd + b"/" + f) or f, True, True
1183 nd and (nd + b"/" + f) or f, True, True
1184 )
1184 )
1185 else:
1185 else:
1186 nf = nd and (nd + b"/" + f) or f
1186 nf = nd and (nd + b"/" + f) or f
1187 if nf not in results:
1187 if nf not in results:
1188 if kind == dirkind:
1188 if kind == dirkind:
1189 if not ignore(nf):
1189 if not ignore(nf):
1190 if matchtdir:
1190 if matchtdir:
1191 matchtdir(nf)
1191 matchtdir(nf)
1192 wadd(nf)
1192 wadd(nf)
1193 if nf in dmap and (matchalways or matchfn(nf)):
1193 if nf in dmap and (matchalways or matchfn(nf)):
1194 results[nf] = None
1194 results[nf] = None
1195 elif kind == regkind or kind == lnkkind:
1195 elif kind == regkind or kind == lnkkind:
1196 if nf in dmap:
1196 if nf in dmap:
1197 if matchalways or matchfn(nf):
1197 if matchalways or matchfn(nf):
1198 results[nf] = st
1198 results[nf] = st
1199 elif (matchalways or matchfn(nf)) and not ignore(
1199 elif (matchalways or matchfn(nf)) and not ignore(
1200 nf
1200 nf
1201 ):
1201 ):
1202 # unknown file -- normalize if necessary
1202 # unknown file -- normalize if necessary
1203 if not alreadynormed:
1203 if not alreadynormed:
1204 nf = normalize(nf, False, True)
1204 nf = normalize(nf, False, True)
1205 results[nf] = st
1205 results[nf] = st
1206 elif nf in dmap and (matchalways or matchfn(nf)):
1206 elif nf in dmap and (matchalways or matchfn(nf)):
1207 results[nf] = None
1207 results[nf] = None
1208
1208
1209 for nd, d in work:
1209 for nd, d in work:
1210 # alreadynormed means that processwork doesn't have to do any
1210 # alreadynormed means that processwork doesn't have to do any
1211 # expensive directory normalization
1211 # expensive directory normalization
1212 alreadynormed = not normalize or nd == d
1212 alreadynormed = not normalize or nd == d
1213 traverse([d], alreadynormed)
1213 traverse([d], alreadynormed)
1214
1214
1215 for s in subrepos:
1215 for s in subrepos:
1216 del results[s]
1216 del results[s]
1217 del results[b'.hg']
1217 del results[b'.hg']
1218
1218
1219 # step 3: visit remaining files from dmap
1219 # step 3: visit remaining files from dmap
1220 if not skipstep3 and not exact:
1220 if not skipstep3 and not exact:
1221 # If a dmap file is not in results yet, it was either
1221 # If a dmap file is not in results yet, it was either
1222 # a) not matching matchfn b) ignored, c) missing, or d) under a
1222 # a) not matching matchfn b) ignored, c) missing, or d) under a
1223 # symlink directory.
1223 # symlink directory.
1224 if not results and matchalways:
1224 if not results and matchalways:
1225 visit = [f for f in dmap]
1225 visit = [f for f in dmap]
1226 else:
1226 else:
1227 visit = [f for f in dmap if f not in results and matchfn(f)]
1227 visit = [f for f in dmap if f not in results and matchfn(f)]
1228 visit.sort()
1228 visit.sort()
1229
1229
1230 if unknown:
1230 if unknown:
1231 # unknown == True means we walked all dirs under the roots
1231 # unknown == True means we walked all dirs under the roots
1232 # that wasn't ignored, and everything that matched was stat'ed
1232 # that wasn't ignored, and everything that matched was stat'ed
1233 # and is already in results.
1233 # and is already in results.
1234 # The rest must thus be ignored or under a symlink.
1234 # The rest must thus be ignored or under a symlink.
1235 audit_path = pathutil.pathauditor(self._root, cached=True)
1235 audit_path = pathutil.pathauditor(self._root, cached=True)
1236
1236
1237 for nf in iter(visit):
1237 for nf in iter(visit):
1238 # If a stat for the same file was already added with a
1238 # If a stat for the same file was already added with a
1239 # different case, don't add one for this, since that would
1239 # different case, don't add one for this, since that would
1240 # make it appear as if the file exists under both names
1240 # make it appear as if the file exists under both names
1241 # on disk.
1241 # on disk.
1242 if (
1242 if (
1243 normalizefile
1243 normalizefile
1244 and normalizefile(nf, True, True) in results
1244 and normalizefile(nf, True, True) in results
1245 ):
1245 ):
1246 results[nf] = None
1246 results[nf] = None
1247 # Report ignored items in the dmap as long as they are not
1247 # Report ignored items in the dmap as long as they are not
1248 # under a symlink directory.
1248 # under a symlink directory.
1249 elif audit_path.check(nf):
1249 elif audit_path.check(nf):
1250 try:
1250 try:
1251 results[nf] = lstat(join(nf))
1251 results[nf] = lstat(join(nf))
1252 # file was just ignored, no links, and exists
1252 # file was just ignored, no links, and exists
1253 except OSError:
1253 except OSError:
1254 # file doesn't exist
1254 # file doesn't exist
1255 results[nf] = None
1255 results[nf] = None
1256 else:
1256 else:
1257 # It's either missing or under a symlink directory
1257 # It's either missing or under a symlink directory
1258 # which we in this case report as missing
1258 # which we in this case report as missing
1259 results[nf] = None
1259 results[nf] = None
1260 else:
1260 else:
1261 # We may not have walked the full directory tree above,
1261 # We may not have walked the full directory tree above,
1262 # so stat and check everything we missed.
1262 # so stat and check everything we missed.
1263 iv = iter(visit)
1263 iv = iter(visit)
1264 for st in util.statfiles([join(i) for i in visit]):
1264 for st in util.statfiles([join(i) for i in visit]):
1265 results[next(iv)] = st
1265 results[next(iv)] = st
1266 return results
1266 return results
1267
1267
1268 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1268 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1269 # Force Rayon (Rust parallelism library) to respect the number of
1269 # Force Rayon (Rust parallelism library) to respect the number of
1270 # workers. This is a temporary workaround until Rust code knows
1270 # workers. This is a temporary workaround until Rust code knows
1271 # how to read the config file.
1271 # how to read the config file.
1272 numcpus = self._ui.configint(b"worker", b"numcpus")
1272 numcpus = self._ui.configint(b"worker", b"numcpus")
1273 if numcpus is not None:
1273 if numcpus is not None:
1274 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1274 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1275
1275
1276 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1276 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1277 if not workers_enabled:
1277 if not workers_enabled:
1278 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1278 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1279
1279
1280 (
1280 (
1281 lookup,
1281 lookup,
1282 modified,
1282 modified,
1283 added,
1283 added,
1284 removed,
1284 removed,
1285 deleted,
1285 deleted,
1286 clean,
1286 clean,
1287 ignored,
1287 ignored,
1288 unknown,
1288 unknown,
1289 warnings,
1289 warnings,
1290 bad,
1290 bad,
1291 traversed,
1291 traversed,
1292 dirty,
1292 dirty,
1293 ) = rustmod.status(
1293 ) = rustmod.status(
1294 self._map._rustmap,
1294 self._map._rustmap,
1295 matcher,
1295 matcher,
1296 self._rootdir,
1296 self._rootdir,
1297 self._ignorefiles(),
1297 self._ignorefiles(),
1298 self._checkexec,
1298 self._checkexec,
1299 self._lastnormaltime,
1299 self._lastnormaltime,
1300 bool(list_clean),
1300 bool(list_clean),
1301 bool(list_ignored),
1301 bool(list_ignored),
1302 bool(list_unknown),
1302 bool(list_unknown),
1303 bool(matcher.traversedir),
1303 bool(matcher.traversedir),
1304 )
1304 )
1305
1305
1306 self._dirty |= dirty
1306 self._dirty |= dirty
1307
1307
1308 if matcher.traversedir:
1308 if matcher.traversedir:
1309 for dir in traversed:
1309 for dir in traversed:
1310 matcher.traversedir(dir)
1310 matcher.traversedir(dir)
1311
1311
1312 if self._ui.warn:
1312 if self._ui.warn:
1313 for item in warnings:
1313 for item in warnings:
1314 if isinstance(item, tuple):
1314 if isinstance(item, tuple):
1315 file_path, syntax = item
1315 file_path, syntax = item
1316 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1316 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1317 file_path,
1317 file_path,
1318 syntax,
1318 syntax,
1319 )
1319 )
1320 self._ui.warn(msg)
1320 self._ui.warn(msg)
1321 else:
1321 else:
1322 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1322 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1323 self._ui.warn(
1323 self._ui.warn(
1324 msg
1324 msg
1325 % (
1325 % (
1326 pathutil.canonpath(
1326 pathutil.canonpath(
1327 self._rootdir, self._rootdir, item
1327 self._rootdir, self._rootdir, item
1328 ),
1328 ),
1329 b"No such file or directory",
1329 b"No such file or directory",
1330 )
1330 )
1331 )
1331 )
1332
1332
1333 for (fn, message) in bad:
1333 for (fn, message) in bad:
1334 matcher.bad(fn, encoding.strtolocal(message))
1334 matcher.bad(fn, encoding.strtolocal(message))
1335
1335
1336 status = scmutil.status(
1336 status = scmutil.status(
1337 modified=modified,
1337 modified=modified,
1338 added=added,
1338 added=added,
1339 removed=removed,
1339 removed=removed,
1340 deleted=deleted,
1340 deleted=deleted,
1341 unknown=unknown,
1341 unknown=unknown,
1342 ignored=ignored,
1342 ignored=ignored,
1343 clean=clean,
1343 clean=clean,
1344 )
1344 )
1345 return (lookup, status)
1345 return (lookup, status)
1346
1346
1347 def status(self, match, subrepos, ignored, clean, unknown):
1347 def status(self, match, subrepos, ignored, clean, unknown):
1348 """Determine the status of the working copy relative to the
1348 """Determine the status of the working copy relative to the
1349 dirstate and return a pair of (unsure, status), where status is of type
1349 dirstate and return a pair of (unsure, status), where status is of type
1350 scmutil.status and:
1350 scmutil.status and:
1351
1351
1352 unsure:
1352 unsure:
1353 files that might have been modified since the dirstate was
1353 files that might have been modified since the dirstate was
1354 written, but need to be read to be sure (size is the same
1354 written, but need to be read to be sure (size is the same
1355 but mtime differs)
1355 but mtime differs)
1356 status.modified:
1356 status.modified:
1357 files that have definitely been modified since the dirstate
1357 files that have definitely been modified since the dirstate
1358 was written (different size or mode)
1358 was written (different size or mode)
1359 status.clean:
1359 status.clean:
1360 files that have definitely not been modified since the
1360 files that have definitely not been modified since the
1361 dirstate was written
1361 dirstate was written
1362 """
1362 """
1363 listignored, listclean, listunknown = ignored, clean, unknown
1363 listignored, listclean, listunknown = ignored, clean, unknown
1364 lookup, modified, added, unknown, ignored = [], [], [], [], []
1364 lookup, modified, added, unknown, ignored = [], [], [], [], []
1365 removed, deleted, clean = [], [], []
1365 removed, deleted, clean = [], [], []
1366
1366
1367 dmap = self._map
1367 dmap = self._map
1368 dmap.preload()
1368 dmap.preload()
1369
1369
1370 use_rust = True
1370 use_rust = True
1371
1371
1372 allowed_matchers = (
1372 allowed_matchers = (
1373 matchmod.alwaysmatcher,
1373 matchmod.alwaysmatcher,
1374 matchmod.exactmatcher,
1374 matchmod.exactmatcher,
1375 matchmod.includematcher,
1375 matchmod.includematcher,
1376 )
1376 )
1377
1377
1378 if rustmod is None:
1378 if rustmod is None:
1379 use_rust = False
1379 use_rust = False
1380 elif self._checkcase:
1380 elif self._checkcase:
1381 # Case-insensitive filesystems are not handled yet
1381 # Case-insensitive filesystems are not handled yet
1382 use_rust = False
1382 use_rust = False
1383 elif subrepos:
1383 elif subrepos:
1384 use_rust = False
1384 use_rust = False
1385 elif sparse.enabled:
1385 elif sparse.enabled:
1386 use_rust = False
1386 use_rust = False
1387 elif not isinstance(match, allowed_matchers):
1387 elif not isinstance(match, allowed_matchers):
1388 # Some matchers have yet to be implemented
1388 # Some matchers have yet to be implemented
1389 use_rust = False
1389 use_rust = False
1390
1390
1391 if use_rust:
1391 if use_rust:
1392 try:
1392 try:
1393 return self._rust_status(
1393 return self._rust_status(
1394 match, listclean, listignored, listunknown
1394 match, listclean, listignored, listunknown
1395 )
1395 )
1396 except rustmod.FallbackError:
1396 except rustmod.FallbackError:
1397 pass
1397 pass
1398
1398
1399 def noop(f):
1399 def noop(f):
1400 pass
1400 pass
1401
1401
1402 dcontains = dmap.__contains__
1402 dcontains = dmap.__contains__
1403 dget = dmap.__getitem__
1403 dget = dmap.__getitem__
1404 ladd = lookup.append # aka "unsure"
1404 ladd = lookup.append # aka "unsure"
1405 madd = modified.append
1405 madd = modified.append
1406 aadd = added.append
1406 aadd = added.append
1407 uadd = unknown.append if listunknown else noop
1407 uadd = unknown.append if listunknown else noop
1408 iadd = ignored.append if listignored else noop
1408 iadd = ignored.append if listignored else noop
1409 radd = removed.append
1409 radd = removed.append
1410 dadd = deleted.append
1410 dadd = deleted.append
1411 cadd = clean.append if listclean else noop
1411 cadd = clean.append if listclean else noop
1412 mexact = match.exact
1412 mexact = match.exact
1413 dirignore = self._dirignore
1413 dirignore = self._dirignore
1414 checkexec = self._checkexec
1414 checkexec = self._checkexec
1415 copymap = self._map.copymap
1415 copymap = self._map.copymap
1416 lastnormaltime = self._lastnormaltime
1416 lastnormaltime = self._lastnormaltime
1417
1417
1418 # We need to do full walks when either
1418 # We need to do full walks when either
1419 # - we're listing all clean files, or
1419 # - we're listing all clean files, or
1420 # - match.traversedir does something, because match.traversedir should
1420 # - match.traversedir does something, because match.traversedir should
1421 # be called for every dir in the working dir
1421 # be called for every dir in the working dir
1422 full = listclean or match.traversedir is not None
1422 full = listclean or match.traversedir is not None
1423 for fn, st in pycompat.iteritems(
1423 for fn, st in pycompat.iteritems(
1424 self.walk(match, subrepos, listunknown, listignored, full=full)
1424 self.walk(match, subrepos, listunknown, listignored, full=full)
1425 ):
1425 ):
1426 if not dcontains(fn):
1426 if not dcontains(fn):
1427 if (listignored or mexact(fn)) and dirignore(fn):
1427 if (listignored or mexact(fn)) and dirignore(fn):
1428 if listignored:
1428 if listignored:
1429 iadd(fn)
1429 iadd(fn)
1430 else:
1430 else:
1431 uadd(fn)
1431 uadd(fn)
1432 continue
1432 continue
1433
1433
1434 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1434 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1435 # written like that for performance reasons. dmap[fn] is not a
1435 # written like that for performance reasons. dmap[fn] is not a
1436 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1436 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1437 # opcode has fast paths when the value to be unpacked is a tuple or
1437 # opcode has fast paths when the value to be unpacked is a tuple or
1438 # a list, but falls back to creating a full-fledged iterator in
1438 # a list, but falls back to creating a full-fledged iterator in
1439 # general. That is much slower than simply accessing and storing the
1439 # general. That is much slower than simply accessing and storing the
1440 # tuple members one by one.
1440 # tuple members one by one.
1441 t = dget(fn)
1441 t = dget(fn)
1442 mode = t.mode
1442 mode = t.mode
1443 size = t.size
1443 size = t.size
1444 time = t.mtime
1444 time = t.mtime
1445
1445
1446 if not st and t.tracked:
1446 if not st and t.tracked:
1447 dadd(fn)
1447 dadd(fn)
1448 elif t.merged:
1448 elif t.merged:
1449 madd(fn)
1449 madd(fn)
1450 elif t.added:
1450 elif t.added:
1451 aadd(fn)
1451 aadd(fn)
1452 elif t.removed:
1452 elif t.removed:
1453 radd(fn)
1453 radd(fn)
1454 elif t.tracked:
1454 elif t.tracked:
1455 if (
1455 if (
1456 size >= 0
1456 size >= 0
1457 and (
1457 and (
1458 (size != st.st_size and size != st.st_size & _rangemask)
1458 (size != st.st_size and size != st.st_size & _rangemask)
1459 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1459 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1460 )
1460 )
1461 or t.from_p2
1461 or t.from_p2
1462 or fn in copymap
1462 or fn in copymap
1463 ):
1463 ):
1464 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1464 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1465 # issue6456: Size returned may be longer due to
1465 # issue6456: Size returned may be longer due to
1466 # encryption on EXT-4 fscrypt, undecided.
1466 # encryption on EXT-4 fscrypt, undecided.
1467 ladd(fn)
1467 ladd(fn)
1468 else:
1468 else:
1469 madd(fn)
1469 madd(fn)
1470 elif (
1470 elif (
1471 time != st[stat.ST_MTIME]
1471 time != st[stat.ST_MTIME]
1472 and time != st[stat.ST_MTIME] & _rangemask
1472 and time != st[stat.ST_MTIME] & _rangemask
1473 ):
1473 ):
1474 ladd(fn)
1474 ladd(fn)
1475 elif st[stat.ST_MTIME] == lastnormaltime:
1475 elif st[stat.ST_MTIME] == lastnormaltime:
1476 # fn may have just been marked as normal and it may have
1476 # fn may have just been marked as normal and it may have
1477 # changed in the same second without changing its size.
1477 # changed in the same second without changing its size.
1478 # This can happen if we quickly do multiple commits.
1478 # This can happen if we quickly do multiple commits.
1479 # Force lookup, so we don't miss such a racy file change.
1479 # Force lookup, so we don't miss such a racy file change.
1480 ladd(fn)
1480 ladd(fn)
1481 elif listclean:
1481 elif listclean:
1482 cadd(fn)
1482 cadd(fn)
1483 status = scmutil.status(
1483 status = scmutil.status(
1484 modified, added, removed, deleted, unknown, ignored, clean
1484 modified, added, removed, deleted, unknown, ignored, clean
1485 )
1485 )
1486 return (lookup, status)
1486 return (lookup, status)
1487
1487
1488 def matches(self, match):
1488 def matches(self, match):
1489 """
1489 """
1490 return files in the dirstate (in whatever state) filtered by match
1490 return files in the dirstate (in whatever state) filtered by match
1491 """
1491 """
1492 dmap = self._map
1492 dmap = self._map
1493 if rustmod is not None:
1493 if rustmod is not None:
1494 dmap = self._map._rustmap
1494 dmap = self._map._rustmap
1495
1495
1496 if match.always():
1496 if match.always():
1497 return dmap.keys()
1497 return dmap.keys()
1498 files = match.files()
1498 files = match.files()
1499 if match.isexact():
1499 if match.isexact():
1500 # fast path -- filter the other way around, since typically files is
1500 # fast path -- filter the other way around, since typically files is
1501 # much smaller than dmap
1501 # much smaller than dmap
1502 return [f for f in files if f in dmap]
1502 return [f for f in files if f in dmap]
1503 if match.prefix() and all(fn in dmap for fn in files):
1503 if match.prefix() and all(fn in dmap for fn in files):
1504 # fast path -- all the values are known to be files, so just return
1504 # fast path -- all the values are known to be files, so just return
1505 # that
1505 # that
1506 return list(files)
1506 return list(files)
1507 return [f for f in dmap if match(f)]
1507 return [f for f in dmap if match(f)]
1508
1508
1509 def _actualfilename(self, tr):
1509 def _actualfilename(self, tr):
1510 if tr:
1510 if tr:
1511 return self._pendingfilename
1511 return self._pendingfilename
1512 else:
1512 else:
1513 return self._filename
1513 return self._filename
1514
1514
1515 def savebackup(self, tr, backupname):
1515 def savebackup(self, tr, backupname):
1516 '''Save current dirstate into backup file'''
1516 '''Save current dirstate into backup file'''
1517 filename = self._actualfilename(tr)
1517 filename = self._actualfilename(tr)
1518 assert backupname != filename
1518 assert backupname != filename
1519
1519
1520 # use '_writedirstate' instead of 'write' to write changes certainly,
1520 # use '_writedirstate' instead of 'write' to write changes certainly,
1521 # because the latter omits writing out if transaction is running.
1521 # because the latter omits writing out if transaction is running.
1522 # output file will be used to create backup of dirstate at this point.
1522 # output file will be used to create backup of dirstate at this point.
1523 if self._dirty or not self._opener.exists(filename):
1523 if self._dirty or not self._opener.exists(filename):
1524 self._writedirstate(
1524 self._writedirstate(
1525 tr,
1525 tr,
1526 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1526 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1527 )
1527 )
1528
1528
1529 if tr:
1529 if tr:
1530 # ensure that subsequent tr.writepending returns True for
1530 # ensure that subsequent tr.writepending returns True for
1531 # changes written out above, even if dirstate is never
1531 # changes written out above, even if dirstate is never
1532 # changed after this
1532 # changed after this
1533 tr.addfilegenerator(
1533 tr.addfilegenerator(
1534 b'dirstate',
1534 b'dirstate',
1535 (self._filename,),
1535 (self._filename,),
1536 lambda f: self._writedirstate(tr, f),
1536 lambda f: self._writedirstate(tr, f),
1537 location=b'plain',
1537 location=b'plain',
1538 )
1538 )
1539
1539
1540 # ensure that pending file written above is unlinked at
1540 # ensure that pending file written above is unlinked at
1541 # failure, even if tr.writepending isn't invoked until the
1541 # failure, even if tr.writepending isn't invoked until the
1542 # end of this transaction
1542 # end of this transaction
1543 tr.registertmp(filename, location=b'plain')
1543 tr.registertmp(filename, location=b'plain')
1544
1544
1545 self._opener.tryunlink(backupname)
1545 self._opener.tryunlink(backupname)
1546 # hardlink backup is okay because _writedirstate is always called
1546 # hardlink backup is okay because _writedirstate is always called
1547 # with an "atomictemp=True" file.
1547 # with an "atomictemp=True" file.
1548 util.copyfile(
1548 util.copyfile(
1549 self._opener.join(filename),
1549 self._opener.join(filename),
1550 self._opener.join(backupname),
1550 self._opener.join(backupname),
1551 hardlink=True,
1551 hardlink=True,
1552 )
1552 )
1553
1553
1554 def restorebackup(self, tr, backupname):
1554 def restorebackup(self, tr, backupname):
1555 '''Restore dirstate by backup file'''
1555 '''Restore dirstate by backup file'''
1556 # this "invalidate()" prevents "wlock.release()" from writing
1556 # this "invalidate()" prevents "wlock.release()" from writing
1557 # changes of dirstate out after restoring from backup file
1557 # changes of dirstate out after restoring from backup file
1558 self.invalidate()
1558 self.invalidate()
1559 filename = self._actualfilename(tr)
1559 filename = self._actualfilename(tr)
1560 o = self._opener
1560 o = self._opener
1561 if util.samefile(o.join(backupname), o.join(filename)):
1561 if util.samefile(o.join(backupname), o.join(filename)):
1562 o.unlink(backupname)
1562 o.unlink(backupname)
1563 else:
1563 else:
1564 o.rename(backupname, filename, checkambig=True)
1564 o.rename(backupname, filename, checkambig=True)
1565
1565
1566 def clearbackup(self, tr, backupname):
1566 def clearbackup(self, tr, backupname):
1567 '''Clear backup file'''
1567 '''Clear backup file'''
1568 self._opener.unlink(backupname)
1568 self._opener.unlink(backupname)
@@ -1,962 +1,965 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 DirstateItem = parsers.DirstateItem
30 if rustmod is None:
31 DirstateItem = parsers.DirstateItem
32 else:
33 DirstateItem = rustmod.DirstateItem
31
34
32 rangemask = 0x7FFFFFFF
35 rangemask = 0x7FFFFFFF
33
36
34
37
35 class dirstatemap(object):
38 class dirstatemap(object):
36 """Map encapsulating the dirstate's contents.
39 """Map encapsulating the dirstate's contents.
37
40
38 The dirstate contains the following state:
41 The dirstate contains the following state:
39
42
40 - `identity` is the identity of the dirstate file, which can be used to
43 - `identity` is the identity of the dirstate file, which can be used to
41 detect when changes have occurred to the dirstate file.
44 detect when changes have occurred to the dirstate file.
42
45
43 - `parents` is a pair containing the parents of the working copy. The
46 - `parents` is a pair containing the parents of the working copy. The
44 parents are updated by calling `setparents`.
47 parents are updated by calling `setparents`.
45
48
46 - the state map maps filenames to tuples of (state, mode, size, mtime),
49 - the state map maps filenames to tuples of (state, mode, size, mtime),
47 where state is a single character representing 'normal', 'added',
50 where state is a single character representing 'normal', 'added',
48 'removed', or 'merged'. It is read by treating the dirstate as a
51 'removed', or 'merged'. It is read by treating the dirstate as a
49 dict. File state is updated by calling various methods (see each
52 dict. File state is updated by calling various methods (see each
50 documentation for details):
53 documentation for details):
51
54
52 - `reset_state`,
55 - `reset_state`,
53 - `set_tracked`
56 - `set_tracked`
54 - `set_untracked`
57 - `set_untracked`
55 - `set_clean`
58 - `set_clean`
56 - `set_possibly_dirty`
59 - `set_possibly_dirty`
57
60
58 - `copymap` maps destination filenames to their source filename.
61 - `copymap` maps destination filenames to their source filename.
59
62
60 The dirstate also provides the following views onto the state:
63 The dirstate also provides the following views onto the state:
61
64
62 - `nonnormalset` is a set of the filenames that have state other
65 - `nonnormalset` is a set of the filenames that have state other
63 than 'normal', or are normal but have an mtime of -1 ('normallookup').
66 than 'normal', or are normal but have an mtime of -1 ('normallookup').
64
67
65 - `otherparentset` is a set of the filenames that are marked as coming
68 - `otherparentset` is a set of the filenames that are marked as coming
66 from the second parent when the dirstate is currently being merged.
69 from the second parent when the dirstate is currently being merged.
67
70
68 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
71 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
69 form that they appear as in the dirstate.
72 form that they appear as in the dirstate.
70
73
71 - `dirfoldmap` is a dict mapping normalized directory names to the
74 - `dirfoldmap` is a dict mapping normalized directory names to the
72 denormalized form that they appear as in the dirstate.
75 denormalized form that they appear as in the dirstate.
73 """
76 """
74
77
75 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
78 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
76 self._ui = ui
79 self._ui = ui
77 self._opener = opener
80 self._opener = opener
78 self._root = root
81 self._root = root
79 self._filename = b'dirstate'
82 self._filename = b'dirstate'
80 self._nodelen = 20
83 self._nodelen = 20
81 self._nodeconstants = nodeconstants
84 self._nodeconstants = nodeconstants
82 assert (
85 assert (
83 not use_dirstate_v2
86 not use_dirstate_v2
84 ), "should have detected unsupported requirement"
87 ), "should have detected unsupported requirement"
85
88
86 self._parents = None
89 self._parents = None
87 self._dirtyparents = False
90 self._dirtyparents = False
88
91
89 # for consistent view between _pl() and _read() invocations
92 # for consistent view between _pl() and _read() invocations
90 self._pendingmode = None
93 self._pendingmode = None
91
94
92 @propertycache
95 @propertycache
93 def _map(self):
96 def _map(self):
94 self._map = {}
97 self._map = {}
95 self.read()
98 self.read()
96 return self._map
99 return self._map
97
100
98 @propertycache
101 @propertycache
99 def copymap(self):
102 def copymap(self):
100 self.copymap = {}
103 self.copymap = {}
101 self._map
104 self._map
102 return self.copymap
105 return self.copymap
103
106
104 def clear(self):
107 def clear(self):
105 self._map.clear()
108 self._map.clear()
106 self.copymap.clear()
109 self.copymap.clear()
107 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
110 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
108 util.clearcachedproperty(self, b"_dirs")
111 util.clearcachedproperty(self, b"_dirs")
109 util.clearcachedproperty(self, b"_alldirs")
112 util.clearcachedproperty(self, b"_alldirs")
110 util.clearcachedproperty(self, b"filefoldmap")
113 util.clearcachedproperty(self, b"filefoldmap")
111 util.clearcachedproperty(self, b"dirfoldmap")
114 util.clearcachedproperty(self, b"dirfoldmap")
112 util.clearcachedproperty(self, b"nonnormalset")
115 util.clearcachedproperty(self, b"nonnormalset")
113 util.clearcachedproperty(self, b"otherparentset")
116 util.clearcachedproperty(self, b"otherparentset")
114
117
115 def items(self):
118 def items(self):
116 return pycompat.iteritems(self._map)
119 return pycompat.iteritems(self._map)
117
120
118 # forward for python2,3 compat
121 # forward for python2,3 compat
119 iteritems = items
122 iteritems = items
120
123
121 def debug_iter(self, all):
124 def debug_iter(self, all):
122 """
125 """
123 Return an iterator of (filename, state, mode, size, mtime) tuples
126 Return an iterator of (filename, state, mode, size, mtime) tuples
124
127
125 `all` is unused when Rust is not enabled
128 `all` is unused when Rust is not enabled
126 """
129 """
127 for (filename, item) in self.items():
130 for (filename, item) in self.items():
128 yield (filename, item.state, item.mode, item.size, item.mtime)
131 yield (filename, item.state, item.mode, item.size, item.mtime)
129
132
130 def __len__(self):
133 def __len__(self):
131 return len(self._map)
134 return len(self._map)
132
135
133 def __iter__(self):
136 def __iter__(self):
134 return iter(self._map)
137 return iter(self._map)
135
138
136 def get(self, key, default=None):
139 def get(self, key, default=None):
137 return self._map.get(key, default)
140 return self._map.get(key, default)
138
141
139 def __contains__(self, key):
142 def __contains__(self, key):
140 return key in self._map
143 return key in self._map
141
144
142 def __getitem__(self, key):
145 def __getitem__(self, key):
143 return self._map[key]
146 return self._map[key]
144
147
145 def keys(self):
148 def keys(self):
146 return self._map.keys()
149 return self._map.keys()
147
150
148 def preload(self):
151 def preload(self):
149 """Loads the underlying data, if it's not already loaded"""
152 """Loads the underlying data, if it's not already loaded"""
150 self._map
153 self._map
151
154
152 def _dirs_incr(self, filename, old_entry=None):
155 def _dirs_incr(self, filename, old_entry=None):
153 """incremente the dirstate counter if applicable"""
156 """incremente the dirstate counter if applicable"""
154 if (
157 if (
155 old_entry is None or old_entry.removed
158 old_entry is None or old_entry.removed
156 ) and "_dirs" in self.__dict__:
159 ) and "_dirs" in self.__dict__:
157 self._dirs.addpath(filename)
160 self._dirs.addpath(filename)
158 if old_entry is None and "_alldirs" in self.__dict__:
161 if old_entry is None and "_alldirs" in self.__dict__:
159 self._alldirs.addpath(filename)
162 self._alldirs.addpath(filename)
160
163
161 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
164 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
162 """decremente the dirstate counter if applicable"""
165 """decremente the dirstate counter if applicable"""
163 if old_entry is not None:
166 if old_entry is not None:
164 if "_dirs" in self.__dict__ and not old_entry.removed:
167 if "_dirs" in self.__dict__ and not old_entry.removed:
165 self._dirs.delpath(filename)
168 self._dirs.delpath(filename)
166 if "_alldirs" in self.__dict__ and not remove_variant:
169 if "_alldirs" in self.__dict__ and not remove_variant:
167 self._alldirs.delpath(filename)
170 self._alldirs.delpath(filename)
168 elif remove_variant and "_alldirs" in self.__dict__:
171 elif remove_variant and "_alldirs" in self.__dict__:
169 self._alldirs.addpath(filename)
172 self._alldirs.addpath(filename)
170 if "filefoldmap" in self.__dict__:
173 if "filefoldmap" in self.__dict__:
171 normed = util.normcase(filename)
174 normed = util.normcase(filename)
172 self.filefoldmap.pop(normed, None)
175 self.filefoldmap.pop(normed, None)
173
176
174 def set_possibly_dirty(self, filename):
177 def set_possibly_dirty(self, filename):
175 """record that the current state of the file on disk is unknown"""
178 """record that the current state of the file on disk is unknown"""
176 self[filename].set_possibly_dirty()
179 self[filename].set_possibly_dirty()
177
180
178 def set_clean(self, filename, mode, size, mtime):
181 def set_clean(self, filename, mode, size, mtime):
179 """mark a file as back to a clean state"""
182 """mark a file as back to a clean state"""
180 entry = self[filename]
183 entry = self[filename]
181 mtime = mtime & rangemask
184 mtime = mtime & rangemask
182 size = size & rangemask
185 size = size & rangemask
183 entry.set_clean(mode, size, mtime)
186 entry.set_clean(mode, size, mtime)
184 self.copymap.pop(filename, None)
187 self.copymap.pop(filename, None)
185 self.nonnormalset.discard(filename)
188 self.nonnormalset.discard(filename)
186
189
187 def reset_state(
190 def reset_state(
188 self,
191 self,
189 filename,
192 filename,
190 wc_tracked=False,
193 wc_tracked=False,
191 p1_tracked=False,
194 p1_tracked=False,
192 p2_tracked=False,
195 p2_tracked=False,
193 merged=False,
196 merged=False,
194 clean_p1=False,
197 clean_p1=False,
195 clean_p2=False,
198 clean_p2=False,
196 possibly_dirty=False,
199 possibly_dirty=False,
197 parentfiledata=None,
200 parentfiledata=None,
198 ):
201 ):
199 """Set a entry to a given state, diregarding all previous state
202 """Set a entry to a given state, diregarding all previous state
200
203
201 This is to be used by the part of the dirstate API dedicated to
204 This is to be used by the part of the dirstate API dedicated to
202 adjusting the dirstate after a update/merge.
205 adjusting the dirstate after a update/merge.
203
206
204 note: calling this might result to no entry existing at all if the
207 note: calling this might result to no entry existing at all if the
205 dirstate map does not see any point at having one for this file
208 dirstate map does not see any point at having one for this file
206 anymore.
209 anymore.
207 """
210 """
208 if merged and (clean_p1 or clean_p2):
211 if merged and (clean_p1 or clean_p2):
209 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
212 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
210 raise error.ProgrammingError(msg)
213 raise error.ProgrammingError(msg)
211 # copy information are now outdated
214 # copy information are now outdated
212 # (maybe new information should be in directly passed to this function)
215 # (maybe new information should be in directly passed to this function)
213 self.copymap.pop(filename, None)
216 self.copymap.pop(filename, None)
214
217
215 if not (p1_tracked or p2_tracked or wc_tracked):
218 if not (p1_tracked or p2_tracked or wc_tracked):
216 old_entry = self._map.pop(filename, None)
219 old_entry = self._map.pop(filename, None)
217 self._dirs_decr(filename, old_entry=old_entry)
220 self._dirs_decr(filename, old_entry=old_entry)
218 self.nonnormalset.discard(filename)
221 self.nonnormalset.discard(filename)
219 self.copymap.pop(filename, None)
222 self.copymap.pop(filename, None)
220 return
223 return
221 elif merged:
224 elif merged:
222 # XXX might be merged and removed ?
225 # XXX might be merged and removed ?
223 entry = self.get(filename)
226 entry = self.get(filename)
224 if entry is None or not entry.tracked:
227 if entry is None or not entry.tracked:
225 # XXX mostly replicate dirstate.other parent. We should get
228 # XXX mostly replicate dirstate.other parent. We should get
226 # the higher layer to pass us more reliable data where `merged`
229 # the higher layer to pass us more reliable data where `merged`
227 # actually mean merged. Dropping this clause will show failure
230 # actually mean merged. Dropping this clause will show failure
228 # in `test-graft.t`
231 # in `test-graft.t`
229 merged = False
232 merged = False
230 clean_p2 = True
233 clean_p2 = True
231 elif not (p1_tracked or p2_tracked) and wc_tracked:
234 elif not (p1_tracked or p2_tracked) and wc_tracked:
232 pass # file is added, nothing special to adjust
235 pass # file is added, nothing special to adjust
233 elif (p1_tracked or p2_tracked) and not wc_tracked:
236 elif (p1_tracked or p2_tracked) and not wc_tracked:
234 pass
237 pass
235 elif clean_p2 and wc_tracked:
238 elif clean_p2 and wc_tracked:
236 if p1_tracked or self.get(filename) is not None:
239 if p1_tracked or self.get(filename) is not None:
237 # XXX the `self.get` call is catching some case in
240 # XXX the `self.get` call is catching some case in
238 # `test-merge-remove.t` where the file is tracked in p1, the
241 # `test-merge-remove.t` where the file is tracked in p1, the
239 # p1_tracked argument is False.
242 # p1_tracked argument is False.
240 #
243 #
241 # In addition, this seems to be a case where the file is marked
244 # In addition, this seems to be a case where the file is marked
242 # as merged without actually being the result of a merge
245 # as merged without actually being the result of a merge
243 # action. So thing are not ideal here.
246 # action. So thing are not ideal here.
244 merged = True
247 merged = True
245 clean_p2 = False
248 clean_p2 = False
246 elif not p1_tracked and p2_tracked and wc_tracked:
249 elif not p1_tracked and p2_tracked and wc_tracked:
247 clean_p2 = True
250 clean_p2 = True
248 elif possibly_dirty:
251 elif possibly_dirty:
249 pass
252 pass
250 elif wc_tracked:
253 elif wc_tracked:
251 # this is a "normal" file
254 # this is a "normal" file
252 if parentfiledata is None:
255 if parentfiledata is None:
253 msg = b'failed to pass parentfiledata for a normal file: %s'
256 msg = b'failed to pass parentfiledata for a normal file: %s'
254 msg %= filename
257 msg %= filename
255 raise error.ProgrammingError(msg)
258 raise error.ProgrammingError(msg)
256 else:
259 else:
257 assert False, 'unreachable'
260 assert False, 'unreachable'
258
261
259 old_entry = self._map.get(filename)
262 old_entry = self._map.get(filename)
260 self._dirs_incr(filename, old_entry)
263 self._dirs_incr(filename, old_entry)
261 entry = DirstateItem(
264 entry = DirstateItem(
262 wc_tracked=wc_tracked,
265 wc_tracked=wc_tracked,
263 p1_tracked=p1_tracked,
266 p1_tracked=p1_tracked,
264 p2_tracked=p2_tracked,
267 p2_tracked=p2_tracked,
265 merged=merged,
268 merged=merged,
266 clean_p1=clean_p1,
269 clean_p1=clean_p1,
267 clean_p2=clean_p2,
270 clean_p2=clean_p2,
268 possibly_dirty=possibly_dirty,
271 possibly_dirty=possibly_dirty,
269 parentfiledata=parentfiledata,
272 parentfiledata=parentfiledata,
270 )
273 )
271 if entry.dm_nonnormal:
274 if entry.dm_nonnormal:
272 self.nonnormalset.add(filename)
275 self.nonnormalset.add(filename)
273 else:
276 else:
274 self.nonnormalset.discard(filename)
277 self.nonnormalset.discard(filename)
275 if entry.dm_otherparent:
278 if entry.dm_otherparent:
276 self.otherparentset.add(filename)
279 self.otherparentset.add(filename)
277 else:
280 else:
278 self.otherparentset.discard(filename)
281 self.otherparentset.discard(filename)
279 self._map[filename] = entry
282 self._map[filename] = entry
280
283
281 def set_tracked(self, filename):
284 def set_tracked(self, filename):
282 new = False
285 new = False
283 entry = self.get(filename)
286 entry = self.get(filename)
284 if entry is None:
287 if entry is None:
285 self._dirs_incr(filename)
288 self._dirs_incr(filename)
286 entry = DirstateItem(
289 entry = DirstateItem(
287 p1_tracked=False,
290 p1_tracked=False,
288 p2_tracked=False,
291 p2_tracked=False,
289 wc_tracked=True,
292 wc_tracked=True,
290 merged=False,
293 merged=False,
291 clean_p1=False,
294 clean_p1=False,
292 clean_p2=False,
295 clean_p2=False,
293 possibly_dirty=False,
296 possibly_dirty=False,
294 parentfiledata=None,
297 parentfiledata=None,
295 )
298 )
296 self._map[filename] = entry
299 self._map[filename] = entry
297 if entry.dm_nonnormal:
300 if entry.dm_nonnormal:
298 self.nonnormalset.add(filename)
301 self.nonnormalset.add(filename)
299 new = True
302 new = True
300 elif not entry.tracked:
303 elif not entry.tracked:
301 self._dirs_incr(filename, entry)
304 self._dirs_incr(filename, entry)
302 entry.set_tracked()
305 entry.set_tracked()
303 new = True
306 new = True
304 else:
307 else:
305 # XXX This is probably overkill for more case, but we need this to
308 # XXX This is probably overkill for more case, but we need this to
306 # fully replace the `normallookup` call with `set_tracked` one.
309 # fully replace the `normallookup` call with `set_tracked` one.
307 # Consider smoothing this in the future.
310 # Consider smoothing this in the future.
308 self.set_possibly_dirty(filename)
311 self.set_possibly_dirty(filename)
309 return new
312 return new
310
313
311 def set_untracked(self, f):
314 def set_untracked(self, f):
312 """Mark a file as no longer tracked in the dirstate map"""
315 """Mark a file as no longer tracked in the dirstate map"""
313 entry = self.get(f)
316 entry = self.get(f)
314 if entry is None:
317 if entry is None:
315 return False
318 return False
316 else:
319 else:
317 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
320 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
318 if not entry.merged:
321 if not entry.merged:
319 self.copymap.pop(f, None)
322 self.copymap.pop(f, None)
320 if entry.added:
323 if entry.added:
321 self.nonnormalset.discard(f)
324 self.nonnormalset.discard(f)
322 self._map.pop(f, None)
325 self._map.pop(f, None)
323 else:
326 else:
324 self.nonnormalset.add(f)
327 self.nonnormalset.add(f)
325 if entry.from_p2:
328 if entry.from_p2:
326 self.otherparentset.add(f)
329 self.otherparentset.add(f)
327 entry.set_untracked()
330 entry.set_untracked()
328 return True
331 return True
329
332
330 def clearambiguoustimes(self, files, now):
333 def clearambiguoustimes(self, files, now):
331 for f in files:
334 for f in files:
332 e = self.get(f)
335 e = self.get(f)
333 if e is not None and e.need_delay(now):
336 if e is not None and e.need_delay(now):
334 e.set_possibly_dirty()
337 e.set_possibly_dirty()
335 self.nonnormalset.add(f)
338 self.nonnormalset.add(f)
336
339
337 def nonnormalentries(self):
340 def nonnormalentries(self):
338 '''Compute the nonnormal dirstate entries from the dmap'''
341 '''Compute the nonnormal dirstate entries from the dmap'''
339 try:
342 try:
340 return parsers.nonnormalotherparententries(self._map)
343 return parsers.nonnormalotherparententries(self._map)
341 except AttributeError:
344 except AttributeError:
342 nonnorm = set()
345 nonnorm = set()
343 otherparent = set()
346 otherparent = set()
344 for fname, e in pycompat.iteritems(self._map):
347 for fname, e in pycompat.iteritems(self._map):
345 if e.dm_nonnormal:
348 if e.dm_nonnormal:
346 nonnorm.add(fname)
349 nonnorm.add(fname)
347 if e.from_p2:
350 if e.from_p2:
348 otherparent.add(fname)
351 otherparent.add(fname)
349 return nonnorm, otherparent
352 return nonnorm, otherparent
350
353
351 @propertycache
354 @propertycache
352 def filefoldmap(self):
355 def filefoldmap(self):
353 """Returns a dictionary mapping normalized case paths to their
356 """Returns a dictionary mapping normalized case paths to their
354 non-normalized versions.
357 non-normalized versions.
355 """
358 """
356 try:
359 try:
357 makefilefoldmap = parsers.make_file_foldmap
360 makefilefoldmap = parsers.make_file_foldmap
358 except AttributeError:
361 except AttributeError:
359 pass
362 pass
360 else:
363 else:
361 return makefilefoldmap(
364 return makefilefoldmap(
362 self._map, util.normcasespec, util.normcasefallback
365 self._map, util.normcasespec, util.normcasefallback
363 )
366 )
364
367
365 f = {}
368 f = {}
366 normcase = util.normcase
369 normcase = util.normcase
367 for name, s in pycompat.iteritems(self._map):
370 for name, s in pycompat.iteritems(self._map):
368 if not s.removed:
371 if not s.removed:
369 f[normcase(name)] = name
372 f[normcase(name)] = name
370 f[b'.'] = b'.' # prevents useless util.fspath() invocation
373 f[b'.'] = b'.' # prevents useless util.fspath() invocation
371 return f
374 return f
372
375
373 def hastrackeddir(self, d):
376 def hastrackeddir(self, d):
374 """
377 """
375 Returns True if the dirstate contains a tracked (not removed) file
378 Returns True if the dirstate contains a tracked (not removed) file
376 in this directory.
379 in this directory.
377 """
380 """
378 return d in self._dirs
381 return d in self._dirs
379
382
380 def hasdir(self, d):
383 def hasdir(self, d):
381 """
384 """
382 Returns True if the dirstate contains a file (tracked or removed)
385 Returns True if the dirstate contains a file (tracked or removed)
383 in this directory.
386 in this directory.
384 """
387 """
385 return d in self._alldirs
388 return d in self._alldirs
386
389
387 @propertycache
390 @propertycache
388 def _dirs(self):
391 def _dirs(self):
389 return pathutil.dirs(self._map, only_tracked=True)
392 return pathutil.dirs(self._map, only_tracked=True)
390
393
391 @propertycache
394 @propertycache
392 def _alldirs(self):
395 def _alldirs(self):
393 return pathutil.dirs(self._map)
396 return pathutil.dirs(self._map)
394
397
395 def _opendirstatefile(self):
398 def _opendirstatefile(self):
396 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
399 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
397 if self._pendingmode is not None and self._pendingmode != mode:
400 if self._pendingmode is not None and self._pendingmode != mode:
398 fp.close()
401 fp.close()
399 raise error.Abort(
402 raise error.Abort(
400 _(b'working directory state may be changed parallelly')
403 _(b'working directory state may be changed parallelly')
401 )
404 )
402 self._pendingmode = mode
405 self._pendingmode = mode
403 return fp
406 return fp
404
407
405 def parents(self):
408 def parents(self):
406 if not self._parents:
409 if not self._parents:
407 try:
410 try:
408 fp = self._opendirstatefile()
411 fp = self._opendirstatefile()
409 st = fp.read(2 * self._nodelen)
412 st = fp.read(2 * self._nodelen)
410 fp.close()
413 fp.close()
411 except IOError as err:
414 except IOError as err:
412 if err.errno != errno.ENOENT:
415 if err.errno != errno.ENOENT:
413 raise
416 raise
414 # File doesn't exist, so the current state is empty
417 # File doesn't exist, so the current state is empty
415 st = b''
418 st = b''
416
419
417 l = len(st)
420 l = len(st)
418 if l == self._nodelen * 2:
421 if l == self._nodelen * 2:
419 self._parents = (
422 self._parents = (
420 st[: self._nodelen],
423 st[: self._nodelen],
421 st[self._nodelen : 2 * self._nodelen],
424 st[self._nodelen : 2 * self._nodelen],
422 )
425 )
423 elif l == 0:
426 elif l == 0:
424 self._parents = (
427 self._parents = (
425 self._nodeconstants.nullid,
428 self._nodeconstants.nullid,
426 self._nodeconstants.nullid,
429 self._nodeconstants.nullid,
427 )
430 )
428 else:
431 else:
429 raise error.Abort(
432 raise error.Abort(
430 _(b'working directory state appears damaged!')
433 _(b'working directory state appears damaged!')
431 )
434 )
432
435
433 return self._parents
436 return self._parents
434
437
435 def setparents(self, p1, p2):
438 def setparents(self, p1, p2):
436 self._parents = (p1, p2)
439 self._parents = (p1, p2)
437 self._dirtyparents = True
440 self._dirtyparents = True
438
441
439 def read(self):
442 def read(self):
440 # ignore HG_PENDING because identity is used only for writing
443 # ignore HG_PENDING because identity is used only for writing
441 self.identity = util.filestat.frompath(
444 self.identity = util.filestat.frompath(
442 self._opener.join(self._filename)
445 self._opener.join(self._filename)
443 )
446 )
444
447
445 try:
448 try:
446 fp = self._opendirstatefile()
449 fp = self._opendirstatefile()
447 try:
450 try:
448 st = fp.read()
451 st = fp.read()
449 finally:
452 finally:
450 fp.close()
453 fp.close()
451 except IOError as err:
454 except IOError as err:
452 if err.errno != errno.ENOENT:
455 if err.errno != errno.ENOENT:
453 raise
456 raise
454 return
457 return
455 if not st:
458 if not st:
456 return
459 return
457
460
458 if util.safehasattr(parsers, b'dict_new_presized'):
461 if util.safehasattr(parsers, b'dict_new_presized'):
459 # Make an estimate of the number of files in the dirstate based on
462 # Make an estimate of the number of files in the dirstate based on
460 # its size. This trades wasting some memory for avoiding costly
463 # its size. This trades wasting some memory for avoiding costly
461 # resizes. Each entry have a prefix of 17 bytes followed by one or
464 # resizes. Each entry have a prefix of 17 bytes followed by one or
462 # two path names. Studies on various large-scale real-world repositories
465 # two path names. Studies on various large-scale real-world repositories
463 # found 54 bytes a reasonable upper limit for the average path names.
466 # found 54 bytes a reasonable upper limit for the average path names.
464 # Copy entries are ignored for the sake of this estimate.
467 # Copy entries are ignored for the sake of this estimate.
465 self._map = parsers.dict_new_presized(len(st) // 71)
468 self._map = parsers.dict_new_presized(len(st) // 71)
466
469
467 # Python's garbage collector triggers a GC each time a certain number
470 # Python's garbage collector triggers a GC each time a certain number
468 # of container objects (the number being defined by
471 # of container objects (the number being defined by
469 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
472 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
470 # for each file in the dirstate. The C version then immediately marks
473 # for each file in the dirstate. The C version then immediately marks
471 # them as not to be tracked by the collector. However, this has no
474 # them as not to be tracked by the collector. However, this has no
472 # effect on when GCs are triggered, only on what objects the GC looks
475 # effect on when GCs are triggered, only on what objects the GC looks
473 # into. This means that O(number of files) GCs are unavoidable.
476 # into. This means that O(number of files) GCs are unavoidable.
474 # Depending on when in the process's lifetime the dirstate is parsed,
477 # Depending on when in the process's lifetime the dirstate is parsed,
475 # this can get very expensive. As a workaround, disable GC while
478 # this can get very expensive. As a workaround, disable GC while
476 # parsing the dirstate.
479 # parsing the dirstate.
477 #
480 #
478 # (we cannot decorate the function directly since it is in a C module)
481 # (we cannot decorate the function directly since it is in a C module)
479 parse_dirstate = util.nogc(parsers.parse_dirstate)
482 parse_dirstate = util.nogc(parsers.parse_dirstate)
480 p = parse_dirstate(self._map, self.copymap, st)
483 p = parse_dirstate(self._map, self.copymap, st)
481 if not self._dirtyparents:
484 if not self._dirtyparents:
482 self.setparents(*p)
485 self.setparents(*p)
483
486
484 # Avoid excess attribute lookups by fast pathing certain checks
487 # Avoid excess attribute lookups by fast pathing certain checks
485 self.__contains__ = self._map.__contains__
488 self.__contains__ = self._map.__contains__
486 self.__getitem__ = self._map.__getitem__
489 self.__getitem__ = self._map.__getitem__
487 self.get = self._map.get
490 self.get = self._map.get
488
491
489 def write(self, _tr, st, now):
492 def write(self, _tr, st, now):
490 st.write(
493 st.write(
491 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
494 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
492 )
495 )
493 st.close()
496 st.close()
494 self._dirtyparents = False
497 self._dirtyparents = False
495 self.nonnormalset, self.otherparentset = self.nonnormalentries()
498 self.nonnormalset, self.otherparentset = self.nonnormalentries()
496
499
497 @propertycache
500 @propertycache
498 def nonnormalset(self):
501 def nonnormalset(self):
499 nonnorm, otherparents = self.nonnormalentries()
502 nonnorm, otherparents = self.nonnormalentries()
500 self.otherparentset = otherparents
503 self.otherparentset = otherparents
501 return nonnorm
504 return nonnorm
502
505
503 @propertycache
506 @propertycache
504 def otherparentset(self):
507 def otherparentset(self):
505 nonnorm, otherparents = self.nonnormalentries()
508 nonnorm, otherparents = self.nonnormalentries()
506 self.nonnormalset = nonnorm
509 self.nonnormalset = nonnorm
507 return otherparents
510 return otherparents
508
511
509 def non_normal_or_other_parent_paths(self):
512 def non_normal_or_other_parent_paths(self):
510 return self.nonnormalset.union(self.otherparentset)
513 return self.nonnormalset.union(self.otherparentset)
511
514
512 @propertycache
515 @propertycache
513 def identity(self):
516 def identity(self):
514 self._map
517 self._map
515 return self.identity
518 return self.identity
516
519
517 @propertycache
520 @propertycache
518 def dirfoldmap(self):
521 def dirfoldmap(self):
519 f = {}
522 f = {}
520 normcase = util.normcase
523 normcase = util.normcase
521 for name in self._dirs:
524 for name in self._dirs:
522 f[normcase(name)] = name
525 f[normcase(name)] = name
523 return f
526 return f
524
527
525
528
526 if rustmod is not None:
529 if rustmod is not None:
527
530
528 class dirstatemap(object):
531 class dirstatemap(object):
529 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
532 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
530 self._use_dirstate_v2 = use_dirstate_v2
533 self._use_dirstate_v2 = use_dirstate_v2
531 self._nodeconstants = nodeconstants
534 self._nodeconstants = nodeconstants
532 self._ui = ui
535 self._ui = ui
533 self._opener = opener
536 self._opener = opener
534 self._root = root
537 self._root = root
535 self._filename = b'dirstate'
538 self._filename = b'dirstate'
536 self._nodelen = 20 # Also update Rust code when changing this!
539 self._nodelen = 20 # Also update Rust code when changing this!
537 self._parents = None
540 self._parents = None
538 self._dirtyparents = False
541 self._dirtyparents = False
539 self._docket = None
542 self._docket = None
540
543
541 # for consistent view between _pl() and _read() invocations
544 # for consistent view between _pl() and _read() invocations
542 self._pendingmode = None
545 self._pendingmode = None
543
546
544 self._use_dirstate_tree = self._ui.configbool(
547 self._use_dirstate_tree = self._ui.configbool(
545 b"experimental",
548 b"experimental",
546 b"dirstate-tree.in-memory",
549 b"dirstate-tree.in-memory",
547 False,
550 False,
548 )
551 )
549
552
550 def addfile(
553 def addfile(
551 self,
554 self,
552 f,
555 f,
553 mode=0,
556 mode=0,
554 size=None,
557 size=None,
555 mtime=None,
558 mtime=None,
556 added=False,
559 added=False,
557 merged=False,
560 merged=False,
558 from_p2=False,
561 from_p2=False,
559 possibly_dirty=False,
562 possibly_dirty=False,
560 ):
563 ):
561 ret = self._rustmap.addfile(
564 ret = self._rustmap.addfile(
562 f,
565 f,
563 mode,
566 mode,
564 size,
567 size,
565 mtime,
568 mtime,
566 added,
569 added,
567 merged,
570 merged,
568 from_p2,
571 from_p2,
569 possibly_dirty,
572 possibly_dirty,
570 )
573 )
571 if added:
574 if added:
572 self.copymap.pop(f, None)
575 self.copymap.pop(f, None)
573 return ret
576 return ret
574
577
575 def reset_state(
578 def reset_state(
576 self,
579 self,
577 filename,
580 filename,
578 wc_tracked=False,
581 wc_tracked=False,
579 p1_tracked=False,
582 p1_tracked=False,
580 p2_tracked=False,
583 p2_tracked=False,
581 merged=False,
584 merged=False,
582 clean_p1=False,
585 clean_p1=False,
583 clean_p2=False,
586 clean_p2=False,
584 possibly_dirty=False,
587 possibly_dirty=False,
585 parentfiledata=None,
588 parentfiledata=None,
586 ):
589 ):
587 """Set a entry to a given state, disregarding all previous state
590 """Set a entry to a given state, disregarding all previous state
588
591
589 This is to be used by the part of the dirstate API dedicated to
592 This is to be used by the part of the dirstate API dedicated to
590 adjusting the dirstate after a update/merge.
593 adjusting the dirstate after a update/merge.
591
594
592 note: calling this might result to no entry existing at all if the
595 note: calling this might result to no entry existing at all if the
593 dirstate map does not see any point at having one for this file
596 dirstate map does not see any point at having one for this file
594 anymore.
597 anymore.
595 """
598 """
596 if merged and (clean_p1 or clean_p2):
599 if merged and (clean_p1 or clean_p2):
597 msg = (
600 msg = (
598 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
601 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
599 )
602 )
600 raise error.ProgrammingError(msg)
603 raise error.ProgrammingError(msg)
601 # copy information are now outdated
604 # copy information are now outdated
602 # (maybe new information should be in directly passed to this function)
605 # (maybe new information should be in directly passed to this function)
603 self.copymap.pop(filename, None)
606 self.copymap.pop(filename, None)
604
607
605 if not (p1_tracked or p2_tracked or wc_tracked):
608 if not (p1_tracked or p2_tracked or wc_tracked):
606 self.dropfile(filename)
609 self.dropfile(filename)
607 elif merged:
610 elif merged:
608 # XXX might be merged and removed ?
611 # XXX might be merged and removed ?
609 entry = self.get(filename)
612 entry = self.get(filename)
610 if entry is not None and entry.tracked:
613 if entry is not None and entry.tracked:
611 # XXX mostly replicate dirstate.other parent. We should get
614 # XXX mostly replicate dirstate.other parent. We should get
612 # the higher layer to pass us more reliable data where `merged`
615 # the higher layer to pass us more reliable data where `merged`
613 # actually mean merged. Dropping the else clause will show
616 # actually mean merged. Dropping the else clause will show
614 # failure in `test-graft.t`
617 # failure in `test-graft.t`
615 self.addfile(filename, merged=True)
618 self.addfile(filename, merged=True)
616 else:
619 else:
617 self.addfile(filename, from_p2=True)
620 self.addfile(filename, from_p2=True)
618 elif not (p1_tracked or p2_tracked) and wc_tracked:
621 elif not (p1_tracked or p2_tracked) and wc_tracked:
619 self.addfile(
622 self.addfile(
620 filename, added=True, possibly_dirty=possibly_dirty
623 filename, added=True, possibly_dirty=possibly_dirty
621 )
624 )
622 elif (p1_tracked or p2_tracked) and not wc_tracked:
625 elif (p1_tracked or p2_tracked) and not wc_tracked:
623 # XXX might be merged and removed ?
626 # XXX might be merged and removed ?
624 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
627 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
625 self.nonnormalset.add(filename)
628 self.nonnormalset.add(filename)
626 elif clean_p2 and wc_tracked:
629 elif clean_p2 and wc_tracked:
627 if p1_tracked or self.get(filename) is not None:
630 if p1_tracked or self.get(filename) is not None:
628 # XXX the `self.get` call is catching some case in
631 # XXX the `self.get` call is catching some case in
629 # `test-merge-remove.t` where the file is tracked in p1, the
632 # `test-merge-remove.t` where the file is tracked in p1, the
630 # p1_tracked argument is False.
633 # p1_tracked argument is False.
631 #
634 #
632 # In addition, this seems to be a case where the file is marked
635 # In addition, this seems to be a case where the file is marked
633 # as merged without actually being the result of a merge
636 # as merged without actually being the result of a merge
634 # action. So thing are not ideal here.
637 # action. So thing are not ideal here.
635 self.addfile(filename, merged=True)
638 self.addfile(filename, merged=True)
636 else:
639 else:
637 self.addfile(filename, from_p2=True)
640 self.addfile(filename, from_p2=True)
638 elif not p1_tracked and p2_tracked and wc_tracked:
641 elif not p1_tracked and p2_tracked and wc_tracked:
639 self.addfile(
642 self.addfile(
640 filename, from_p2=True, possibly_dirty=possibly_dirty
643 filename, from_p2=True, possibly_dirty=possibly_dirty
641 )
644 )
642 elif possibly_dirty:
645 elif possibly_dirty:
643 self.addfile(filename, possibly_dirty=possibly_dirty)
646 self.addfile(filename, possibly_dirty=possibly_dirty)
644 elif wc_tracked:
647 elif wc_tracked:
645 # this is a "normal" file
648 # this is a "normal" file
646 if parentfiledata is None:
649 if parentfiledata is None:
647 msg = b'failed to pass parentfiledata for a normal file: %s'
650 msg = b'failed to pass parentfiledata for a normal file: %s'
648 msg %= filename
651 msg %= filename
649 raise error.ProgrammingError(msg)
652 raise error.ProgrammingError(msg)
650 mode, size, mtime = parentfiledata
653 mode, size, mtime = parentfiledata
651 self.addfile(filename, mode=mode, size=size, mtime=mtime)
654 self.addfile(filename, mode=mode, size=size, mtime=mtime)
652 self.nonnormalset.discard(filename)
655 self.nonnormalset.discard(filename)
653 else:
656 else:
654 assert False, 'unreachable'
657 assert False, 'unreachable'
655
658
656 def set_tracked(self, filename):
659 def set_tracked(self, filename):
657 new = False
660 new = False
658 entry = self.get(filename)
661 entry = self.get(filename)
659 if entry is None:
662 if entry is None:
660 self.addfile(filename, added=True)
663 self.addfile(filename, added=True)
661 new = True
664 new = True
662 elif not entry.tracked:
665 elif not entry.tracked:
663 entry.set_tracked()
666 entry.set_tracked()
664 self._rustmap.set_v1(filename, entry)
667 self._rustmap.set_v1(filename, entry)
665 new = True
668 new = True
666 else:
669 else:
667 # XXX This is probably overkill for more case, but we need this to
670 # XXX This is probably overkill for more case, but we need this to
668 # fully replace the `normallookup` call with `set_tracked` one.
671 # fully replace the `normallookup` call with `set_tracked` one.
669 # Consider smoothing this in the future.
672 # Consider smoothing this in the future.
670 self.set_possibly_dirty(filename)
673 self.set_possibly_dirty(filename)
671 return new
674 return new
672
675
673 def set_untracked(self, f):
676 def set_untracked(self, f):
674 """Mark a file as no longer tracked in the dirstate map"""
677 """Mark a file as no longer tracked in the dirstate map"""
675 # in merge is only trigger more logic, so it "fine" to pass it.
678 # in merge is only trigger more logic, so it "fine" to pass it.
676 #
679 #
677 # the inner rust dirstate map code need to be adjusted once the API
680 # the inner rust dirstate map code need to be adjusted once the API
678 # for dirstate/dirstatemap/DirstateItem is a bit more settled
681 # for dirstate/dirstatemap/DirstateItem is a bit more settled
679 entry = self.get(f)
682 entry = self.get(f)
680 if entry is None:
683 if entry is None:
681 return False
684 return False
682 else:
685 else:
683 if entry.added:
686 if entry.added:
684 self._rustmap.copymap().pop(f, None)
687 self._rustmap.copymap().pop(f, None)
685 self._rustmap.dropfile(f)
688 self._rustmap.dropfile(f)
686 else:
689 else:
687 self._rustmap.removefile(f, in_merge=True)
690 self._rustmap.removefile(f, in_merge=True)
688 return True
691 return True
689
692
690 def removefile(self, *args, **kwargs):
693 def removefile(self, *args, **kwargs):
691 return self._rustmap.removefile(*args, **kwargs)
694 return self._rustmap.removefile(*args, **kwargs)
692
695
693 def dropfile(self, f, *args, **kwargs):
696 def dropfile(self, f, *args, **kwargs):
694 self._rustmap.copymap().pop(f, None)
697 self._rustmap.copymap().pop(f, None)
695 return self._rustmap.dropfile(f, *args, **kwargs)
698 return self._rustmap.dropfile(f, *args, **kwargs)
696
699
697 def clearambiguoustimes(self, *args, **kwargs):
700 def clearambiguoustimes(self, *args, **kwargs):
698 return self._rustmap.clearambiguoustimes(*args, **kwargs)
701 return self._rustmap.clearambiguoustimes(*args, **kwargs)
699
702
700 def nonnormalentries(self):
703 def nonnormalentries(self):
701 return self._rustmap.nonnormalentries()
704 return self._rustmap.nonnormalentries()
702
705
703 def get(self, *args, **kwargs):
706 def get(self, *args, **kwargs):
704 return self._rustmap.get(*args, **kwargs)
707 return self._rustmap.get(*args, **kwargs)
705
708
706 @property
709 @property
707 def copymap(self):
710 def copymap(self):
708 return self._rustmap.copymap()
711 return self._rustmap.copymap()
709
712
710 def debug_iter(self, all):
713 def debug_iter(self, all):
711 """
714 """
712 Return an iterator of (filename, state, mode, size, mtime) tuples
715 Return an iterator of (filename, state, mode, size, mtime) tuples
713
716
714 `all`: also include with `state == b' '` dirstate tree nodes that
717 `all`: also include with `state == b' '` dirstate tree nodes that
715 don't have an associated `DirstateItem`.
718 don't have an associated `DirstateItem`.
716
719
717 """
720 """
718 return self._rustmap.debug_iter(all)
721 return self._rustmap.debug_iter(all)
719
722
720 def preload(self):
723 def preload(self):
721 self._rustmap
724 self._rustmap
722
725
723 def clear(self):
726 def clear(self):
724 self._rustmap.clear()
727 self._rustmap.clear()
725 self.setparents(
728 self.setparents(
726 self._nodeconstants.nullid, self._nodeconstants.nullid
729 self._nodeconstants.nullid, self._nodeconstants.nullid
727 )
730 )
728 util.clearcachedproperty(self, b"_dirs")
731 util.clearcachedproperty(self, b"_dirs")
729 util.clearcachedproperty(self, b"_alldirs")
732 util.clearcachedproperty(self, b"_alldirs")
730 util.clearcachedproperty(self, b"dirfoldmap")
733 util.clearcachedproperty(self, b"dirfoldmap")
731
734
732 def items(self):
735 def items(self):
733 return self._rustmap.items()
736 return self._rustmap.items()
734
737
735 def keys(self):
738 def keys(self):
736 return iter(self._rustmap)
739 return iter(self._rustmap)
737
740
738 def __contains__(self, key):
741 def __contains__(self, key):
739 return key in self._rustmap
742 return key in self._rustmap
740
743
741 def __getitem__(self, item):
744 def __getitem__(self, item):
742 return self._rustmap[item]
745 return self._rustmap[item]
743
746
744 def __len__(self):
747 def __len__(self):
745 return len(self._rustmap)
748 return len(self._rustmap)
746
749
747 def __iter__(self):
750 def __iter__(self):
748 return iter(self._rustmap)
751 return iter(self._rustmap)
749
752
750 # forward for python2,3 compat
753 # forward for python2,3 compat
751 iteritems = items
754 iteritems = items
752
755
753 def _opendirstatefile(self):
756 def _opendirstatefile(self):
754 fp, mode = txnutil.trypending(
757 fp, mode = txnutil.trypending(
755 self._root, self._opener, self._filename
758 self._root, self._opener, self._filename
756 )
759 )
757 if self._pendingmode is not None and self._pendingmode != mode:
760 if self._pendingmode is not None and self._pendingmode != mode:
758 fp.close()
761 fp.close()
759 raise error.Abort(
762 raise error.Abort(
760 _(b'working directory state may be changed parallelly')
763 _(b'working directory state may be changed parallelly')
761 )
764 )
762 self._pendingmode = mode
765 self._pendingmode = mode
763 return fp
766 return fp
764
767
765 def _readdirstatefile(self, size=-1):
768 def _readdirstatefile(self, size=-1):
766 try:
769 try:
767 with self._opendirstatefile() as fp:
770 with self._opendirstatefile() as fp:
768 return fp.read(size)
771 return fp.read(size)
769 except IOError as err:
772 except IOError as err:
770 if err.errno != errno.ENOENT:
773 if err.errno != errno.ENOENT:
771 raise
774 raise
772 # File doesn't exist, so the current state is empty
775 # File doesn't exist, so the current state is empty
773 return b''
776 return b''
774
777
775 def setparents(self, p1, p2):
778 def setparents(self, p1, p2):
776 self._parents = (p1, p2)
779 self._parents = (p1, p2)
777 self._dirtyparents = True
780 self._dirtyparents = True
778
781
779 def parents(self):
782 def parents(self):
780 if not self._parents:
783 if not self._parents:
781 if self._use_dirstate_v2:
784 if self._use_dirstate_v2:
782 self._parents = self.docket.parents
785 self._parents = self.docket.parents
783 else:
786 else:
784 read_len = self._nodelen * 2
787 read_len = self._nodelen * 2
785 st = self._readdirstatefile(read_len)
788 st = self._readdirstatefile(read_len)
786 l = len(st)
789 l = len(st)
787 if l == read_len:
790 if l == read_len:
788 self._parents = (
791 self._parents = (
789 st[: self._nodelen],
792 st[: self._nodelen],
790 st[self._nodelen : 2 * self._nodelen],
793 st[self._nodelen : 2 * self._nodelen],
791 )
794 )
792 elif l == 0:
795 elif l == 0:
793 self._parents = (
796 self._parents = (
794 self._nodeconstants.nullid,
797 self._nodeconstants.nullid,
795 self._nodeconstants.nullid,
798 self._nodeconstants.nullid,
796 )
799 )
797 else:
800 else:
798 raise error.Abort(
801 raise error.Abort(
799 _(b'working directory state appears damaged!')
802 _(b'working directory state appears damaged!')
800 )
803 )
801
804
802 return self._parents
805 return self._parents
803
806
804 @property
807 @property
805 def docket(self):
808 def docket(self):
806 if not self._docket:
809 if not self._docket:
807 if not self._use_dirstate_v2:
810 if not self._use_dirstate_v2:
808 raise error.ProgrammingError(
811 raise error.ProgrammingError(
809 b'dirstate only has a docket in v2 format'
812 b'dirstate only has a docket in v2 format'
810 )
813 )
811 self._docket = docketmod.DirstateDocket.parse(
814 self._docket = docketmod.DirstateDocket.parse(
812 self._readdirstatefile(), self._nodeconstants
815 self._readdirstatefile(), self._nodeconstants
813 )
816 )
814 return self._docket
817 return self._docket
815
818
816 @propertycache
819 @propertycache
817 def _rustmap(self):
820 def _rustmap(self):
818 """
821 """
819 Fills the Dirstatemap when called.
822 Fills the Dirstatemap when called.
820 """
823 """
821 # ignore HG_PENDING because identity is used only for writing
824 # ignore HG_PENDING because identity is used only for writing
822 self.identity = util.filestat.frompath(
825 self.identity = util.filestat.frompath(
823 self._opener.join(self._filename)
826 self._opener.join(self._filename)
824 )
827 )
825
828
826 if self._use_dirstate_v2:
829 if self._use_dirstate_v2:
827 if self.docket.uuid:
830 if self.docket.uuid:
828 # TODO: use mmap when possible
831 # TODO: use mmap when possible
829 data = self._opener.read(self.docket.data_filename())
832 data = self._opener.read(self.docket.data_filename())
830 else:
833 else:
831 data = b''
834 data = b''
832 self._rustmap = rustmod.DirstateMap.new_v2(
835 self._rustmap = rustmod.DirstateMap.new_v2(
833 data, self.docket.data_size, self.docket.tree_metadata
836 data, self.docket.data_size, self.docket.tree_metadata
834 )
837 )
835 parents = self.docket.parents
838 parents = self.docket.parents
836 else:
839 else:
837 self._rustmap, parents = rustmod.DirstateMap.new_v1(
840 self._rustmap, parents = rustmod.DirstateMap.new_v1(
838 self._use_dirstate_tree, self._readdirstatefile()
841 self._use_dirstate_tree, self._readdirstatefile()
839 )
842 )
840
843
841 if parents and not self._dirtyparents:
844 if parents and not self._dirtyparents:
842 self.setparents(*parents)
845 self.setparents(*parents)
843
846
844 self.__contains__ = self._rustmap.__contains__
847 self.__contains__ = self._rustmap.__contains__
845 self.__getitem__ = self._rustmap.__getitem__
848 self.__getitem__ = self._rustmap.__getitem__
846 self.get = self._rustmap.get
849 self.get = self._rustmap.get
847 return self._rustmap
850 return self._rustmap
848
851
849 def write(self, tr, st, now):
852 def write(self, tr, st, now):
850 if not self._use_dirstate_v2:
853 if not self._use_dirstate_v2:
851 p1, p2 = self.parents()
854 p1, p2 = self.parents()
852 packed = self._rustmap.write_v1(p1, p2, now)
855 packed = self._rustmap.write_v1(p1, p2, now)
853 st.write(packed)
856 st.write(packed)
854 st.close()
857 st.close()
855 self._dirtyparents = False
858 self._dirtyparents = False
856 return
859 return
857
860
858 # We can only append to an existing data file if there is one
861 # We can only append to an existing data file if there is one
859 can_append = self.docket.uuid is not None
862 can_append = self.docket.uuid is not None
860 packed, meta, append = self._rustmap.write_v2(now, can_append)
863 packed, meta, append = self._rustmap.write_v2(now, can_append)
861 if append:
864 if append:
862 docket = self.docket
865 docket = self.docket
863 data_filename = docket.data_filename()
866 data_filename = docket.data_filename()
864 if tr:
867 if tr:
865 tr.add(data_filename, docket.data_size)
868 tr.add(data_filename, docket.data_size)
866 with self._opener(data_filename, b'r+b') as fp:
869 with self._opener(data_filename, b'r+b') as fp:
867 fp.seek(docket.data_size)
870 fp.seek(docket.data_size)
868 assert fp.tell() == docket.data_size
871 assert fp.tell() == docket.data_size
869 written = fp.write(packed)
872 written = fp.write(packed)
870 if written is not None: # py2 may return None
873 if written is not None: # py2 may return None
871 assert written == len(packed), (written, len(packed))
874 assert written == len(packed), (written, len(packed))
872 docket.data_size += len(packed)
875 docket.data_size += len(packed)
873 docket.parents = self.parents()
876 docket.parents = self.parents()
874 docket.tree_metadata = meta
877 docket.tree_metadata = meta
875 st.write(docket.serialize())
878 st.write(docket.serialize())
876 st.close()
879 st.close()
877 else:
880 else:
878 old_docket = self.docket
881 old_docket = self.docket
879 new_docket = docketmod.DirstateDocket.with_new_uuid(
882 new_docket = docketmod.DirstateDocket.with_new_uuid(
880 self.parents(), len(packed), meta
883 self.parents(), len(packed), meta
881 )
884 )
882 data_filename = new_docket.data_filename()
885 data_filename = new_docket.data_filename()
883 if tr:
886 if tr:
884 tr.add(data_filename, 0)
887 tr.add(data_filename, 0)
885 self._opener.write(data_filename, packed)
888 self._opener.write(data_filename, packed)
886 # Write the new docket after the new data file has been
889 # Write the new docket after the new data file has been
887 # written. Because `st` was opened with `atomictemp=True`,
890 # written. Because `st` was opened with `atomictemp=True`,
888 # the actual `.hg/dirstate` file is only affected on close.
891 # the actual `.hg/dirstate` file is only affected on close.
889 st.write(new_docket.serialize())
892 st.write(new_docket.serialize())
890 st.close()
893 st.close()
891 # Remove the old data file after the new docket pointing to
894 # Remove the old data file after the new docket pointing to
892 # the new data file was written.
895 # the new data file was written.
893 if old_docket.uuid:
896 if old_docket.uuid:
894 data_filename = old_docket.data_filename()
897 data_filename = old_docket.data_filename()
895 unlink = lambda _tr=None: self._opener.unlink(data_filename)
898 unlink = lambda _tr=None: self._opener.unlink(data_filename)
896 if tr:
899 if tr:
897 category = b"dirstate-v2-clean-" + old_docket.uuid
900 category = b"dirstate-v2-clean-" + old_docket.uuid
898 tr.addpostclose(category, unlink)
901 tr.addpostclose(category, unlink)
899 else:
902 else:
900 unlink()
903 unlink()
901 self._docket = new_docket
904 self._docket = new_docket
902 # Reload from the newly-written file
905 # Reload from the newly-written file
903 util.clearcachedproperty(self, b"_rustmap")
906 util.clearcachedproperty(self, b"_rustmap")
904 self._dirtyparents = False
907 self._dirtyparents = False
905
908
906 @propertycache
909 @propertycache
907 def filefoldmap(self):
910 def filefoldmap(self):
908 """Returns a dictionary mapping normalized case paths to their
911 """Returns a dictionary mapping normalized case paths to their
909 non-normalized versions.
912 non-normalized versions.
910 """
913 """
911 return self._rustmap.filefoldmapasdict()
914 return self._rustmap.filefoldmapasdict()
912
915
913 def hastrackeddir(self, d):
916 def hastrackeddir(self, d):
914 return self._rustmap.hastrackeddir(d)
917 return self._rustmap.hastrackeddir(d)
915
918
916 def hasdir(self, d):
919 def hasdir(self, d):
917 return self._rustmap.hasdir(d)
920 return self._rustmap.hasdir(d)
918
921
919 @propertycache
922 @propertycache
920 def identity(self):
923 def identity(self):
921 self._rustmap
924 self._rustmap
922 return self.identity
925 return self.identity
923
926
924 @property
927 @property
925 def nonnormalset(self):
928 def nonnormalset(self):
926 nonnorm = self._rustmap.non_normal_entries()
929 nonnorm = self._rustmap.non_normal_entries()
927 return nonnorm
930 return nonnorm
928
931
929 @propertycache
932 @propertycache
930 def otherparentset(self):
933 def otherparentset(self):
931 otherparents = self._rustmap.other_parent_entries()
934 otherparents = self._rustmap.other_parent_entries()
932 return otherparents
935 return otherparents
933
936
934 def non_normal_or_other_parent_paths(self):
937 def non_normal_or_other_parent_paths(self):
935 return self._rustmap.non_normal_or_other_parent_paths()
938 return self._rustmap.non_normal_or_other_parent_paths()
936
939
937 @propertycache
940 @propertycache
938 def dirfoldmap(self):
941 def dirfoldmap(self):
939 f = {}
942 f = {}
940 normcase = util.normcase
943 normcase = util.normcase
941 for name in self._rustmap.tracked_dirs():
944 for name in self._rustmap.tracked_dirs():
942 f[normcase(name)] = name
945 f[normcase(name)] = name
943 return f
946 return f
944
947
945 def set_possibly_dirty(self, filename):
948 def set_possibly_dirty(self, filename):
946 """record that the current state of the file on disk is unknown"""
949 """record that the current state of the file on disk is unknown"""
947 entry = self[filename]
950 entry = self[filename]
948 entry.set_possibly_dirty()
951 entry.set_possibly_dirty()
949 self._rustmap.set_v1(filename, entry)
952 self._rustmap.set_v1(filename, entry)
950
953
951 def set_clean(self, filename, mode, size, mtime):
954 def set_clean(self, filename, mode, size, mtime):
952 """mark a file as back to a clean state"""
955 """mark a file as back to a clean state"""
953 entry = self[filename]
956 entry = self[filename]
954 mtime = mtime & rangemask
957 mtime = mtime & rangemask
955 size = size & rangemask
958 size = size & rangemask
956 entry.set_clean(mode, size, mtime)
959 entry.set_clean(mode, size, mtime)
957 self._rustmap.set_v1(filename, entry)
960 self._rustmap.set_v1(filename, entry)
958 self._rustmap.copymap().pop(filename, None)
961 self._rustmap.copymap().pop(filename, None)
959
962
960 def __setitem__(self, key, value):
963 def __setitem__(self, key, value):
961 assert isinstance(value, DirstateItem)
964 assert isinstance(value, DirstateItem)
962 self._rustmap.set_v1(key, value)
965 self._rustmap.set_v1(key, value)
@@ -1,115 +1,72 b''
1 // dirstate.rs
1 // dirstate.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate` module provided by the
8 //! Bindings for the `hg::dirstate` module provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10 //!
10 //!
11 //! From Python, this will be seen as `mercurial.rustext.dirstate`
11 //! From Python, this will be seen as `mercurial.rustext.dirstate`
12 mod copymap;
12 mod copymap;
13 mod dirs_multiset;
13 mod dirs_multiset;
14 mod dirstate_map;
14 mod dirstate_map;
15 mod item;
15 mod item;
16 mod non_normal_entries;
16 mod non_normal_entries;
17 mod status;
17 mod status;
18 use self::item::DirstateItem;
18 use self::item::DirstateItem;
19 use crate::{
19 use crate::{
20 dirstate::{
20 dirstate::{
21 dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
21 dirs_multiset::Dirs, dirstate_map::DirstateMap, status::status_wrapper,
22 },
22 },
23 exceptions,
23 exceptions,
24 };
24 };
25 use cpython::{
25 use cpython::{PyBytes, PyDict, PyList, PyModule, PyObject, PyResult, Python};
26 PyBytes, PyDict, PyErr, PyList, PyModule, PyObject, PyResult, Python,
27 };
28 use hg::dirstate_tree::on_disk::V2_FORMAT_MARKER;
26 use hg::dirstate_tree::on_disk::V2_FORMAT_MARKER;
29 use hg::DirstateEntry;
30 use libc::{c_char, c_int};
31
32 // C code uses a custom `dirstate_tuple` type, checks in multiple instances
33 // for this type, and raises a Python `Exception` if the check does not pass.
34 // Because this type differs only in name from the regular Python tuple, it
35 // would be a good idea in the near future to remove it entirely to allow
36 // for a pure Python tuple of the same effective structure to be used,
37 // rendering this type and the capsule below useless.
38 py_capsule_fn!(
39 from mercurial.cext.parsers import make_dirstate_item_CAPI
40 as make_dirstate_item_capi
41 signature (
42 state: c_char,
43 mode: c_int,
44 size: c_int,
45 mtime: c_int,
46 ) -> *mut RawPyObject
47 );
48
49 pub fn make_dirstate_item(
50 py: Python,
51 entry: &DirstateEntry,
52 ) -> PyResult<PyObject> {
53 // Explicitly go through u8 first, then cast to platform-specific `c_char`
54 // because Into<u8> has a specific implementation while `as c_char` would
55 // just do a naive enum cast.
56 let state_code: u8 = entry.state().into();
57
58 let make = make_dirstate_item_capi::retrieve(py)?;
59 let maybe_obj = unsafe {
60 let ptr = make(
61 state_code as c_char,
62 entry.mode(),
63 entry.size(),
64 entry.mtime(),
65 );
66 PyObject::from_owned_ptr_opt(py, ptr)
67 };
68 maybe_obj.ok_or_else(|| PyErr::fetch(py))
69 }
70
27
71 /// Create the module, with `__package__` given from parent
28 /// Create the module, with `__package__` given from parent
72 pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
29 pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
73 let dotted_name = &format!("{}.dirstate", package);
30 let dotted_name = &format!("{}.dirstate", package);
74 let m = PyModule::new(py, dotted_name)?;
31 let m = PyModule::new(py, dotted_name)?;
75
32
76 env_logger::init();
33 env_logger::init();
77
34
78 m.add(py, "__package__", package)?;
35 m.add(py, "__package__", package)?;
79 m.add(py, "__doc__", "Dirstate - Rust implementation")?;
36 m.add(py, "__doc__", "Dirstate - Rust implementation")?;
80
37
81 m.add(
38 m.add(
82 py,
39 py,
83 "FallbackError",
40 "FallbackError",
84 py.get_type::<exceptions::FallbackError>(),
41 py.get_type::<exceptions::FallbackError>(),
85 )?;
42 )?;
86 m.add_class::<Dirs>(py)?;
43 m.add_class::<Dirs>(py)?;
87 m.add_class::<DirstateMap>(py)?;
44 m.add_class::<DirstateMap>(py)?;
88 m.add_class::<DirstateItem>(py)?;
45 m.add_class::<DirstateItem>(py)?;
89 m.add(py, "V2_FORMAT_MARKER", PyBytes::new(py, V2_FORMAT_MARKER))?;
46 m.add(py, "V2_FORMAT_MARKER", PyBytes::new(py, V2_FORMAT_MARKER))?;
90 m.add(
47 m.add(
91 py,
48 py,
92 "status",
49 "status",
93 py_fn!(
50 py_fn!(
94 py,
51 py,
95 status_wrapper(
52 status_wrapper(
96 dmap: DirstateMap,
53 dmap: DirstateMap,
97 root_dir: PyObject,
54 root_dir: PyObject,
98 matcher: PyObject,
55 matcher: PyObject,
99 ignorefiles: PyList,
56 ignorefiles: PyList,
100 check_exec: bool,
57 check_exec: bool,
101 last_normal_time: i64,
58 last_normal_time: i64,
102 list_clean: bool,
59 list_clean: bool,
103 list_ignored: bool,
60 list_ignored: bool,
104 list_unknown: bool,
61 list_unknown: bool,
105 collect_traversed_dirs: bool
62 collect_traversed_dirs: bool
106 )
63 )
107 ),
64 ),
108 )?;
65 )?;
109
66
110 let sys = PyModule::import(py, "sys")?;
67 let sys = PyModule::import(py, "sys")?;
111 let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
68 let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
112 sys_modules.set_item(py, dotted_name, &m)?;
69 sys_modules.set_item(py, dotted_name, &m)?;
113
70
114 Ok(m)
71 Ok(m)
115 }
72 }
@@ -1,675 +1,675 b''
1 // dirstate_map.rs
1 // dirstate_map.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
8 //! Bindings for the `hg::dirstate::dirstate_map` file provided by the
9 //! `hg-core` package.
9 //! `hg-core` package.
10
10
11 use std::cell::{RefCell, RefMut};
11 use std::cell::{RefCell, RefMut};
12 use std::convert::TryInto;
12 use std::convert::TryInto;
13
13
14 use cpython::{
14 use cpython::{
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
15 exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
16 PyObject, PyResult, PySet, PyString, Python, PythonObject, ToPyObject,
17 UnsafePyLeaked,
17 UnsafePyLeaked,
18 };
18 };
19
19
20 use crate::{
20 use crate::{
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
21 dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator},
22 dirstate::make_dirstate_item,
22 dirstate::item::DirstateItem,
23 dirstate::non_normal_entries::{
23 dirstate::non_normal_entries::{
24 NonNormalEntries, NonNormalEntriesIterator,
24 NonNormalEntries, NonNormalEntriesIterator,
25 },
25 },
26 pybytes_deref::PyBytesDeref,
26 pybytes_deref::PyBytesDeref,
27 };
27 };
28 use hg::{
28 use hg::{
29 dirstate::parsers::Timestamp,
29 dirstate::parsers::Timestamp,
30 dirstate::MTIME_UNSET,
30 dirstate::MTIME_UNSET,
31 dirstate::SIZE_NON_NORMAL,
31 dirstate::SIZE_NON_NORMAL,
32 dirstate_tree::dirstate_map::DirstateMap as TreeDirstateMap,
32 dirstate_tree::dirstate_map::DirstateMap as TreeDirstateMap,
33 dirstate_tree::dispatch::DirstateMapMethods,
33 dirstate_tree::dispatch::DirstateMapMethods,
34 dirstate_tree::on_disk::DirstateV2ParseError,
34 dirstate_tree::on_disk::DirstateV2ParseError,
35 dirstate_tree::owning::OwningDirstateMap,
35 dirstate_tree::owning::OwningDirstateMap,
36 revlog::Node,
36 revlog::Node,
37 utils::files::normalize_case,
37 utils::files::normalize_case,
38 utils::hg_path::{HgPath, HgPathBuf},
38 utils::hg_path::{HgPath, HgPathBuf},
39 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
39 DirstateEntry, DirstateError, DirstateMap as RustDirstateMap,
40 DirstateParents, EntryState, StateMapIter,
40 DirstateParents, EntryState, StateMapIter,
41 };
41 };
42
42
43 // TODO
43 // TODO
44 // This object needs to share references to multiple members of its Rust
44 // This object needs to share references to multiple members of its Rust
45 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
45 // inner struct, namely `copy_map`, `dirs` and `all_dirs`.
46 // Right now `CopyMap` is done, but it needs to have an explicit reference
46 // Right now `CopyMap` is done, but it needs to have an explicit reference
47 // to `RustDirstateMap` which itself needs to have an encapsulation for
47 // to `RustDirstateMap` which itself needs to have an encapsulation for
48 // every method in `CopyMap` (copymapcopy, etc.).
48 // every method in `CopyMap` (copymapcopy, etc.).
49 // This is ugly and hard to maintain.
49 // This is ugly and hard to maintain.
50 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
50 // The same logic applies to `dirs` and `all_dirs`, however the `Dirs`
51 // `py_class!` is already implemented and does not mention
51 // `py_class!` is already implemented and does not mention
52 // `RustDirstateMap`, rightfully so.
52 // `RustDirstateMap`, rightfully so.
53 // All attributes also have to have a separate refcount data attribute for
53 // All attributes also have to have a separate refcount data attribute for
54 // leaks, with all methods that go along for reference sharing.
54 // leaks, with all methods that go along for reference sharing.
55 py_class!(pub class DirstateMap |py| {
55 py_class!(pub class DirstateMap |py| {
56 @shared data inner: Box<dyn DirstateMapMethods + Send>;
56 @shared data inner: Box<dyn DirstateMapMethods + Send>;
57
57
58 /// Returns a `(dirstate_map, parents)` tuple
58 /// Returns a `(dirstate_map, parents)` tuple
59 @staticmethod
59 @staticmethod
60 def new_v1(
60 def new_v1(
61 use_dirstate_tree: bool,
61 use_dirstate_tree: bool,
62 on_disk: PyBytes,
62 on_disk: PyBytes,
63 ) -> PyResult<PyObject> {
63 ) -> PyResult<PyObject> {
64 let (inner, parents) = if use_dirstate_tree {
64 let (inner, parents) = if use_dirstate_tree {
65 let on_disk = PyBytesDeref::new(py, on_disk);
65 let on_disk = PyBytesDeref::new(py, on_disk);
66 let mut map = OwningDirstateMap::new_empty(on_disk);
66 let mut map = OwningDirstateMap::new_empty(on_disk);
67 let (on_disk, map_placeholder) = map.get_mut_pair();
67 let (on_disk, map_placeholder) = map.get_mut_pair();
68
68
69 let (actual_map, parents) = TreeDirstateMap::new_v1(on_disk)
69 let (actual_map, parents) = TreeDirstateMap::new_v1(on_disk)
70 .map_err(|e| dirstate_error(py, e))?;
70 .map_err(|e| dirstate_error(py, e))?;
71 *map_placeholder = actual_map;
71 *map_placeholder = actual_map;
72 (Box::new(map) as _, parents)
72 (Box::new(map) as _, parents)
73 } else {
73 } else {
74 let bytes = on_disk.data(py);
74 let bytes = on_disk.data(py);
75 let mut map = RustDirstateMap::default();
75 let mut map = RustDirstateMap::default();
76 let parents = map.read(bytes).map_err(|e| dirstate_error(py, e))?;
76 let parents = map.read(bytes).map_err(|e| dirstate_error(py, e))?;
77 (Box::new(map) as _, parents)
77 (Box::new(map) as _, parents)
78 };
78 };
79 let map = Self::create_instance(py, inner)?;
79 let map = Self::create_instance(py, inner)?;
80 let parents = parents.map(|p| {
80 let parents = parents.map(|p| {
81 let p1 = PyBytes::new(py, p.p1.as_bytes());
81 let p1 = PyBytes::new(py, p.p1.as_bytes());
82 let p2 = PyBytes::new(py, p.p2.as_bytes());
82 let p2 = PyBytes::new(py, p.p2.as_bytes());
83 (p1, p2)
83 (p1, p2)
84 });
84 });
85 Ok((map, parents).to_py_object(py).into_object())
85 Ok((map, parents).to_py_object(py).into_object())
86 }
86 }
87
87
88 /// Returns a DirstateMap
88 /// Returns a DirstateMap
89 @staticmethod
89 @staticmethod
90 def new_v2(
90 def new_v2(
91 on_disk: PyBytes,
91 on_disk: PyBytes,
92 data_size: usize,
92 data_size: usize,
93 tree_metadata: PyBytes,
93 tree_metadata: PyBytes,
94 ) -> PyResult<PyObject> {
94 ) -> PyResult<PyObject> {
95 let dirstate_error = |e: DirstateError| {
95 let dirstate_error = |e: DirstateError| {
96 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
96 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
97 };
97 };
98 let on_disk = PyBytesDeref::new(py, on_disk);
98 let on_disk = PyBytesDeref::new(py, on_disk);
99 let mut map = OwningDirstateMap::new_empty(on_disk);
99 let mut map = OwningDirstateMap::new_empty(on_disk);
100 let (on_disk, map_placeholder) = map.get_mut_pair();
100 let (on_disk, map_placeholder) = map.get_mut_pair();
101 *map_placeholder = TreeDirstateMap::new_v2(
101 *map_placeholder = TreeDirstateMap::new_v2(
102 on_disk, data_size, tree_metadata.data(py),
102 on_disk, data_size, tree_metadata.data(py),
103 ).map_err(dirstate_error)?;
103 ).map_err(dirstate_error)?;
104 let map = Self::create_instance(py, Box::new(map))?;
104 let map = Self::create_instance(py, Box::new(map))?;
105 Ok(map.into_object())
105 Ok(map.into_object())
106 }
106 }
107
107
108 def clear(&self) -> PyResult<PyObject> {
108 def clear(&self) -> PyResult<PyObject> {
109 self.inner(py).borrow_mut().clear();
109 self.inner(py).borrow_mut().clear();
110 Ok(py.None())
110 Ok(py.None())
111 }
111 }
112
112
113 def get(
113 def get(
114 &self,
114 &self,
115 key: PyObject,
115 key: PyObject,
116 default: Option<PyObject> = None
116 default: Option<PyObject> = None
117 ) -> PyResult<Option<PyObject>> {
117 ) -> PyResult<Option<PyObject>> {
118 let key = key.extract::<PyBytes>(py)?;
118 let key = key.extract::<PyBytes>(py)?;
119 match self
119 match self
120 .inner(py)
120 .inner(py)
121 .borrow()
121 .borrow()
122 .get(HgPath::new(key.data(py)))
122 .get(HgPath::new(key.data(py)))
123 .map_err(|e| v2_error(py, e))?
123 .map_err(|e| v2_error(py, e))?
124 {
124 {
125 Some(entry) => {
125 Some(entry) => {
126 Ok(Some(make_dirstate_item(py, &entry)?))
126 Ok(Some(DirstateItem::new_as_pyobject(py, entry)?))
127 },
127 },
128 None => Ok(default)
128 None => Ok(default)
129 }
129 }
130 }
130 }
131
131
132 def set_v1(&self, path: PyObject, item: PyObject) -> PyResult<PyObject> {
132 def set_v1(&self, path: PyObject, item: PyObject) -> PyResult<PyObject> {
133 let f = path.extract::<PyBytes>(py)?;
133 let f = path.extract::<PyBytes>(py)?;
134 let filename = HgPath::new(f.data(py));
134 let filename = HgPath::new(f.data(py));
135 let state = item.getattr(py, "state")?.extract::<PyBytes>(py)?;
135 let state = item.getattr(py, "state")?.extract::<PyBytes>(py)?;
136 let state = state.data(py)[0];
136 let state = state.data(py)[0];
137 let entry = DirstateEntry::from_v1_data(
137 let entry = DirstateEntry::from_v1_data(
138 state.try_into().expect("state is always valid"),
138 state.try_into().expect("state is always valid"),
139 item.getattr(py, "mode")?.extract(py)?,
139 item.getattr(py, "mode")?.extract(py)?,
140 item.getattr(py, "size")?.extract(py)?,
140 item.getattr(py, "size")?.extract(py)?,
141 item.getattr(py, "mtime")?.extract(py)?,
141 item.getattr(py, "mtime")?.extract(py)?,
142 );
142 );
143 self.inner(py).borrow_mut().set_v1(filename, entry);
143 self.inner(py).borrow_mut().set_v1(filename, entry);
144 Ok(py.None())
144 Ok(py.None())
145 }
145 }
146
146
147 def addfile(
147 def addfile(
148 &self,
148 &self,
149 f: PyObject,
149 f: PyObject,
150 mode: PyObject,
150 mode: PyObject,
151 size: PyObject,
151 size: PyObject,
152 mtime: PyObject,
152 mtime: PyObject,
153 added: PyObject,
153 added: PyObject,
154 merged: PyObject,
154 merged: PyObject,
155 from_p2: PyObject,
155 from_p2: PyObject,
156 possibly_dirty: PyObject,
156 possibly_dirty: PyObject,
157 ) -> PyResult<PyObject> {
157 ) -> PyResult<PyObject> {
158 let f = f.extract::<PyBytes>(py)?;
158 let f = f.extract::<PyBytes>(py)?;
159 let filename = HgPath::new(f.data(py));
159 let filename = HgPath::new(f.data(py));
160 let mode = if mode.is_none(py) {
160 let mode = if mode.is_none(py) {
161 // fallback default value
161 // fallback default value
162 0
162 0
163 } else {
163 } else {
164 mode.extract(py)?
164 mode.extract(py)?
165 };
165 };
166 let size = if size.is_none(py) {
166 let size = if size.is_none(py) {
167 // fallback default value
167 // fallback default value
168 SIZE_NON_NORMAL
168 SIZE_NON_NORMAL
169 } else {
169 } else {
170 size.extract(py)?
170 size.extract(py)?
171 };
171 };
172 let mtime = if mtime.is_none(py) {
172 let mtime = if mtime.is_none(py) {
173 // fallback default value
173 // fallback default value
174 MTIME_UNSET
174 MTIME_UNSET
175 } else {
175 } else {
176 mtime.extract(py)?
176 mtime.extract(py)?
177 };
177 };
178 let entry = DirstateEntry::new_for_add_file(mode, size, mtime);
178 let entry = DirstateEntry::new_for_add_file(mode, size, mtime);
179 let added = added.extract::<PyBool>(py)?.is_true();
179 let added = added.extract::<PyBool>(py)?.is_true();
180 let merged = merged.extract::<PyBool>(py)?.is_true();
180 let merged = merged.extract::<PyBool>(py)?.is_true();
181 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
181 let from_p2 = from_p2.extract::<PyBool>(py)?.is_true();
182 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
182 let possibly_dirty = possibly_dirty.extract::<PyBool>(py)?.is_true();
183 self.inner(py).borrow_mut().add_file(
183 self.inner(py).borrow_mut().add_file(
184 filename,
184 filename,
185 entry,
185 entry,
186 added,
186 added,
187 merged,
187 merged,
188 from_p2,
188 from_p2,
189 possibly_dirty
189 possibly_dirty
190 ).and(Ok(py.None())).or_else(|e: DirstateError| {
190 ).and(Ok(py.None())).or_else(|e: DirstateError| {
191 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
191 Err(PyErr::new::<exc::ValueError, _>(py, e.to_string()))
192 })
192 })
193 }
193 }
194
194
195 def removefile(
195 def removefile(
196 &self,
196 &self,
197 f: PyObject,
197 f: PyObject,
198 in_merge: PyObject
198 in_merge: PyObject
199 ) -> PyResult<PyObject> {
199 ) -> PyResult<PyObject> {
200 self.inner(py).borrow_mut()
200 self.inner(py).borrow_mut()
201 .remove_file(
201 .remove_file(
202 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
202 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
203 in_merge.extract::<PyBool>(py)?.is_true(),
203 in_merge.extract::<PyBool>(py)?.is_true(),
204 )
204 )
205 .or_else(|_| {
205 .or_else(|_| {
206 Err(PyErr::new::<exc::OSError, _>(
206 Err(PyErr::new::<exc::OSError, _>(
207 py,
207 py,
208 "Dirstate error".to_string(),
208 "Dirstate error".to_string(),
209 ))
209 ))
210 })?;
210 })?;
211 Ok(py.None())
211 Ok(py.None())
212 }
212 }
213
213
214 def dropfile(
214 def dropfile(
215 &self,
215 &self,
216 f: PyObject,
216 f: PyObject,
217 ) -> PyResult<PyBool> {
217 ) -> PyResult<PyBool> {
218 self.inner(py).borrow_mut()
218 self.inner(py).borrow_mut()
219 .drop_file(
219 .drop_file(
220 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
220 HgPath::new(f.extract::<PyBytes>(py)?.data(py)),
221 )
221 )
222 .and_then(|b| Ok(b.to_py_object(py)))
222 .and_then(|b| Ok(b.to_py_object(py)))
223 .or_else(|e| {
223 .or_else(|e| {
224 Err(PyErr::new::<exc::OSError, _>(
224 Err(PyErr::new::<exc::OSError, _>(
225 py,
225 py,
226 format!("Dirstate error: {}", e.to_string()),
226 format!("Dirstate error: {}", e.to_string()),
227 ))
227 ))
228 })
228 })
229 }
229 }
230
230
231 def clearambiguoustimes(
231 def clearambiguoustimes(
232 &self,
232 &self,
233 files: PyObject,
233 files: PyObject,
234 now: PyObject
234 now: PyObject
235 ) -> PyResult<PyObject> {
235 ) -> PyResult<PyObject> {
236 let files: PyResult<Vec<HgPathBuf>> = files
236 let files: PyResult<Vec<HgPathBuf>> = files
237 .iter(py)?
237 .iter(py)?
238 .map(|filename| {
238 .map(|filename| {
239 Ok(HgPathBuf::from_bytes(
239 Ok(HgPathBuf::from_bytes(
240 filename?.extract::<PyBytes>(py)?.data(py),
240 filename?.extract::<PyBytes>(py)?.data(py),
241 ))
241 ))
242 })
242 })
243 .collect();
243 .collect();
244 self.inner(py)
244 self.inner(py)
245 .borrow_mut()
245 .borrow_mut()
246 .clear_ambiguous_times(files?, now.extract(py)?)
246 .clear_ambiguous_times(files?, now.extract(py)?)
247 .map_err(|e| v2_error(py, e))?;
247 .map_err(|e| v2_error(py, e))?;
248 Ok(py.None())
248 Ok(py.None())
249 }
249 }
250
250
251 def other_parent_entries(&self) -> PyResult<PyObject> {
251 def other_parent_entries(&self) -> PyResult<PyObject> {
252 let mut inner_shared = self.inner(py).borrow_mut();
252 let mut inner_shared = self.inner(py).borrow_mut();
253 let set = PySet::empty(py)?;
253 let set = PySet::empty(py)?;
254 for path in inner_shared.iter_other_parent_paths() {
254 for path in inner_shared.iter_other_parent_paths() {
255 let path = path.map_err(|e| v2_error(py, e))?;
255 let path = path.map_err(|e| v2_error(py, e))?;
256 set.add(py, PyBytes::new(py, path.as_bytes()))?;
256 set.add(py, PyBytes::new(py, path.as_bytes()))?;
257 }
257 }
258 Ok(set.into_object())
258 Ok(set.into_object())
259 }
259 }
260
260
261 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
261 def non_normal_entries(&self) -> PyResult<NonNormalEntries> {
262 NonNormalEntries::from_inner(py, self.clone_ref(py))
262 NonNormalEntries::from_inner(py, self.clone_ref(py))
263 }
263 }
264
264
265 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
265 def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> {
266 let key = key.extract::<PyBytes>(py)?;
266 let key = key.extract::<PyBytes>(py)?;
267 self.inner(py)
267 self.inner(py)
268 .borrow_mut()
268 .borrow_mut()
269 .non_normal_entries_contains(HgPath::new(key.data(py)))
269 .non_normal_entries_contains(HgPath::new(key.data(py)))
270 .map_err(|e| v2_error(py, e))
270 .map_err(|e| v2_error(py, e))
271 }
271 }
272
272
273 def non_normal_entries_display(&self) -> PyResult<PyString> {
273 def non_normal_entries_display(&self) -> PyResult<PyString> {
274 let mut inner = self.inner(py).borrow_mut();
274 let mut inner = self.inner(py).borrow_mut();
275 let paths = inner
275 let paths = inner
276 .iter_non_normal_paths()
276 .iter_non_normal_paths()
277 .collect::<Result<Vec<_>, _>>()
277 .collect::<Result<Vec<_>, _>>()
278 .map_err(|e| v2_error(py, e))?;
278 .map_err(|e| v2_error(py, e))?;
279 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
279 let formatted = format!("NonNormalEntries: {}", hg::utils::join_display(paths, ", "));
280 Ok(PyString::new(py, &formatted))
280 Ok(PyString::new(py, &formatted))
281 }
281 }
282
282
283 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
283 def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> {
284 let key = key.extract::<PyBytes>(py)?;
284 let key = key.extract::<PyBytes>(py)?;
285 let key = key.data(py);
285 let key = key.data(py);
286 let was_present = self
286 let was_present = self
287 .inner(py)
287 .inner(py)
288 .borrow_mut()
288 .borrow_mut()
289 .non_normal_entries_remove(HgPath::new(key));
289 .non_normal_entries_remove(HgPath::new(key));
290 if !was_present {
290 if !was_present {
291 let msg = String::from_utf8_lossy(key);
291 let msg = String::from_utf8_lossy(key);
292 Err(PyErr::new::<exc::KeyError, _>(py, msg))
292 Err(PyErr::new::<exc::KeyError, _>(py, msg))
293 } else {
293 } else {
294 Ok(py.None())
294 Ok(py.None())
295 }
295 }
296 }
296 }
297
297
298 def non_normal_entries_discard(&self, key: PyObject) -> PyResult<PyObject>
298 def non_normal_entries_discard(&self, key: PyObject) -> PyResult<PyObject>
299 {
299 {
300 let key = key.extract::<PyBytes>(py)?;
300 let key = key.extract::<PyBytes>(py)?;
301 self
301 self
302 .inner(py)
302 .inner(py)
303 .borrow_mut()
303 .borrow_mut()
304 .non_normal_entries_remove(HgPath::new(key.data(py)));
304 .non_normal_entries_remove(HgPath::new(key.data(py)));
305 Ok(py.None())
305 Ok(py.None())
306 }
306 }
307
307
308 def non_normal_entries_add(&self, key: PyObject) -> PyResult<PyObject> {
308 def non_normal_entries_add(&self, key: PyObject) -> PyResult<PyObject> {
309 let key = key.extract::<PyBytes>(py)?;
309 let key = key.extract::<PyBytes>(py)?;
310 self
310 self
311 .inner(py)
311 .inner(py)
312 .borrow_mut()
312 .borrow_mut()
313 .non_normal_entries_add(HgPath::new(key.data(py)));
313 .non_normal_entries_add(HgPath::new(key.data(py)));
314 Ok(py.None())
314 Ok(py.None())
315 }
315 }
316
316
317 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
317 def non_normal_or_other_parent_paths(&self) -> PyResult<PyList> {
318 let mut inner = self.inner(py).borrow_mut();
318 let mut inner = self.inner(py).borrow_mut();
319
319
320 let ret = PyList::new(py, &[]);
320 let ret = PyList::new(py, &[]);
321 for filename in inner.non_normal_or_other_parent_paths() {
321 for filename in inner.non_normal_or_other_parent_paths() {
322 let filename = filename.map_err(|e| v2_error(py, e))?;
322 let filename = filename.map_err(|e| v2_error(py, e))?;
323 let as_pystring = PyBytes::new(py, filename.as_bytes());
323 let as_pystring = PyBytes::new(py, filename.as_bytes());
324 ret.append(py, as_pystring.into_object());
324 ret.append(py, as_pystring.into_object());
325 }
325 }
326 Ok(ret)
326 Ok(ret)
327 }
327 }
328
328
329 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
329 def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> {
330 // Make sure the sets are defined before we no longer have a mutable
330 // Make sure the sets are defined before we no longer have a mutable
331 // reference to the dmap.
331 // reference to the dmap.
332 self.inner(py)
332 self.inner(py)
333 .borrow_mut()
333 .borrow_mut()
334 .set_non_normal_other_parent_entries(false);
334 .set_non_normal_other_parent_entries(false);
335
335
336 let leaked_ref = self.inner(py).leak_immutable();
336 let leaked_ref = self.inner(py).leak_immutable();
337
337
338 NonNormalEntriesIterator::from_inner(py, unsafe {
338 NonNormalEntriesIterator::from_inner(py, unsafe {
339 leaked_ref.map(py, |o| {
339 leaked_ref.map(py, |o| {
340 o.iter_non_normal_paths_panic()
340 o.iter_non_normal_paths_panic()
341 })
341 })
342 })
342 })
343 }
343 }
344
344
345 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
345 def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> {
346 let d = d.extract::<PyBytes>(py)?;
346 let d = d.extract::<PyBytes>(py)?;
347 Ok(self.inner(py).borrow_mut()
347 Ok(self.inner(py).borrow_mut()
348 .has_tracked_dir(HgPath::new(d.data(py)))
348 .has_tracked_dir(HgPath::new(d.data(py)))
349 .map_err(|e| {
349 .map_err(|e| {
350 PyErr::new::<exc::ValueError, _>(py, e.to_string())
350 PyErr::new::<exc::ValueError, _>(py, e.to_string())
351 })?
351 })?
352 .to_py_object(py))
352 .to_py_object(py))
353 }
353 }
354
354
355 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
355 def hasdir(&self, d: PyObject) -> PyResult<PyBool> {
356 let d = d.extract::<PyBytes>(py)?;
356 let d = d.extract::<PyBytes>(py)?;
357 Ok(self.inner(py).borrow_mut()
357 Ok(self.inner(py).borrow_mut()
358 .has_dir(HgPath::new(d.data(py)))
358 .has_dir(HgPath::new(d.data(py)))
359 .map_err(|e| {
359 .map_err(|e| {
360 PyErr::new::<exc::ValueError, _>(py, e.to_string())
360 PyErr::new::<exc::ValueError, _>(py, e.to_string())
361 })?
361 })?
362 .to_py_object(py))
362 .to_py_object(py))
363 }
363 }
364
364
365 def write_v1(
365 def write_v1(
366 &self,
366 &self,
367 p1: PyObject,
367 p1: PyObject,
368 p2: PyObject,
368 p2: PyObject,
369 now: PyObject
369 now: PyObject
370 ) -> PyResult<PyBytes> {
370 ) -> PyResult<PyBytes> {
371 let now = Timestamp(now.extract(py)?);
371 let now = Timestamp(now.extract(py)?);
372
372
373 let mut inner = self.inner(py).borrow_mut();
373 let mut inner = self.inner(py).borrow_mut();
374 let parents = DirstateParents {
374 let parents = DirstateParents {
375 p1: extract_node_id(py, &p1)?,
375 p1: extract_node_id(py, &p1)?,
376 p2: extract_node_id(py, &p2)?,
376 p2: extract_node_id(py, &p2)?,
377 };
377 };
378 let result = inner.pack_v1(parents, now);
378 let result = inner.pack_v1(parents, now);
379 match result {
379 match result {
380 Ok(packed) => Ok(PyBytes::new(py, &packed)),
380 Ok(packed) => Ok(PyBytes::new(py, &packed)),
381 Err(_) => Err(PyErr::new::<exc::OSError, _>(
381 Err(_) => Err(PyErr::new::<exc::OSError, _>(
382 py,
382 py,
383 "Dirstate error".to_string(),
383 "Dirstate error".to_string(),
384 )),
384 )),
385 }
385 }
386 }
386 }
387
387
388 /// Returns new data together with whether that data should be appended to
388 /// Returns new data together with whether that data should be appended to
389 /// the existing data file whose content is at `self.on_disk` (True),
389 /// the existing data file whose content is at `self.on_disk` (True),
390 /// instead of written to a new data file (False).
390 /// instead of written to a new data file (False).
391 def write_v2(
391 def write_v2(
392 &self,
392 &self,
393 now: PyObject,
393 now: PyObject,
394 can_append: bool,
394 can_append: bool,
395 ) -> PyResult<PyObject> {
395 ) -> PyResult<PyObject> {
396 let now = Timestamp(now.extract(py)?);
396 let now = Timestamp(now.extract(py)?);
397
397
398 let mut inner = self.inner(py).borrow_mut();
398 let mut inner = self.inner(py).borrow_mut();
399 let result = inner.pack_v2(now, can_append);
399 let result = inner.pack_v2(now, can_append);
400 match result {
400 match result {
401 Ok((packed, tree_metadata, append)) => {
401 Ok((packed, tree_metadata, append)) => {
402 let packed = PyBytes::new(py, &packed);
402 let packed = PyBytes::new(py, &packed);
403 let tree_metadata = PyBytes::new(py, &tree_metadata);
403 let tree_metadata = PyBytes::new(py, &tree_metadata);
404 let tuple = (packed, tree_metadata, append);
404 let tuple = (packed, tree_metadata, append);
405 Ok(tuple.to_py_object(py).into_object())
405 Ok(tuple.to_py_object(py).into_object())
406 },
406 },
407 Err(_) => Err(PyErr::new::<exc::OSError, _>(
407 Err(_) => Err(PyErr::new::<exc::OSError, _>(
408 py,
408 py,
409 "Dirstate error".to_string(),
409 "Dirstate error".to_string(),
410 )),
410 )),
411 }
411 }
412 }
412 }
413
413
414 def filefoldmapasdict(&self) -> PyResult<PyDict> {
414 def filefoldmapasdict(&self) -> PyResult<PyDict> {
415 let dict = PyDict::new(py);
415 let dict = PyDict::new(py);
416 for item in self.inner(py).borrow_mut().iter() {
416 for item in self.inner(py).borrow_mut().iter() {
417 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
417 let (path, entry) = item.map_err(|e| v2_error(py, e))?;
418 if entry.state() != EntryState::Removed {
418 if entry.state() != EntryState::Removed {
419 let key = normalize_case(path);
419 let key = normalize_case(path);
420 let value = path;
420 let value = path;
421 dict.set_item(
421 dict.set_item(
422 py,
422 py,
423 PyBytes::new(py, key.as_bytes()).into_object(),
423 PyBytes::new(py, key.as_bytes()).into_object(),
424 PyBytes::new(py, value.as_bytes()).into_object(),
424 PyBytes::new(py, value.as_bytes()).into_object(),
425 )?;
425 )?;
426 }
426 }
427 }
427 }
428 Ok(dict)
428 Ok(dict)
429 }
429 }
430
430
431 def __len__(&self) -> PyResult<usize> {
431 def __len__(&self) -> PyResult<usize> {
432 Ok(self.inner(py).borrow().len())
432 Ok(self.inner(py).borrow().len())
433 }
433 }
434
434
435 def __contains__(&self, key: PyObject) -> PyResult<bool> {
435 def __contains__(&self, key: PyObject) -> PyResult<bool> {
436 let key = key.extract::<PyBytes>(py)?;
436 let key = key.extract::<PyBytes>(py)?;
437 self.inner(py)
437 self.inner(py)
438 .borrow()
438 .borrow()
439 .contains_key(HgPath::new(key.data(py)))
439 .contains_key(HgPath::new(key.data(py)))
440 .map_err(|e| v2_error(py, e))
440 .map_err(|e| v2_error(py, e))
441 }
441 }
442
442
443 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
443 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
444 let key = key.extract::<PyBytes>(py)?;
444 let key = key.extract::<PyBytes>(py)?;
445 let key = HgPath::new(key.data(py));
445 let key = HgPath::new(key.data(py));
446 match self
446 match self
447 .inner(py)
447 .inner(py)
448 .borrow()
448 .borrow()
449 .get(key)
449 .get(key)
450 .map_err(|e| v2_error(py, e))?
450 .map_err(|e| v2_error(py, e))?
451 {
451 {
452 Some(entry) => {
452 Some(entry) => {
453 Ok(make_dirstate_item(py, &entry)?)
453 Ok(DirstateItem::new_as_pyobject(py, entry)?)
454 },
454 },
455 None => Err(PyErr::new::<exc::KeyError, _>(
455 None => Err(PyErr::new::<exc::KeyError, _>(
456 py,
456 py,
457 String::from_utf8_lossy(key.as_bytes()),
457 String::from_utf8_lossy(key.as_bytes()),
458 )),
458 )),
459 }
459 }
460 }
460 }
461
461
462 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
462 def keys(&self) -> PyResult<DirstateMapKeysIterator> {
463 let leaked_ref = self.inner(py).leak_immutable();
463 let leaked_ref = self.inner(py).leak_immutable();
464 DirstateMapKeysIterator::from_inner(
464 DirstateMapKeysIterator::from_inner(
465 py,
465 py,
466 unsafe { leaked_ref.map(py, |o| o.iter()) },
466 unsafe { leaked_ref.map(py, |o| o.iter()) },
467 )
467 )
468 }
468 }
469
469
470 def items(&self) -> PyResult<DirstateMapItemsIterator> {
470 def items(&self) -> PyResult<DirstateMapItemsIterator> {
471 let leaked_ref = self.inner(py).leak_immutable();
471 let leaked_ref = self.inner(py).leak_immutable();
472 DirstateMapItemsIterator::from_inner(
472 DirstateMapItemsIterator::from_inner(
473 py,
473 py,
474 unsafe { leaked_ref.map(py, |o| o.iter()) },
474 unsafe { leaked_ref.map(py, |o| o.iter()) },
475 )
475 )
476 }
476 }
477
477
478 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
478 def __iter__(&self) -> PyResult<DirstateMapKeysIterator> {
479 let leaked_ref = self.inner(py).leak_immutable();
479 let leaked_ref = self.inner(py).leak_immutable();
480 DirstateMapKeysIterator::from_inner(
480 DirstateMapKeysIterator::from_inner(
481 py,
481 py,
482 unsafe { leaked_ref.map(py, |o| o.iter()) },
482 unsafe { leaked_ref.map(py, |o| o.iter()) },
483 )
483 )
484 }
484 }
485
485
486 // TODO all copymap* methods, see docstring above
486 // TODO all copymap* methods, see docstring above
487 def copymapcopy(&self) -> PyResult<PyDict> {
487 def copymapcopy(&self) -> PyResult<PyDict> {
488 let dict = PyDict::new(py);
488 let dict = PyDict::new(py);
489 for item in self.inner(py).borrow().copy_map_iter() {
489 for item in self.inner(py).borrow().copy_map_iter() {
490 let (key, value) = item.map_err(|e| v2_error(py, e))?;
490 let (key, value) = item.map_err(|e| v2_error(py, e))?;
491 dict.set_item(
491 dict.set_item(
492 py,
492 py,
493 PyBytes::new(py, key.as_bytes()),
493 PyBytes::new(py, key.as_bytes()),
494 PyBytes::new(py, value.as_bytes()),
494 PyBytes::new(py, value.as_bytes()),
495 )?;
495 )?;
496 }
496 }
497 Ok(dict)
497 Ok(dict)
498 }
498 }
499
499
500 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
500 def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
501 let key = key.extract::<PyBytes>(py)?;
501 let key = key.extract::<PyBytes>(py)?;
502 match self
502 match self
503 .inner(py)
503 .inner(py)
504 .borrow()
504 .borrow()
505 .copy_map_get(HgPath::new(key.data(py)))
505 .copy_map_get(HgPath::new(key.data(py)))
506 .map_err(|e| v2_error(py, e))?
506 .map_err(|e| v2_error(py, e))?
507 {
507 {
508 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
508 Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
509 None => Err(PyErr::new::<exc::KeyError, _>(
509 None => Err(PyErr::new::<exc::KeyError, _>(
510 py,
510 py,
511 String::from_utf8_lossy(key.data(py)),
511 String::from_utf8_lossy(key.data(py)),
512 )),
512 )),
513 }
513 }
514 }
514 }
515 def copymap(&self) -> PyResult<CopyMap> {
515 def copymap(&self) -> PyResult<CopyMap> {
516 CopyMap::from_inner(py, self.clone_ref(py))
516 CopyMap::from_inner(py, self.clone_ref(py))
517 }
517 }
518
518
519 def copymaplen(&self) -> PyResult<usize> {
519 def copymaplen(&self) -> PyResult<usize> {
520 Ok(self.inner(py).borrow().copy_map_len())
520 Ok(self.inner(py).borrow().copy_map_len())
521 }
521 }
522 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
522 def copymapcontains(&self, key: PyObject) -> PyResult<bool> {
523 let key = key.extract::<PyBytes>(py)?;
523 let key = key.extract::<PyBytes>(py)?;
524 self.inner(py)
524 self.inner(py)
525 .borrow()
525 .borrow()
526 .copy_map_contains_key(HgPath::new(key.data(py)))
526 .copy_map_contains_key(HgPath::new(key.data(py)))
527 .map_err(|e| v2_error(py, e))
527 .map_err(|e| v2_error(py, e))
528 }
528 }
529 def copymapget(
529 def copymapget(
530 &self,
530 &self,
531 key: PyObject,
531 key: PyObject,
532 default: Option<PyObject>
532 default: Option<PyObject>
533 ) -> PyResult<Option<PyObject>> {
533 ) -> PyResult<Option<PyObject>> {
534 let key = key.extract::<PyBytes>(py)?;
534 let key = key.extract::<PyBytes>(py)?;
535 match self
535 match self
536 .inner(py)
536 .inner(py)
537 .borrow()
537 .borrow()
538 .copy_map_get(HgPath::new(key.data(py)))
538 .copy_map_get(HgPath::new(key.data(py)))
539 .map_err(|e| v2_error(py, e))?
539 .map_err(|e| v2_error(py, e))?
540 {
540 {
541 Some(copy) => Ok(Some(
541 Some(copy) => Ok(Some(
542 PyBytes::new(py, copy.as_bytes()).into_object(),
542 PyBytes::new(py, copy.as_bytes()).into_object(),
543 )),
543 )),
544 None => Ok(default),
544 None => Ok(default),
545 }
545 }
546 }
546 }
547 def copymapsetitem(
547 def copymapsetitem(
548 &self,
548 &self,
549 key: PyObject,
549 key: PyObject,
550 value: PyObject
550 value: PyObject
551 ) -> PyResult<PyObject> {
551 ) -> PyResult<PyObject> {
552 let key = key.extract::<PyBytes>(py)?;
552 let key = key.extract::<PyBytes>(py)?;
553 let value = value.extract::<PyBytes>(py)?;
553 let value = value.extract::<PyBytes>(py)?;
554 self.inner(py)
554 self.inner(py)
555 .borrow_mut()
555 .borrow_mut()
556 .copy_map_insert(
556 .copy_map_insert(
557 HgPathBuf::from_bytes(key.data(py)),
557 HgPathBuf::from_bytes(key.data(py)),
558 HgPathBuf::from_bytes(value.data(py)),
558 HgPathBuf::from_bytes(value.data(py)),
559 )
559 )
560 .map_err(|e| v2_error(py, e))?;
560 .map_err(|e| v2_error(py, e))?;
561 Ok(py.None())
561 Ok(py.None())
562 }
562 }
563 def copymappop(
563 def copymappop(
564 &self,
564 &self,
565 key: PyObject,
565 key: PyObject,
566 default: Option<PyObject>
566 default: Option<PyObject>
567 ) -> PyResult<Option<PyObject>> {
567 ) -> PyResult<Option<PyObject>> {
568 let key = key.extract::<PyBytes>(py)?;
568 let key = key.extract::<PyBytes>(py)?;
569 match self
569 match self
570 .inner(py)
570 .inner(py)
571 .borrow_mut()
571 .borrow_mut()
572 .copy_map_remove(HgPath::new(key.data(py)))
572 .copy_map_remove(HgPath::new(key.data(py)))
573 .map_err(|e| v2_error(py, e))?
573 .map_err(|e| v2_error(py, e))?
574 {
574 {
575 Some(_) => Ok(None),
575 Some(_) => Ok(None),
576 None => Ok(default),
576 None => Ok(default),
577 }
577 }
578 }
578 }
579
579
580 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
580 def copymapiter(&self) -> PyResult<CopyMapKeysIterator> {
581 let leaked_ref = self.inner(py).leak_immutable();
581 let leaked_ref = self.inner(py).leak_immutable();
582 CopyMapKeysIterator::from_inner(
582 CopyMapKeysIterator::from_inner(
583 py,
583 py,
584 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
584 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
585 )
585 )
586 }
586 }
587
587
588 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
588 def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> {
589 let leaked_ref = self.inner(py).leak_immutable();
589 let leaked_ref = self.inner(py).leak_immutable();
590 CopyMapItemsIterator::from_inner(
590 CopyMapItemsIterator::from_inner(
591 py,
591 py,
592 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
592 unsafe { leaked_ref.map(py, |o| o.copy_map_iter()) },
593 )
593 )
594 }
594 }
595
595
596 def tracked_dirs(&self) -> PyResult<PyList> {
596 def tracked_dirs(&self) -> PyResult<PyList> {
597 let dirs = PyList::new(py, &[]);
597 let dirs = PyList::new(py, &[]);
598 for path in self.inner(py).borrow_mut().iter_tracked_dirs()
598 for path in self.inner(py).borrow_mut().iter_tracked_dirs()
599 .map_err(|e |dirstate_error(py, e))?
599 .map_err(|e |dirstate_error(py, e))?
600 {
600 {
601 let path = path.map_err(|e| v2_error(py, e))?;
601 let path = path.map_err(|e| v2_error(py, e))?;
602 let path = PyBytes::new(py, path.as_bytes());
602 let path = PyBytes::new(py, path.as_bytes());
603 dirs.append(py, path.into_object())
603 dirs.append(py, path.into_object())
604 }
604 }
605 Ok(dirs)
605 Ok(dirs)
606 }
606 }
607
607
608 def debug_iter(&self, all: bool) -> PyResult<PyList> {
608 def debug_iter(&self, all: bool) -> PyResult<PyList> {
609 let dirs = PyList::new(py, &[]);
609 let dirs = PyList::new(py, &[]);
610 for item in self.inner(py).borrow().debug_iter(all) {
610 for item in self.inner(py).borrow().debug_iter(all) {
611 let (path, (state, mode, size, mtime)) =
611 let (path, (state, mode, size, mtime)) =
612 item.map_err(|e| v2_error(py, e))?;
612 item.map_err(|e| v2_error(py, e))?;
613 let path = PyBytes::new(py, path.as_bytes());
613 let path = PyBytes::new(py, path.as_bytes());
614 let item = (path, state, mode, size, mtime);
614 let item = (path, state, mode, size, mtime);
615 dirs.append(py, item.to_py_object(py).into_object())
615 dirs.append(py, item.to_py_object(py).into_object())
616 }
616 }
617 Ok(dirs)
617 Ok(dirs)
618 }
618 }
619 });
619 });
620
620
621 impl DirstateMap {
621 impl DirstateMap {
622 pub fn get_inner_mut<'a>(
622 pub fn get_inner_mut<'a>(
623 &'a self,
623 &'a self,
624 py: Python<'a>,
624 py: Python<'a>,
625 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
625 ) -> RefMut<'a, Box<dyn DirstateMapMethods + Send>> {
626 self.inner(py).borrow_mut()
626 self.inner(py).borrow_mut()
627 }
627 }
628 fn translate_key(
628 fn translate_key(
629 py: Python,
629 py: Python,
630 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
630 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
631 ) -> PyResult<Option<PyBytes>> {
631 ) -> PyResult<Option<PyBytes>> {
632 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
632 let (f, _entry) = res.map_err(|e| v2_error(py, e))?;
633 Ok(Some(PyBytes::new(py, f.as_bytes())))
633 Ok(Some(PyBytes::new(py, f.as_bytes())))
634 }
634 }
635 fn translate_key_value(
635 fn translate_key_value(
636 py: Python,
636 py: Python,
637 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
637 res: Result<(&HgPath, DirstateEntry), DirstateV2ParseError>,
638 ) -> PyResult<Option<(PyBytes, PyObject)>> {
638 ) -> PyResult<Option<(PyBytes, PyObject)>> {
639 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
639 let (f, entry) = res.map_err(|e| v2_error(py, e))?;
640 Ok(Some((
640 Ok(Some((
641 PyBytes::new(py, f.as_bytes()),
641 PyBytes::new(py, f.as_bytes()),
642 make_dirstate_item(py, &entry)?,
642 DirstateItem::new_as_pyobject(py, entry)?,
643 )))
643 )))
644 }
644 }
645 }
645 }
646
646
647 py_shared_iterator!(
647 py_shared_iterator!(
648 DirstateMapKeysIterator,
648 DirstateMapKeysIterator,
649 UnsafePyLeaked<StateMapIter<'static>>,
649 UnsafePyLeaked<StateMapIter<'static>>,
650 DirstateMap::translate_key,
650 DirstateMap::translate_key,
651 Option<PyBytes>
651 Option<PyBytes>
652 );
652 );
653
653
654 py_shared_iterator!(
654 py_shared_iterator!(
655 DirstateMapItemsIterator,
655 DirstateMapItemsIterator,
656 UnsafePyLeaked<StateMapIter<'static>>,
656 UnsafePyLeaked<StateMapIter<'static>>,
657 DirstateMap::translate_key_value,
657 DirstateMap::translate_key_value,
658 Option<(PyBytes, PyObject)>
658 Option<(PyBytes, PyObject)>
659 );
659 );
660
660
661 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
661 fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> {
662 let bytes = obj.extract::<PyBytes>(py)?;
662 let bytes = obj.extract::<PyBytes>(py)?;
663 match bytes.data(py).try_into() {
663 match bytes.data(py).try_into() {
664 Ok(s) => Ok(s),
664 Ok(s) => Ok(s),
665 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
665 Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())),
666 }
666 }
667 }
667 }
668
668
669 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
669 pub(super) fn v2_error(py: Python<'_>, _: DirstateV2ParseError) -> PyErr {
670 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
670 PyErr::new::<exc::ValueError, _>(py, "corrupted dirstate-v2")
671 }
671 }
672
672
673 fn dirstate_error(py: Python<'_>, e: DirstateError) -> PyErr {
673 fn dirstate_error(py: Python<'_>, e: DirstateError) -> PyErr {
674 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
674 PyErr::new::<exc::OSError, _>(py, format!("Dirstate error: {:?}", e))
675 }
675 }
General Comments 0
You need to be logged in to leave comments. Login now