dirstate: use a new `drop_merge_data` in `setparent`...
marmoute
r48874:a660d8a5 default
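The hunk below adds a `drop_merge_data` method to the C implementation of `DirstateItem` in parsers.c, so that `setparent` can ask an entry to forget everything that only made sense while the working copy had two parents: the merge-only flags are cleared, `p1_tracked` is kept only for previously merged files, and the entry is marked possibly dirty so the next status run re-checks it on disk. The stand-alone C sketch below mirrors that flag logic outside the Python C API; the flag bit values and helper names here are illustrative assumptions, the real `dirstate_flag_*` definitions live in the extension's headers.

#include <stdbool.h>
#include <stdio.h>

/* Illustrative flag bits; the real dirstate_flag_* values are defined in the
 * extension's headers, not in this hunk. */
#define FLAG_WC_TRACKED     (1 << 0)
#define FLAG_P1_TRACKED     (1 << 1)
#define FLAG_P2_TRACKED     (1 << 2)
#define FLAG_POSSIBLY_DIRTY (1 << 3)
#define FLAG_MERGED         (1 << 4)
#define FLAG_CLEAN_P1       (1 << 5)
#define FLAG_CLEAN_P2       (1 << 6)

struct item {
	unsigned char flags;
	int mode, size, mtime;
};

static bool is_merged(const struct item *it)
{
	return (it->flags & FLAG_WC_TRACKED) && (it->flags & FLAG_MERGED);
}

static bool is_from_p2(const struct item *it)
{
	return (it->flags & FLAG_WC_TRACKED) && (it->flags & FLAG_CLEAN_P2);
}

/* Same flag surgery as dirstate_item_drop_merge_data, minus the CPython
 * boilerplate. */
static void drop_merge_data(struct item *it)
{
	if (!is_merged(it) && !is_from_p2(it)) {
		return; /* nothing merge-related to forget */
	}
	if (is_merged(it)) {
		it->flags |= FLAG_P1_TRACKED; /* a merged file stays known in p1 */
	} else {
		it->flags &= ~FLAG_P1_TRACKED; /* a from-p2 file was never in p1 */
	}
	it->flags &= ~(FLAG_MERGED | FLAG_CLEAN_P1 | FLAG_CLEAN_P2 |
	               FLAG_P2_TRACKED);
	it->flags |= FLAG_POSSIBLY_DIRTY; /* force a real check on next status */
	it->mode = 0;
	it->mtime = 0;
	it->size = -1; /* stand-in for the NONNORMAL size marker */
}

int main(void)
{
	/* a typical 'm' (merged) entry before the second parent is dropped */
	struct item it = {FLAG_WC_TRACKED | FLAG_P1_TRACKED | FLAG_P2_TRACKED |
	                      FLAG_MERGED,
	                  0, -2, -1};
	drop_merge_data(&it);
	printf("flags after drop_merge_data: 0x%02x\n", (unsigned)it.flags);
	return 0;
}

Keeping `p1_tracked` for merged entries matches the intent of the hunk: once the second parent is dropped, the file should look like an ordinary, possibly-dirty file tracked in the remaining parent rather than like an added file.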
@@ -1,1316 +1,1339 @@
1 /*
2 parsers.c - efficient content parsing
3
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
5
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
8 */
9
10 #define PY_SSIZE_T_CLEAN
11 #include <Python.h>
12 #include <ctype.h>
13 #include <stddef.h>
14 #include <string.h>
15
16 #include "bitmanipulation.h"
17 #include "charencode.h"
18 #include "util.h"
19
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 is fully
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
28 #endif
29
29
30 static const char *const versionerrortext = "Python minor version mismatch";
30 static const char *const versionerrortext = "Python minor version mismatch";
31
31
32 static const int dirstate_v1_from_p2 = -2;
32 static const int dirstate_v1_from_p2 = -2;
33 static const int dirstate_v1_nonnormal = -1;
33 static const int dirstate_v1_nonnormal = -1;
34 static const int ambiguous_time = -1;
34 static const int ambiguous_time = -1;
35
35
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
37 {
37 {
38 Py_ssize_t expected_size;
38 Py_ssize_t expected_size;
39
39
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
41 return NULL;
41 return NULL;
42 }
42 }
43
43
44 return _dict_new_presized(expected_size);
44 return _dict_new_presized(expected_size);
45 }
45 }
46
46
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
47 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
48 PyObject *kwds)
48 PyObject *kwds)
49 {
49 {
50 /* We do all the initialization here and not a tp_init function because
50 /* We do all the initialization here and not a tp_init function because
51 * dirstate_item is immutable. */
51 * dirstate_item is immutable. */
52 dirstateItemObject *t;
52 dirstateItemObject *t;
53 int wc_tracked;
53 int wc_tracked;
54 int p1_tracked;
54 int p1_tracked;
55 int p2_tracked;
55 int p2_tracked;
56 int merged;
56 int merged;
57 int clean_p1;
57 int clean_p1;
58 int clean_p2;
58 int clean_p2;
59 int possibly_dirty;
59 int possibly_dirty;
60 PyObject *parentfiledata;
60 PyObject *parentfiledata;
61 static char *keywords_name[] = {
61 static char *keywords_name[] = {
62 "wc_tracked", "p1_tracked", "p2_tracked",
62 "wc_tracked", "p1_tracked", "p2_tracked",
63 "merged", "clean_p1", "clean_p2",
63 "merged", "clean_p1", "clean_p2",
64 "possibly_dirty", "parentfiledata", NULL,
64 "possibly_dirty", "parentfiledata", NULL,
65 };
65 };
66 wc_tracked = 0;
66 wc_tracked = 0;
67 p1_tracked = 0;
67 p1_tracked = 0;
68 p2_tracked = 0;
68 p2_tracked = 0;
69 merged = 0;
69 merged = 0;
70 clean_p1 = 0;
70 clean_p1 = 0;
71 clean_p2 = 0;
71 clean_p2 = 0;
72 possibly_dirty = 0;
72 possibly_dirty = 0;
73 parentfiledata = Py_None;
73 parentfiledata = Py_None;
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
74 if (!PyArg_ParseTupleAndKeywords(args, kwds, "iiiiiiiO", keywords_name,
75 &wc_tracked, &p1_tracked, &p2_tracked,
75 &wc_tracked, &p1_tracked, &p2_tracked,
76 &merged, &clean_p1, &clean_p2,
76 &merged, &clean_p1, &clean_p2,
77 &possibly_dirty, &parentfiledata
77 &possibly_dirty, &parentfiledata
78
78
79 )) {
79 )) {
80 return NULL;
80 return NULL;
81 }
81 }
82 if (merged && (clean_p1 || clean_p2)) {
82 if (merged && (clean_p1 || clean_p2)) {
83 PyErr_SetString(PyExc_RuntimeError,
83 PyErr_SetString(PyExc_RuntimeError,
84 "`merged` argument incompatible with "
84 "`merged` argument incompatible with "
85 "`clean_p1`/`clean_p2`");
85 "`clean_p1`/`clean_p2`");
86 return NULL;
86 return NULL;
87 }
87 }
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
88 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
89 if (!t) {
89 if (!t) {
90 return NULL;
90 return NULL;
91 }
91 }
92
92
93 t->flags = 0;
93 t->flags = 0;
94 if (wc_tracked) {
94 if (wc_tracked) {
95 t->flags |= dirstate_flag_wc_tracked;
95 t->flags |= dirstate_flag_wc_tracked;
96 }
96 }
97 if (p1_tracked) {
97 if (p1_tracked) {
98 t->flags |= dirstate_flag_p1_tracked;
98 t->flags |= dirstate_flag_p1_tracked;
99 }
99 }
100 if (p2_tracked) {
100 if (p2_tracked) {
101 t->flags |= dirstate_flag_p2_tracked;
101 t->flags |= dirstate_flag_p2_tracked;
102 }
102 }
103 if (possibly_dirty) {
103 if (possibly_dirty) {
104 t->flags |= dirstate_flag_possibly_dirty;
104 t->flags |= dirstate_flag_possibly_dirty;
105 }
105 }
106 if (merged) {
106 if (merged) {
107 t->flags |= dirstate_flag_merged;
107 t->flags |= dirstate_flag_merged;
108 }
108 }
109 if (clean_p1) {
109 if (clean_p1) {
110 t->flags |= dirstate_flag_clean_p1;
110 t->flags |= dirstate_flag_clean_p1;
111 }
111 }
112 if (clean_p2) {
112 if (clean_p2) {
113 t->flags |= dirstate_flag_clean_p2;
113 t->flags |= dirstate_flag_clean_p2;
114 }
114 }
115 t->mode = 0;
115 t->mode = 0;
116 t->size = dirstate_v1_nonnormal;
116 t->size = dirstate_v1_nonnormal;
117 t->mtime = ambiguous_time;
117 t->mtime = ambiguous_time;
118 if (parentfiledata != Py_None) {
118 if (parentfiledata != Py_None) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
119 if (!PyTuple_CheckExact(parentfiledata)) {
120 PyErr_SetString(
120 PyErr_SetString(
121 PyExc_TypeError,
121 PyExc_TypeError,
122 "parentfiledata should be a Tuple or None");
122 "parentfiledata should be a Tuple or None");
123 return NULL;
123 return NULL;
124 }
124 }
125 t->mode =
125 t->mode =
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
126 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 0));
127 t->size =
127 t->size =
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
128 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 1));
129 t->mtime =
129 t->mtime =
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
130 (int)PyLong_AsLong(PyTuple_GetItem(parentfiledata, 2));
131 }
131 }
132 return (PyObject *)t;
132 return (PyObject *)t;
133 }
133 }
134
134
135 static void dirstate_item_dealloc(PyObject *o)
135 static void dirstate_item_dealloc(PyObject *o)
136 {
136 {
137 PyObject_Del(o);
137 PyObject_Del(o);
138 }
138 }
139
139
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
140 static inline bool dirstate_item_c_tracked(dirstateItemObject *self)
141 {
141 {
142 return (self->flags & dirstate_flag_wc_tracked);
142 return (self->flags & dirstate_flag_wc_tracked);
143 }
143 }
144
144
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
145 static inline bool dirstate_item_c_added(dirstateItemObject *self)
146 {
146 {
147 unsigned char mask =
147 unsigned char mask =
148 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
148 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
149 dirstate_flag_p2_tracked);
149 dirstate_flag_p2_tracked);
150 unsigned char target = dirstate_flag_wc_tracked;
150 unsigned char target = dirstate_flag_wc_tracked;
151 return (self->flags & mask) == target;
151 return (self->flags & mask) == target;
152 }
152 }
153
153
154 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
154 static inline bool dirstate_item_c_removed(dirstateItemObject *self)
155 {
155 {
156 if (self->flags & dirstate_flag_wc_tracked) {
156 if (self->flags & dirstate_flag_wc_tracked) {
157 return false;
157 return false;
158 }
158 }
159 return (self->flags &
159 return (self->flags &
160 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
160 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked));
161 }
161 }
162
162
163 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
163 static inline bool dirstate_item_c_merged(dirstateItemObject *self)
164 {
164 {
165 return ((self->flags & dirstate_flag_wc_tracked) &&
165 return ((self->flags & dirstate_flag_wc_tracked) &&
166 (self->flags & dirstate_flag_merged));
166 (self->flags & dirstate_flag_merged));
167 }
167 }
168
168
169 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
169 static inline bool dirstate_item_c_merged_removed(dirstateItemObject *self)
170 {
170 {
171 if (!dirstate_item_c_removed(self)) {
171 if (!dirstate_item_c_removed(self)) {
172 return false;
172 return false;
173 }
173 }
174 return (self->flags & dirstate_flag_merged);
174 return (self->flags & dirstate_flag_merged);
175 }
175 }
176
176
177 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
177 static inline bool dirstate_item_c_from_p2(dirstateItemObject *self)
178 {
178 {
179 if (!dirstate_item_c_tracked(self)) {
179 if (!dirstate_item_c_tracked(self)) {
180 return false;
180 return false;
181 }
181 }
182 return (self->flags & dirstate_flag_clean_p2);
182 return (self->flags & dirstate_flag_clean_p2);
183 }
183 }
184
184
185 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
185 static inline bool dirstate_item_c_from_p2_removed(dirstateItemObject *self)
186 {
186 {
187 if (!dirstate_item_c_removed(self)) {
187 if (!dirstate_item_c_removed(self)) {
188 return false;
188 return false;
189 }
189 }
190 return (self->flags & dirstate_flag_clean_p2);
190 return (self->flags & dirstate_flag_clean_p2);
191 }
191 }
192
192
193 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
193 static inline char dirstate_item_c_v1_state(dirstateItemObject *self)
194 {
194 {
195 if (dirstate_item_c_removed(self)) {
195 if (dirstate_item_c_removed(self)) {
196 return 'r';
196 return 'r';
197 } else if (dirstate_item_c_merged(self)) {
197 } else if (dirstate_item_c_merged(self)) {
198 return 'm';
198 return 'm';
199 } else if (dirstate_item_c_added(self)) {
199 } else if (dirstate_item_c_added(self)) {
200 return 'a';
200 return 'a';
201 } else {
201 } else {
202 return 'n';
202 return 'n';
203 }
203 }
204 }
204 }
205
205
206 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
206 static inline int dirstate_item_c_v1_mode(dirstateItemObject *self)
207 {
207 {
208 return self->mode;
208 return self->mode;
209 }
209 }
210
210
211 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
211 static inline int dirstate_item_c_v1_size(dirstateItemObject *self)
212 {
212 {
213 if (dirstate_item_c_merged_removed(self)) {
213 if (dirstate_item_c_merged_removed(self)) {
214 return dirstate_v1_nonnormal;
214 return dirstate_v1_nonnormal;
215 } else if (dirstate_item_c_from_p2_removed(self)) {
215 } else if (dirstate_item_c_from_p2_removed(self)) {
216 return dirstate_v1_from_p2;
216 return dirstate_v1_from_p2;
217 } else if (dirstate_item_c_removed(self)) {
217 } else if (dirstate_item_c_removed(self)) {
218 return 0;
218 return 0;
219 } else if (dirstate_item_c_merged(self)) {
219 } else if (dirstate_item_c_merged(self)) {
220 return dirstate_v1_from_p2;
220 return dirstate_v1_from_p2;
221 } else if (dirstate_item_c_added(self)) {
221 } else if (dirstate_item_c_added(self)) {
222 return dirstate_v1_nonnormal;
222 return dirstate_v1_nonnormal;
223 } else if (dirstate_item_c_from_p2(self)) {
223 } else if (dirstate_item_c_from_p2(self)) {
224 return dirstate_v1_from_p2;
224 return dirstate_v1_from_p2;
225 } else if (self->flags & dirstate_flag_possibly_dirty) {
225 } else if (self->flags & dirstate_flag_possibly_dirty) {
226 return self->size; /* NON NORMAL ? */
226 return self->size; /* NON NORMAL ? */
227 } else {
227 } else {
228 return self->size;
228 return self->size;
229 }
229 }
230 }
230 }
231
231
232 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
232 static inline int dirstate_item_c_v1_mtime(dirstateItemObject *self)
233 {
233 {
234 if (dirstate_item_c_removed(self)) {
234 if (dirstate_item_c_removed(self)) {
235 return 0;
235 return 0;
236 } else if (self->flags & dirstate_flag_possibly_dirty) {
236 } else if (self->flags & dirstate_flag_possibly_dirty) {
237 return ambiguous_time;
237 return ambiguous_time;
238 } else if (dirstate_item_c_merged(self)) {
238 } else if (dirstate_item_c_merged(self)) {
239 return ambiguous_time;
239 return ambiguous_time;
240 } else if (dirstate_item_c_added(self)) {
240 } else if (dirstate_item_c_added(self)) {
241 return ambiguous_time;
241 return ambiguous_time;
242 } else if (dirstate_item_c_from_p2(self)) {
242 } else if (dirstate_item_c_from_p2(self)) {
243 return ambiguous_time;
243 return ambiguous_time;
244 } else {
244 } else {
245 return self->mtime;
245 return self->mtime;
246 }
246 }
247 }
247 }
248
248
249 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
249 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
250 {
250 {
251 char state = dirstate_item_c_v1_state(self);
251 char state = dirstate_item_c_v1_state(self);
252 return PyBytes_FromStringAndSize(&state, 1);
252 return PyBytes_FromStringAndSize(&state, 1);
253 };
253 };
254
254
255 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
255 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
256 {
256 {
257 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
257 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
258 };
258 };
259
259
260 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
260 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
261 {
261 {
262 return PyInt_FromLong(dirstate_item_c_v1_size(self));
262 return PyInt_FromLong(dirstate_item_c_v1_size(self));
263 };
263 };
264
264
265 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
265 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
266 {
266 {
267 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
267 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
268 };
268 };
269
269
270 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
270 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
271 PyObject *value)
271 PyObject *value)
272 {
272 {
273 long now;
273 long now;
274 if (!pylong_to_long(value, &now)) {
274 if (!pylong_to_long(value, &now)) {
275 return NULL;
275 return NULL;
276 }
276 }
277 if (dirstate_item_c_v1_state(self) == 'n' &&
277 if (dirstate_item_c_v1_state(self) == 'n' &&
278 dirstate_item_c_v1_mtime(self) == now) {
278 dirstate_item_c_v1_mtime(self) == now) {
279 Py_RETURN_TRUE;
279 Py_RETURN_TRUE;
280 } else {
280 } else {
281 Py_RETURN_FALSE;
281 Py_RETURN_FALSE;
282 }
282 }
283 };
283 };
284
284
285 /* This will never change since it's bound to V1
285 /* This will never change since it's bound to V1
286 */
286 */
287 static inline dirstateItemObject *
287 static inline dirstateItemObject *
288 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
288 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
289 {
289 {
290 dirstateItemObject *t =
290 dirstateItemObject *t =
291 PyObject_New(dirstateItemObject, &dirstateItemType);
291 PyObject_New(dirstateItemObject, &dirstateItemType);
292 if (!t) {
292 if (!t) {
293 return NULL;
293 return NULL;
294 }
294 }
295
295
296 if (state == 'm') {
296 if (state == 'm') {
297 t->flags =
297 t->flags =
298 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
298 (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
299 dirstate_flag_p2_tracked | dirstate_flag_merged);
299 dirstate_flag_p2_tracked | dirstate_flag_merged);
300 t->mode = 0;
300 t->mode = 0;
301 t->size = dirstate_v1_from_p2;
301 t->size = dirstate_v1_from_p2;
302 t->mtime = ambiguous_time;
302 t->mtime = ambiguous_time;
303 } else if (state == 'a') {
303 } else if (state == 'a') {
304 t->flags = dirstate_flag_wc_tracked;
304 t->flags = dirstate_flag_wc_tracked;
305 t->mode = 0;
305 t->mode = 0;
306 t->size = dirstate_v1_nonnormal;
306 t->size = dirstate_v1_nonnormal;
307 t->mtime = ambiguous_time;
307 t->mtime = ambiguous_time;
308 } else if (state == 'r') {
308 } else if (state == 'r') {
309 t->mode = 0;
309 t->mode = 0;
310 t->size = 0;
310 t->size = 0;
311 t->mtime = 0;
311 t->mtime = 0;
312 if (size == dirstate_v1_nonnormal) {
312 if (size == dirstate_v1_nonnormal) {
313 t->flags =
313 t->flags =
314 (dirstate_flag_p1_tracked |
314 (dirstate_flag_p1_tracked |
315 dirstate_flag_p2_tracked | dirstate_flag_merged);
315 dirstate_flag_p2_tracked | dirstate_flag_merged);
316 } else if (size == dirstate_v1_from_p2) {
316 } else if (size == dirstate_v1_from_p2) {
317 t->flags =
317 t->flags =
318 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
318 (dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
319 } else {
319 } else {
320 t->flags = dirstate_flag_p1_tracked;
320 t->flags = dirstate_flag_p1_tracked;
321 }
321 }
322 } else if (state == 'n') {
322 } else if (state == 'n') {
323 if (size == dirstate_v1_from_p2) {
323 if (size == dirstate_v1_from_p2) {
324 t->flags =
324 t->flags =
325 (dirstate_flag_wc_tracked |
325 (dirstate_flag_wc_tracked |
326 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
326 dirstate_flag_p2_tracked | dirstate_flag_clean_p2);
327 t->mode = 0;
327 t->mode = 0;
328 t->size = dirstate_v1_from_p2;
328 t->size = dirstate_v1_from_p2;
329 t->mtime = ambiguous_time;
329 t->mtime = ambiguous_time;
330 } else if (size == dirstate_v1_nonnormal) {
330 } else if (size == dirstate_v1_nonnormal) {
331 t->flags = (dirstate_flag_wc_tracked |
331 t->flags = (dirstate_flag_wc_tracked |
332 dirstate_flag_p1_tracked |
332 dirstate_flag_p1_tracked |
333 dirstate_flag_possibly_dirty);
333 dirstate_flag_possibly_dirty);
334 t->mode = 0;
334 t->mode = 0;
335 t->size = dirstate_v1_nonnormal;
335 t->size = dirstate_v1_nonnormal;
336 t->mtime = ambiguous_time;
336 t->mtime = ambiguous_time;
337 } else if (mtime == ambiguous_time) {
337 } else if (mtime == ambiguous_time) {
338 t->flags = (dirstate_flag_wc_tracked |
338 t->flags = (dirstate_flag_wc_tracked |
339 dirstate_flag_p1_tracked |
339 dirstate_flag_p1_tracked |
340 dirstate_flag_possibly_dirty);
340 dirstate_flag_possibly_dirty);
341 t->mode = mode;
341 t->mode = mode;
342 t->size = size;
342 t->size = size;
343 t->mtime = 0;
343 t->mtime = 0;
344 } else {
344 } else {
345 t->flags = (dirstate_flag_wc_tracked |
345 t->flags = (dirstate_flag_wc_tracked |
346 dirstate_flag_p1_tracked);
346 dirstate_flag_p1_tracked);
347 t->mode = mode;
347 t->mode = mode;
348 t->size = size;
348 t->size = size;
349 t->mtime = mtime;
349 t->mtime = mtime;
350 }
350 }
351 } else {
351 } else {
352 PyErr_Format(PyExc_RuntimeError,
352 PyErr_Format(PyExc_RuntimeError,
353 "unknown state: `%c` (%d, %d, %d)", state, mode,
353 "unknown state: `%c` (%d, %d, %d)", state, mode,
354 size, mtime, NULL);
354 size, mtime, NULL);
355 Py_DECREF(t);
355 Py_DECREF(t);
356 return NULL;
356 return NULL;
357 }
357 }
358
358
359 return t;
359 return t;
360 }
360 }
361
361
362 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
362 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
363 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
363 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
364 PyObject *args)
364 PyObject *args)
365 {
365 {
366 /* We do all the initialization here and not a tp_init function because
366 /* We do all the initialization here and not a tp_init function because
367 * dirstate_item is immutable. */
367 * dirstate_item is immutable. */
368 char state;
368 char state;
369 int size, mode, mtime;
369 int size, mode, mtime;
370 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
370 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
371 return NULL;
371 return NULL;
372 }
372 }
373 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
373 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
374 };
374 };
375
375
376 /* constructor to help legacy API to build a new "added" item
376 /* constructor to help legacy API to build a new "added" item
377
377
378 Should eventually be removed */
378 Should eventually be removed */
379 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
379 static PyObject *dirstate_item_new_added(PyTypeObject *subtype)
380 {
380 {
381 dirstateItemObject *t;
381 dirstateItemObject *t;
382 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
382 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
383 if (!t) {
383 if (!t) {
384 return NULL;
384 return NULL;
385 }
385 }
386 t->flags = dirstate_flag_wc_tracked;
386 t->flags = dirstate_flag_wc_tracked;
387 t->mode = 0;
387 t->mode = 0;
388 t->size = dirstate_v1_nonnormal;
388 t->size = dirstate_v1_nonnormal;
389 t->mtime = ambiguous_time;
389 t->mtime = ambiguous_time;
390 return (PyObject *)t;
390 return (PyObject *)t;
391 };
391 };
392
392
393 /* constructor to help legacy API to build a new "merged" item
393 /* constructor to help legacy API to build a new "merged" item
394
394
395 Should eventually be removed */
395 Should eventually be removed */
396 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
396 static PyObject *dirstate_item_new_merged(PyTypeObject *subtype)
397 {
397 {
398 dirstateItemObject *t;
398 dirstateItemObject *t;
399 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
399 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
400 if (!t) {
400 if (!t) {
401 return NULL;
401 return NULL;
402 }
402 }
403 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
403 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
404 dirstate_flag_p2_tracked | dirstate_flag_merged);
404 dirstate_flag_p2_tracked | dirstate_flag_merged);
405 t->mode = 0;
405 t->mode = 0;
406 t->size = dirstate_v1_from_p2;
406 t->size = dirstate_v1_from_p2;
407 t->mtime = ambiguous_time;
407 t->mtime = ambiguous_time;
408 return (PyObject *)t;
408 return (PyObject *)t;
409 };
409 };
410
410
411 /* constructor to help legacy API to build a new "from_p2" item
411 /* constructor to help legacy API to build a new "from_p2" item
412
412
413 Should eventually be removed */
413 Should eventually be removed */
414 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
414 static PyObject *dirstate_item_new_from_p2(PyTypeObject *subtype)
415 {
415 {
416 /* We do all the initialization here and not a tp_init function because
416 /* We do all the initialization here and not a tp_init function because
417 * dirstate_item is immutable. */
417 * dirstate_item is immutable. */
418 dirstateItemObject *t;
418 dirstateItemObject *t;
419 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
419 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
420 if (!t) {
420 if (!t) {
421 return NULL;
421 return NULL;
422 }
422 }
423 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
423 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p2_tracked |
424 dirstate_flag_clean_p2);
424 dirstate_flag_clean_p2);
425 t->mode = 0;
425 t->mode = 0;
426 t->size = dirstate_v1_from_p2;
426 t->size = dirstate_v1_from_p2;
427 t->mtime = ambiguous_time;
427 t->mtime = ambiguous_time;
428 return (PyObject *)t;
428 return (PyObject *)t;
429 };
429 };
430
430
431 /* constructor to help legacy API to build a new "possibly" item
431 /* constructor to help legacy API to build a new "possibly" item
432
432
433 Should eventually be removed */
433 Should eventually be removed */
434 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
434 static PyObject *dirstate_item_new_possibly_dirty(PyTypeObject *subtype)
435 {
435 {
436 /* We do all the initialization here and not a tp_init function because
436 /* We do all the initialization here and not a tp_init function because
437 * dirstate_item is immutable. */
437 * dirstate_item is immutable. */
438 dirstateItemObject *t;
438 dirstateItemObject *t;
439 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
439 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
440 if (!t) {
440 if (!t) {
441 return NULL;
441 return NULL;
442 }
442 }
443 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
443 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked |
444 dirstate_flag_possibly_dirty);
444 dirstate_flag_possibly_dirty);
445 t->mode = 0;
445 t->mode = 0;
446 t->size = dirstate_v1_nonnormal;
446 t->size = dirstate_v1_nonnormal;
447 t->mtime = ambiguous_time;
447 t->mtime = ambiguous_time;
448 return (PyObject *)t;
448 return (PyObject *)t;
449 };
449 };
450
450
451 /* constructor to help legacy API to build a new "normal" item
451 /* constructor to help legacy API to build a new "normal" item
452
452
453 Should eventually be removed */
453 Should eventually be removed */
454 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
454 static PyObject *dirstate_item_new_normal(PyTypeObject *subtype, PyObject *args)
455 {
455 {
456 /* We do all the initialization here and not a tp_init function because
456 /* We do all the initialization here and not a tp_init function because
457 * dirstate_item is immutable. */
457 * dirstate_item is immutable. */
458 dirstateItemObject *t;
458 dirstateItemObject *t;
459 int size, mode, mtime;
459 int size, mode, mtime;
460 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
460 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
461 return NULL;
461 return NULL;
462 }
462 }
463
463
464 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
464 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
465 if (!t) {
465 if (!t) {
466 return NULL;
466 return NULL;
467 }
467 }
468 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
468 t->flags = (dirstate_flag_wc_tracked | dirstate_flag_p1_tracked);
469 t->mode = mode;
469 t->mode = mode;
470 t->size = size;
470 t->size = size;
471 t->mtime = mtime;
471 t->mtime = mtime;
472 return (PyObject *)t;
472 return (PyObject *)t;
473 };
473 };
474
474
475 /* This means the next status call will have to actually check its content
475 /* This means the next status call will have to actually check its content
476 to make sure it is correct. */
476 to make sure it is correct. */
477 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
477 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
478 {
478 {
479 self->flags |= dirstate_flag_possibly_dirty;
479 self->flags |= dirstate_flag_possibly_dirty;
480 Py_RETURN_NONE;
480 Py_RETURN_NONE;
481 }
481 }
482
482
483 /* See docstring of the python implementation for details */
483 /* See docstring of the python implementation for details */
484 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
484 static PyObject *dirstate_item_set_clean(dirstateItemObject *self,
485 PyObject *args)
485 PyObject *args)
486 {
486 {
487 int size, mode, mtime;
487 int size, mode, mtime;
488 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
488 if (!PyArg_ParseTuple(args, "iii", &mode, &size, &mtime)) {
489 return NULL;
489 return NULL;
490 }
490 }
491 self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked;
491 self->flags = dirstate_flag_wc_tracked | dirstate_flag_p1_tracked;
492 self->mode = mode;
492 self->mode = mode;
493 self->size = size;
493 self->size = size;
494 self->mtime = mtime;
494 self->mtime = mtime;
495 Py_RETURN_NONE;
495 Py_RETURN_NONE;
496 }
496 }
497
497
498 static PyObject *dirstate_item_set_tracked(dirstateItemObject *self)
499 {
500 self->flags |= dirstate_flag_wc_tracked;
501 self->flags |= dirstate_flag_possibly_dirty;
502 /* size = None on the python side turns into size = NON_NORMAL when
503 * accessed. So the next line is currently required, but some future
504 * clean up would be welcome. */
505 self->size = dirstate_v1_nonnormal;
506 Py_RETURN_NONE;
507 }
508
509 static PyObject *dirstate_item_set_untracked(dirstateItemObject *self)
510 {
511 self->flags &= ~dirstate_flag_wc_tracked;
512 self->mode = 0;
513 self->mtime = 0;
514 self->size = 0;
515 Py_RETURN_NONE;
516 }
517
518 static PyObject *dirstate_item_drop_merge_data(dirstateItemObject *self)
519 {
520 if (dirstate_item_c_merged(self) || dirstate_item_c_from_p2(self)) {
521 if (dirstate_item_c_merged(self)) {
522 self->flags |= dirstate_flag_p1_tracked;
523 } else {
524 self->flags &= ~dirstate_flag_p1_tracked;
525 }
526 self->flags &=
527 ~(dirstate_flag_merged | dirstate_flag_clean_p1 |
528 dirstate_flag_clean_p2 | dirstate_flag_p2_tracked);
529 self->flags |= dirstate_flag_possibly_dirty;
530 self->mode = 0;
531 self->mtime = 0;
532 /* size = None on the python side turns into size = NON_NORMAL
533 * when accessed. So the next line is currently required, but
534 * some future clean up would be welcome. */
535 self->size = dirstate_v1_nonnormal;
536 }
537 Py_RETURN_NONE;
538 }
539 static PyMethodDef dirstate_item_methods[] = {
540 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
541 "return a \"state\" suitable for v1 serialization"},
542 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
543 "return a \"mode\" suitable for v1 serialization"},
544 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
545 "return a \"size\" suitable for v1 serialization"},
546 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
547 "return a \"mtime\" suitable for v1 serialization"},
548 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
549 "True if the stored mtime would be ambiguous with the current time"},
550 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
551 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
552 {"new_added", (PyCFunction)dirstate_item_new_added,
553 METH_NOARGS | METH_CLASS,
554 "constructor to help legacy API to build a new \"added\" item"},
555 {"new_merged", (PyCFunction)dirstate_item_new_merged,
556 METH_NOARGS | METH_CLASS,
557 "constructor to help legacy API to build a new \"merged\" item"},
558 {"new_from_p2", (PyCFunction)dirstate_item_new_from_p2,
559 METH_NOARGS | METH_CLASS,
560 "constructor to help legacy API to build a new \"from_p2\" item"},
561 {"new_possibly_dirty", (PyCFunction)dirstate_item_new_possibly_dirty,
562 METH_NOARGS | METH_CLASS,
563 "constructor to help legacy API to build a new \"possibly_dirty\" item"},
564 {"new_normal", (PyCFunction)dirstate_item_new_normal,
565 METH_VARARGS | METH_CLASS,
566 "constructor to help legacy API to build a new \"normal\" item"},
567 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
568 METH_NOARGS, "mark a file as \"possibly dirty\""},
569 {"set_clean", (PyCFunction)dirstate_item_set_clean, METH_VARARGS,
570 "mark a file as \"clean\""},
571 {"set_tracked", (PyCFunction)dirstate_item_set_tracked, METH_NOARGS,
572 "mark a file as \"tracked\""},
573 {"set_untracked", (PyCFunction)dirstate_item_set_untracked, METH_NOARGS,
574 "mark a file as \"untracked\""},
575 {"drop_merge_data", (PyCFunction)dirstate_item_drop_merge_data, METH_NOARGS,
576 "remove all \"merge-only\" information from a DirstateItem"},
577 {NULL} /* Sentinel */
578 };
556
579
557 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
580 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
558 {
581 {
559 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
582 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
560 };
583 };
561
584
562 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
585 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
563 {
586 {
564 return PyInt_FromLong(dirstate_item_c_v1_size(self));
587 return PyInt_FromLong(dirstate_item_c_v1_size(self));
565 };
588 };
566
589
567 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
590 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
568 {
591 {
569 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
592 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
570 };
593 };
571
594
572 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
595 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
573 {
596 {
574 char state = dirstate_item_c_v1_state(self);
597 char state = dirstate_item_c_v1_state(self);
575 return PyBytes_FromStringAndSize(&state, 1);
598 return PyBytes_FromStringAndSize(&state, 1);
576 };
599 };
577
600
578 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
601 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
579 {
602 {
580 if (dirstate_item_c_tracked(self)) {
603 if (dirstate_item_c_tracked(self)) {
581 Py_RETURN_TRUE;
604 Py_RETURN_TRUE;
582 } else {
605 } else {
583 Py_RETURN_FALSE;
606 Py_RETURN_FALSE;
584 }
607 }
585 };
608 };
586
609
587 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
610 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
588 {
611 {
589 if (dirstate_item_c_added(self)) {
612 if (dirstate_item_c_added(self)) {
590 Py_RETURN_TRUE;
613 Py_RETURN_TRUE;
591 } else {
614 } else {
592 Py_RETURN_FALSE;
615 Py_RETURN_FALSE;
593 }
616 }
594 };
617 };
595
618
596 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
619 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
597 {
620 {
598 if (dirstate_item_c_merged(self)) {
621 if (dirstate_item_c_merged(self)) {
599 Py_RETURN_TRUE;
622 Py_RETURN_TRUE;
600 } else {
623 } else {
601 Py_RETURN_FALSE;
624 Py_RETURN_FALSE;
602 }
625 }
603 };
626 };
604
627
605 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
628 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
606 {
629 {
607 if (dirstate_item_c_merged_removed(self)) {
630 if (dirstate_item_c_merged_removed(self)) {
608 Py_RETURN_TRUE;
631 Py_RETURN_TRUE;
609 } else {
632 } else {
610 Py_RETURN_FALSE;
633 Py_RETURN_FALSE;
611 }
634 }
612 };
635 };
613
636
614 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
637 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
615 {
638 {
616 if (dirstate_item_c_from_p2(self)) {
639 if (dirstate_item_c_from_p2(self)) {
617 Py_RETURN_TRUE;
640 Py_RETURN_TRUE;
618 } else {
641 } else {
619 Py_RETURN_FALSE;
642 Py_RETURN_FALSE;
620 }
643 }
621 };
644 };
622
645
623 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
646 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
624 {
647 {
625 if (dirstate_item_c_from_p2_removed(self)) {
648 if (dirstate_item_c_from_p2_removed(self)) {
626 Py_RETURN_TRUE;
649 Py_RETURN_TRUE;
627 } else {
650 } else {
628 Py_RETURN_FALSE;
651 Py_RETURN_FALSE;
629 }
652 }
630 };
653 };
631
654
632 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
655 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
633 {
656 {
634 if (dirstate_item_c_removed(self)) {
657 if (dirstate_item_c_removed(self)) {
635 Py_RETURN_TRUE;
658 Py_RETURN_TRUE;
636 } else {
659 } else {
637 Py_RETURN_FALSE;
660 Py_RETURN_FALSE;
638 }
661 }
639 };
662 };
640
663
641 static PyObject *dm_nonnormal(dirstateItemObject *self)
664 static PyObject *dm_nonnormal(dirstateItemObject *self)
642 {
665 {
643 if ((dirstate_item_c_v1_state(self) != 'n') ||
666 if ((dirstate_item_c_v1_state(self) != 'n') ||
644 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
667 (dirstate_item_c_v1_mtime(self) == ambiguous_time)) {
645 Py_RETURN_TRUE;
668 Py_RETURN_TRUE;
646 } else {
669 } else {
647 Py_RETURN_FALSE;
670 Py_RETURN_FALSE;
648 }
671 }
649 };
672 };
650 static PyObject *dm_otherparent(dirstateItemObject *self)
673 static PyObject *dm_otherparent(dirstateItemObject *self)
651 {
674 {
652 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
675 if (dirstate_item_c_v1_mtime(self) == dirstate_v1_from_p2) {
653 Py_RETURN_TRUE;
676 Py_RETURN_TRUE;
654 } else {
677 } else {
655 Py_RETURN_FALSE;
678 Py_RETURN_FALSE;
656 }
679 }
657 };
680 };
658
681
659 static PyGetSetDef dirstate_item_getset[] = {
682 static PyGetSetDef dirstate_item_getset[] = {
660 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
683 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
661 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
684 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
662 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
685 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
663 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
686 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
664 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
687 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
665 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
688 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
666 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
689 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
667 "merged_removed", NULL},
690 "merged_removed", NULL},
668 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
691 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
669 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
692 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
670 "from_p2_removed", NULL},
693 "from_p2_removed", NULL},
671 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
694 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
672 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
695 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
673 {"dm_nonnormal", (getter)dm_nonnormal, NULL, "dm_nonnormal", NULL},
696 {"dm_nonnormal", (getter)dm_nonnormal, NULL, "dm_nonnormal", NULL},
674 {"dm_otherparent", (getter)dm_otherparent, NULL, "dm_otherparent", NULL},
697 {"dm_otherparent", (getter)dm_otherparent, NULL, "dm_otherparent", NULL},
675 {NULL} /* Sentinel */
698 {NULL} /* Sentinel */
676 };
699 };
677
700
678 PyTypeObject dirstateItemType = {
701 PyTypeObject dirstateItemType = {
679 PyVarObject_HEAD_INIT(NULL, 0) /* header */
702 PyVarObject_HEAD_INIT(NULL, 0) /* header */
680 "dirstate_tuple", /* tp_name */
703 "dirstate_tuple", /* tp_name */
681 sizeof(dirstateItemObject), /* tp_basicsize */
704 sizeof(dirstateItemObject), /* tp_basicsize */
682 0, /* tp_itemsize */
705 0, /* tp_itemsize */
683 (destructor)dirstate_item_dealloc, /* tp_dealloc */
706 (destructor)dirstate_item_dealloc, /* tp_dealloc */
684 0, /* tp_print */
707 0, /* tp_print */
685 0, /* tp_getattr */
708 0, /* tp_getattr */
686 0, /* tp_setattr */
709 0, /* tp_setattr */
687 0, /* tp_compare */
710 0, /* tp_compare */
688 0, /* tp_repr */
711 0, /* tp_repr */
689 0, /* tp_as_number */
712 0, /* tp_as_number */
690 0, /* tp_as_sequence */
713 0, /* tp_as_sequence */
691 0, /* tp_as_mapping */
714 0, /* tp_as_mapping */
692 0, /* tp_hash */
715 0, /* tp_hash */
693 0, /* tp_call */
716 0, /* tp_call */
694 0, /* tp_str */
717 0, /* tp_str */
695 0, /* tp_getattro */
718 0, /* tp_getattro */
696 0, /* tp_setattro */
719 0, /* tp_setattro */
697 0, /* tp_as_buffer */
720 0, /* tp_as_buffer */
698 Py_TPFLAGS_DEFAULT, /* tp_flags */
721 Py_TPFLAGS_DEFAULT, /* tp_flags */
699 "dirstate tuple", /* tp_doc */
722 "dirstate tuple", /* tp_doc */
700 0, /* tp_traverse */
723 0, /* tp_traverse */
701 0, /* tp_clear */
724 0, /* tp_clear */
702 0, /* tp_richcompare */
725 0, /* tp_richcompare */
703 0, /* tp_weaklistoffset */
726 0, /* tp_weaklistoffset */
704 0, /* tp_iter */
727 0, /* tp_iter */
705 0, /* tp_iternext */
728 0, /* tp_iternext */
706 dirstate_item_methods, /* tp_methods */
729 dirstate_item_methods, /* tp_methods */
707 0, /* tp_members */
730 0, /* tp_members */
708 dirstate_item_getset, /* tp_getset */
731 dirstate_item_getset, /* tp_getset */
709 0, /* tp_base */
732 0, /* tp_base */
710 0, /* tp_dict */
733 0, /* tp_dict */
711 0, /* tp_descr_get */
734 0, /* tp_descr_get */
712 0, /* tp_descr_set */
735 0, /* tp_descr_set */
713 0, /* tp_dictoffset */
736 0, /* tp_dictoffset */
714 0, /* tp_init */
737 0, /* tp_init */
715 0, /* tp_alloc */
738 0, /* tp_alloc */
716 dirstate_item_new, /* tp_new */
739 dirstate_item_new, /* tp_new */
717 };
740 };
718
741
719 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
742 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
720 {
743 {
721 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
744 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
722 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
745 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
723 char state, *cur, *str, *cpos;
746 char state, *cur, *str, *cpos;
724 int mode, size, mtime;
747 int mode, size, mtime;
725 unsigned int flen, pos = 40;
748 unsigned int flen, pos = 40;
726 Py_ssize_t len = 40;
749 Py_ssize_t len = 40;
727 Py_ssize_t readlen;
750 Py_ssize_t readlen;
728
751
729 if (!PyArg_ParseTuple(
752 if (!PyArg_ParseTuple(
730 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
753 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
731 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
754 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
732 goto quit;
755 goto quit;
733 }
756 }
734
757
735 len = readlen;
758 len = readlen;
736
759
737 /* read parents */
760 /* read parents */
738 if (len < 40) {
761 if (len < 40) {
739 PyErr_SetString(PyExc_ValueError,
762 PyErr_SetString(PyExc_ValueError,
740 "too little data for parents");
763 "too little data for parents");
741 goto quit;
764 goto quit;
742 }
765 }
743
766
744 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
767 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
745 str + 20, (Py_ssize_t)20);
768 str + 20, (Py_ssize_t)20);
746 if (!parents) {
769 if (!parents) {
747 goto quit;
770 goto quit;
748 }
771 }
749
772
750 /* read filenames */
773 /* read filenames */
751 while (pos >= 40 && pos < len) {
774 while (pos >= 40 && pos < len) {
752 if (pos + 17 > len) {
775 if (pos + 17 > len) {
753 PyErr_SetString(PyExc_ValueError,
776 PyErr_SetString(PyExc_ValueError,
754 "overflow in dirstate");
777 "overflow in dirstate");
755 goto quit;
778 goto quit;
756 }
779 }
757 cur = str + pos;
780 cur = str + pos;
758 /* unpack header */
781 /* unpack header */
759 state = *cur;
782 state = *cur;
760 mode = getbe32(cur + 1);
783 mode = getbe32(cur + 1);
761 size = getbe32(cur + 5);
784 size = getbe32(cur + 5);
762 mtime = getbe32(cur + 9);
785 mtime = getbe32(cur + 9);
763 flen = getbe32(cur + 13);
786 flen = getbe32(cur + 13);
764 pos += 17;
787 pos += 17;
765 cur += 17;
788 cur += 17;
766 if (flen > len - pos) {
789 if (flen > len - pos) {
767 PyErr_SetString(PyExc_ValueError,
790 PyErr_SetString(PyExc_ValueError,
768 "overflow in dirstate");
791 "overflow in dirstate");
769 goto quit;
792 goto quit;
770 }
793 }
771
794
772 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
795 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
773 size, mtime);
796 size, mtime);
774 if (!entry)
797 if (!entry)
775 goto quit;
798 goto quit;
776 cpos = memchr(cur, 0, flen);
799 cpos = memchr(cur, 0, flen);
777 if (cpos) {
800 if (cpos) {
778 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
801 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
779 cname = PyBytes_FromStringAndSize(
802 cname = PyBytes_FromStringAndSize(
780 cpos + 1, flen - (cpos - cur) - 1);
803 cpos + 1, flen - (cpos - cur) - 1);
781 if (!fname || !cname ||
804 if (!fname || !cname ||
782 PyDict_SetItem(cmap, fname, cname) == -1 ||
805 PyDict_SetItem(cmap, fname, cname) == -1 ||
783 PyDict_SetItem(dmap, fname, entry) == -1) {
806 PyDict_SetItem(dmap, fname, entry) == -1) {
784 goto quit;
807 goto quit;
785 }
808 }
786 Py_DECREF(cname);
809 Py_DECREF(cname);
787 } else {
810 } else {
788 fname = PyBytes_FromStringAndSize(cur, flen);
811 fname = PyBytes_FromStringAndSize(cur, flen);
789 if (!fname ||
812 if (!fname ||
790 PyDict_SetItem(dmap, fname, entry) == -1) {
813 PyDict_SetItem(dmap, fname, entry) == -1) {
791 goto quit;
814 goto quit;
792 }
815 }
793 }
816 }
794 Py_DECREF(fname);
817 Py_DECREF(fname);
795 Py_DECREF(entry);
818 Py_DECREF(entry);
796 fname = cname = entry = NULL;
819 fname = cname = entry = NULL;
797 pos += flen;
820 pos += flen;
798 }
821 }
799
822
800 ret = parents;
823 ret = parents;
801 Py_INCREF(ret);
824 Py_INCREF(ret);
802 quit:
825 quit:
803 Py_XDECREF(fname);
826 Py_XDECREF(fname);
804 Py_XDECREF(cname);
827 Py_XDECREF(cname);
805 Py_XDECREF(entry);
828 Py_XDECREF(entry);
806 Py_XDECREF(parents);
829 Py_XDECREF(parents);
807 return ret;
830 return ret;
808 }
831 }
809
832
810 /*
833 /*
811 * Build a set of non-normal and other parent entries from the dirstate dmap
834 * Build a set of non-normal and other parent entries from the dirstate dmap
812 */
835 */
813 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
836 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
814 {
837 {
815 PyObject *dmap, *fname, *v;
838 PyObject *dmap, *fname, *v;
816 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
839 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
817 Py_ssize_t pos;
840 Py_ssize_t pos;
818
841
819 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
842 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
820 &dmap)) {
843 &dmap)) {
821 goto bail;
844 goto bail;
822 }
845 }
823
846
824 nonnset = PySet_New(NULL);
847 nonnset = PySet_New(NULL);
825 if (nonnset == NULL) {
848 if (nonnset == NULL) {
826 goto bail;
849 goto bail;
827 }
850 }
828
851
829 otherpset = PySet_New(NULL);
852 otherpset = PySet_New(NULL);
830 if (otherpset == NULL) {
853 if (otherpset == NULL) {
831 goto bail;
854 goto bail;
832 }
855 }
833
856
834 pos = 0;
857 pos = 0;
835 while (PyDict_Next(dmap, &pos, &fname, &v)) {
858 while (PyDict_Next(dmap, &pos, &fname, &v)) {
836 dirstateItemObject *t;
859 dirstateItemObject *t;
837 if (!dirstate_tuple_check(v)) {
860 if (!dirstate_tuple_check(v)) {
838 PyErr_SetString(PyExc_TypeError,
861 PyErr_SetString(PyExc_TypeError,
839 "expected a dirstate tuple");
862 "expected a dirstate tuple");
840 goto bail;
863 goto bail;
841 }
864 }
842 t = (dirstateItemObject *)v;
865 t = (dirstateItemObject *)v;
843
866
844 if (dirstate_item_c_from_p2(t)) {
867 if (dirstate_item_c_from_p2(t)) {
845 if (PySet_Add(otherpset, fname) == -1) {
868 if (PySet_Add(otherpset, fname) == -1) {
846 goto bail;
869 goto bail;
847 }
870 }
848 }
871 }
849 if (!(t->flags & dirstate_flag_wc_tracked) ||
872 if (!(t->flags & dirstate_flag_wc_tracked) ||
850 !(t->flags &
873 !(t->flags &
851 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked)) ||
874 (dirstate_flag_p1_tracked | dirstate_flag_p2_tracked)) ||
852 (t->flags &
875 (t->flags &
853 (dirstate_flag_possibly_dirty | dirstate_flag_merged))) {
876 (dirstate_flag_possibly_dirty | dirstate_flag_merged))) {
854 if (PySet_Add(nonnset, fname) == -1) {
877 if (PySet_Add(nonnset, fname) == -1) {
855 goto bail;
878 goto bail;
856 }
879 }
857 }
880 }
858 }
881 }
859
882
860 result = Py_BuildValue("(OO)", nonnset, otherpset);
883 result = Py_BuildValue("(OO)", nonnset, otherpset);
861 if (result == NULL) {
884 if (result == NULL) {
862 goto bail;
885 goto bail;
863 }
886 }
864 Py_DECREF(nonnset);
887 Py_DECREF(nonnset);
865 Py_DECREF(otherpset);
888 Py_DECREF(otherpset);
866 return result;
889 return result;
867 bail:
890 bail:
868 Py_XDECREF(nonnset);
891 Py_XDECREF(nonnset);
869 Py_XDECREF(otherpset);
892 Py_XDECREF(otherpset);
870 Py_XDECREF(result);
893 Py_XDECREF(result);
871 return NULL;
894 return NULL;
872 }
895 }
873
896
874 /*
897 /*
875 * Efficiently pack a dirstate object into its on-disk format.
898 * Efficiently pack a dirstate object into its on-disk format.
876 */
899 */
877 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
900 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
878 {
901 {
879 PyObject *packobj = NULL;
902 PyObject *packobj = NULL;
880 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
903 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
881 Py_ssize_t nbytes, pos, l;
904 Py_ssize_t nbytes, pos, l;
882 PyObject *k, *v = NULL, *pn;
905 PyObject *k, *v = NULL, *pn;
883 char *p, *s;
906 char *p, *s;
884 int now;
907 int now;
885
908
886 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
909 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
887 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
910 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
888 &now)) {
911 &now)) {
889 return NULL;
912 return NULL;
890 }
913 }
891
914
892 if (PyTuple_Size(pl) != 2) {
915 if (PyTuple_Size(pl) != 2) {
893 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
916 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
894 return NULL;
917 return NULL;
895 }
918 }
896
919
897 /* Figure out how much we need to allocate. */
920 /* Figure out how much we need to allocate. */
898 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
921 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
899 PyObject *c;
922 PyObject *c;
900 if (!PyBytes_Check(k)) {
923 if (!PyBytes_Check(k)) {
901 PyErr_SetString(PyExc_TypeError, "expected string key");
924 PyErr_SetString(PyExc_TypeError, "expected string key");
902 goto bail;
925 goto bail;
903 }
926 }
904 nbytes += PyBytes_GET_SIZE(k) + 17;
927 nbytes += PyBytes_GET_SIZE(k) + 17;
905 c = PyDict_GetItem(copymap, k);
928 c = PyDict_GetItem(copymap, k);
906 if (c) {
929 if (c) {
907 if (!PyBytes_Check(c)) {
930 if (!PyBytes_Check(c)) {
908 PyErr_SetString(PyExc_TypeError,
931 PyErr_SetString(PyExc_TypeError,
909 "expected string key");
932 "expected string key");
910 goto bail;
933 goto bail;
911 }
934 }
912 nbytes += PyBytes_GET_SIZE(c) + 1;
935 nbytes += PyBytes_GET_SIZE(c) + 1;
913 }
936 }
914 }
937 }
915
938
916 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
939 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
917 if (packobj == NULL) {
940 if (packobj == NULL) {
918 goto bail;
941 goto bail;
919 }
942 }
920
943
921 p = PyBytes_AS_STRING(packobj);
944 p = PyBytes_AS_STRING(packobj);
922
945
923 pn = PyTuple_GET_ITEM(pl, 0);
946 pn = PyTuple_GET_ITEM(pl, 0);
924 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
947 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
925 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
948 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
926 goto bail;
949 goto bail;
927 }
950 }
928 memcpy(p, s, l);
951 memcpy(p, s, l);
929 p += 20;
952 p += 20;
930 pn = PyTuple_GET_ITEM(pl, 1);
953 pn = PyTuple_GET_ITEM(pl, 1);
931 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
954 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
932 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
955 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
933 goto bail;
956 goto bail;
934 }
957 }
935 memcpy(p, s, l);
958 memcpy(p, s, l);
936 p += 20;
959 p += 20;
937
960
938 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
961 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
939 dirstateItemObject *tuple;
962 dirstateItemObject *tuple;
940 char state;
963 char state;
941 int mode, size, mtime;
964 int mode, size, mtime;
942 Py_ssize_t len, l;
965 Py_ssize_t len, l;
943 PyObject *o;
966 PyObject *o;
944 char *t;
967 char *t;
945
968
946 if (!dirstate_tuple_check(v)) {
969 if (!dirstate_tuple_check(v)) {
947 PyErr_SetString(PyExc_TypeError,
970 PyErr_SetString(PyExc_TypeError,
948 "expected a dirstate tuple");
971 "expected a dirstate tuple");
949 goto bail;
972 goto bail;
950 }
973 }
951 tuple = (dirstateItemObject *)v;
974 tuple = (dirstateItemObject *)v;
952
975
953 state = dirstate_item_c_v1_state(tuple);
976 state = dirstate_item_c_v1_state(tuple);
954 mode = dirstate_item_c_v1_mode(tuple);
977 mode = dirstate_item_c_v1_mode(tuple);
955 size = dirstate_item_c_v1_size(tuple);
978 size = dirstate_item_c_v1_size(tuple);
956 mtime = dirstate_item_c_v1_mtime(tuple);
979 mtime = dirstate_item_c_v1_mtime(tuple);
957 if (state == 'n' && mtime == now) {
980 if (state == 'n' && mtime == now) {
958 /* See pure/parsers.py:pack_dirstate for why we do
981 /* See pure/parsers.py:pack_dirstate for why we do
959 * this. */
982 * this. */
960 mtime = -1;
983 mtime = -1;
961 mtime_unset = (PyObject *)dirstate_item_from_v1_data(
984 mtime_unset = (PyObject *)dirstate_item_from_v1_data(
962 state, mode, size, mtime);
985 state, mode, size, mtime);
963 if (!mtime_unset) {
986 if (!mtime_unset) {
964 goto bail;
987 goto bail;
965 }
988 }
966 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
989 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
967 goto bail;
990 goto bail;
968 }
991 }
969 Py_DECREF(mtime_unset);
992 Py_DECREF(mtime_unset);
970 mtime_unset = NULL;
993 mtime_unset = NULL;
971 }
994 }
972 *p++ = state;
995 *p++ = state;
973 putbe32((uint32_t)mode, p);
996 putbe32((uint32_t)mode, p);
974 putbe32((uint32_t)size, p + 4);
997 putbe32((uint32_t)size, p + 4);
975 putbe32((uint32_t)mtime, p + 8);
998 putbe32((uint32_t)mtime, p + 8);
976 t = p + 12;
999 t = p + 12;
977 p += 16;
1000 p += 16;
978 len = PyBytes_GET_SIZE(k);
1001 len = PyBytes_GET_SIZE(k);
979 memcpy(p, PyBytes_AS_STRING(k), len);
1002 memcpy(p, PyBytes_AS_STRING(k), len);
980 p += len;
1003 p += len;
981 o = PyDict_GetItem(copymap, k);
1004 o = PyDict_GetItem(copymap, k);
982 if (o) {
1005 if (o) {
983 *p++ = '\0';
1006 *p++ = '\0';
984 l = PyBytes_GET_SIZE(o);
1007 l = PyBytes_GET_SIZE(o);
985 memcpy(p, PyBytes_AS_STRING(o), l);
1008 memcpy(p, PyBytes_AS_STRING(o), l);
986 p += l;
1009 p += l;
987 len += l + 1;
1010 len += l + 1;
988 }
1011 }
989 putbe32((uint32_t)len, t);
1012 putbe32((uint32_t)len, t);
990 }
1013 }
991
1014
992 pos = p - PyBytes_AS_STRING(packobj);
1015 pos = p - PyBytes_AS_STRING(packobj);
993 if (pos != nbytes) {
1016 if (pos != nbytes) {
994 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
1017 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
995 (long)pos, (long)nbytes);
1018 (long)pos, (long)nbytes);
996 goto bail;
1019 goto bail;
997 }
1020 }
998
1021
999 return packobj;
1022 return packobj;
1000 bail:
1023 bail:
1001 Py_XDECREF(mtime_unset);
1024 Py_XDECREF(mtime_unset);
1002 Py_XDECREF(packobj);
1025 Py_XDECREF(packobj);
1003 Py_XDECREF(v);
1026 Py_XDECREF(v);
1004 return NULL;
1027 return NULL;
1005 }
1028 }
1006
1029
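The `state == 'n' && mtime == now` branch in pack_dirstate above guards against the classic dirstate race: a file written during the very second the dirstate is packed cannot be told apart from a later modification by mtime alone, so its mtime is stored as -1 and the next status run re-checks it. A minimal pure-Python sketch of the same guard (illustrative only; the names are not the actual pure/parsers.py helpers):

def clamp_ambiguous_mtimes(dmap, now):
    # dmap maps filename -> (state, mode, size, mtime) tuples, as in the
    # v1 dirstate format packed above.
    for fname, (state, mode, size, mtime) in list(dmap.items()):
        if state == b'n' and mtime == now:
            # Written during the "now" second: force a later re-check.
            dmap[fname] = (state, mode, size, -1)
    return dmap
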
1007 #define BUMPED_FIX 1
1030 #define BUMPED_FIX 1
1008 #define USING_SHA_256 2
1031 #define USING_SHA_256 2
1009 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
1032 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
1010
1033
1011 static PyObject *readshas(const char *source, unsigned char num,
1034 static PyObject *readshas(const char *source, unsigned char num,
1012 Py_ssize_t hashwidth)
1035 Py_ssize_t hashwidth)
1013 {
1036 {
1014 int i;
1037 int i;
1015 PyObject *list = PyTuple_New(num);
1038 PyObject *list = PyTuple_New(num);
1016 if (list == NULL) {
1039 if (list == NULL) {
1017 return NULL;
1040 return NULL;
1018 }
1041 }
1019 for (i = 0; i < num; i++) {
1042 for (i = 0; i < num; i++) {
1020 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1043 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
1021 if (hash == NULL) {
1044 if (hash == NULL) {
1022 Py_DECREF(list);
1045 Py_DECREF(list);
1023 return NULL;
1046 return NULL;
1024 }
1047 }
1025 PyTuple_SET_ITEM(list, i, hash);
1048 PyTuple_SET_ITEM(list, i, hash);
1026 source += hashwidth;
1049 source += hashwidth;
1027 }
1050 }
1028 return list;
1051 return list;
1029 }
1052 }
1030
1053
1031 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
1054 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
1032 uint32_t *msize)
1055 uint32_t *msize)
1033 {
1056 {
1034 const char *data = databegin;
1057 const char *data = databegin;
1035 const char *meta;
1058 const char *meta;
1036
1059
1037 double mtime;
1060 double mtime;
1038 int16_t tz;
1061 int16_t tz;
1039 uint16_t flags;
1062 uint16_t flags;
1040 unsigned char nsuccs, nparents, nmetadata;
1063 unsigned char nsuccs, nparents, nmetadata;
1041 Py_ssize_t hashwidth = 20;
1064 Py_ssize_t hashwidth = 20;
1042
1065
1043 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
1066 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
1044 PyObject *metadata = NULL, *ret = NULL;
1067 PyObject *metadata = NULL, *ret = NULL;
1045 int i;
1068 int i;
1046
1069
1047 if (data + FM1_HEADER_SIZE > dataend) {
1070 if (data + FM1_HEADER_SIZE > dataend) {
1048 goto overflow;
1071 goto overflow;
1049 }
1072 }
1050
1073
1051 *msize = getbe32(data);
1074 *msize = getbe32(data);
1052 data += 4;
1075 data += 4;
1053 mtime = getbefloat64(data);
1076 mtime = getbefloat64(data);
1054 data += 8;
1077 data += 8;
1055 tz = getbeint16(data);
1078 tz = getbeint16(data);
1056 data += 2;
1079 data += 2;
1057 flags = getbeuint16(data);
1080 flags = getbeuint16(data);
1058 data += 2;
1081 data += 2;
1059
1082
1060 if (flags & USING_SHA_256) {
1083 if (flags & USING_SHA_256) {
1061 hashwidth = 32;
1084 hashwidth = 32;
1062 }
1085 }
1063
1086
1064 nsuccs = (unsigned char)(*data++);
1087 nsuccs = (unsigned char)(*data++);
1065 nparents = (unsigned char)(*data++);
1088 nparents = (unsigned char)(*data++);
1066 nmetadata = (unsigned char)(*data++);
1089 nmetadata = (unsigned char)(*data++);
1067
1090
1068 if (databegin + *msize > dataend) {
1091 if (databegin + *msize > dataend) {
1069 goto overflow;
1092 goto overflow;
1070 }
1093 }
1071 dataend = databegin + *msize; /* narrow down to marker size */
1094 dataend = databegin + *msize; /* narrow down to marker size */
1072
1095
1073 if (data + hashwidth > dataend) {
1096 if (data + hashwidth > dataend) {
1074 goto overflow;
1097 goto overflow;
1075 }
1098 }
1076 prec = PyBytes_FromStringAndSize(data, hashwidth);
1099 prec = PyBytes_FromStringAndSize(data, hashwidth);
1077 data += hashwidth;
1100 data += hashwidth;
1078 if (prec == NULL) {
1101 if (prec == NULL) {
1079 goto bail;
1102 goto bail;
1080 }
1103 }
1081
1104
1082 if (data + nsuccs * hashwidth > dataend) {
1105 if (data + nsuccs * hashwidth > dataend) {
1083 goto overflow;
1106 goto overflow;
1084 }
1107 }
1085 succs = readshas(data, nsuccs, hashwidth);
1108 succs = readshas(data, nsuccs, hashwidth);
1086 if (succs == NULL) {
1109 if (succs == NULL) {
1087 goto bail;
1110 goto bail;
1088 }
1111 }
1089 data += nsuccs * hashwidth;
1112 data += nsuccs * hashwidth;
1090
1113
1091 if (nparents == 1 || nparents == 2) {
1114 if (nparents == 1 || nparents == 2) {
1092 if (data + nparents * hashwidth > dataend) {
1115 if (data + nparents * hashwidth > dataend) {
1093 goto overflow;
1116 goto overflow;
1094 }
1117 }
1095 parents = readshas(data, nparents, hashwidth);
1118 parents = readshas(data, nparents, hashwidth);
1096 if (parents == NULL) {
1119 if (parents == NULL) {
1097 goto bail;
1120 goto bail;
1098 }
1121 }
1099 data += nparents * hashwidth;
1122 data += nparents * hashwidth;
1100 } else {
1123 } else {
1101 parents = Py_None;
1124 parents = Py_None;
1102 Py_INCREF(parents);
1125 Py_INCREF(parents);
1103 }
1126 }
1104
1127
1105 if (data + 2 * nmetadata > dataend) {
1128 if (data + 2 * nmetadata > dataend) {
1106 goto overflow;
1129 goto overflow;
1107 }
1130 }
1108 meta = data + (2 * nmetadata);
1131 meta = data + (2 * nmetadata);
1109 metadata = PyTuple_New(nmetadata);
1132 metadata = PyTuple_New(nmetadata);
1110 if (metadata == NULL) {
1133 if (metadata == NULL) {
1111 goto bail;
1134 goto bail;
1112 }
1135 }
1113 for (i = 0; i < nmetadata; i++) {
1136 for (i = 0; i < nmetadata; i++) {
1114 PyObject *tmp, *left = NULL, *right = NULL;
1137 PyObject *tmp, *left = NULL, *right = NULL;
1115 Py_ssize_t leftsize = (unsigned char)(*data++);
1138 Py_ssize_t leftsize = (unsigned char)(*data++);
1116 Py_ssize_t rightsize = (unsigned char)(*data++);
1139 Py_ssize_t rightsize = (unsigned char)(*data++);
1117 if (meta + leftsize + rightsize > dataend) {
1140 if (meta + leftsize + rightsize > dataend) {
1118 goto overflow;
1141 goto overflow;
1119 }
1142 }
1120 left = PyBytes_FromStringAndSize(meta, leftsize);
1143 left = PyBytes_FromStringAndSize(meta, leftsize);
1121 meta += leftsize;
1144 meta += leftsize;
1122 right = PyBytes_FromStringAndSize(meta, rightsize);
1145 right = PyBytes_FromStringAndSize(meta, rightsize);
1123 meta += rightsize;
1146 meta += rightsize;
1124 tmp = PyTuple_New(2);
1147 tmp = PyTuple_New(2);
1125 if (!left || !right || !tmp) {
1148 if (!left || !right || !tmp) {
1126 Py_XDECREF(left);
1149 Py_XDECREF(left);
1127 Py_XDECREF(right);
1150 Py_XDECREF(right);
1128 Py_XDECREF(tmp);
1151 Py_XDECREF(tmp);
1129 goto bail;
1152 goto bail;
1130 }
1153 }
1131 PyTuple_SET_ITEM(tmp, 0, left);
1154 PyTuple_SET_ITEM(tmp, 0, left);
1132 PyTuple_SET_ITEM(tmp, 1, right);
1155 PyTuple_SET_ITEM(tmp, 1, right);
1133 PyTuple_SET_ITEM(metadata, i, tmp);
1156 PyTuple_SET_ITEM(metadata, i, tmp);
1134 }
1157 }
1135 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
1158 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
1136 (int)tz * 60, parents);
1159 (int)tz * 60, parents);
1137 goto bail; /* return successfully */
1160 goto bail; /* return successfully */
1138
1161
1139 overflow:
1162 overflow:
1140 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
1163 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
1141 bail:
1164 bail:
1142 Py_XDECREF(prec);
1165 Py_XDECREF(prec);
1143 Py_XDECREF(succs);
1166 Py_XDECREF(succs);
1144 Py_XDECREF(metadata);
1167 Py_XDECREF(metadata);
1145 Py_XDECREF(parents);
1168 Py_XDECREF(parents);
1146 return ret;
1169 return ret;
1147 }
1170 }
1148
1171
1149 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1172 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
1150 {
1173 {
1151 const char *data, *dataend;
1174 const char *data, *dataend;
1152 Py_ssize_t datalen, offset, stop;
1175 Py_ssize_t datalen, offset, stop;
1153 PyObject *markers = NULL;
1176 PyObject *markers = NULL;
1154
1177
1155 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1178 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1156 &offset, &stop)) {
1179 &offset, &stop)) {
1157 return NULL;
1180 return NULL;
1158 }
1181 }
1159 if (offset < 0) {
1182 if (offset < 0) {
1160 PyErr_SetString(PyExc_ValueError,
1183 PyErr_SetString(PyExc_ValueError,
1161 "invalid negative offset in fm1readmarkers");
1184 "invalid negative offset in fm1readmarkers");
1162 return NULL;
1185 return NULL;
1163 }
1186 }
1164 if (stop > datalen) {
1187 if (stop > datalen) {
1165 PyErr_SetString(
1188 PyErr_SetString(
1166 PyExc_ValueError,
1189 PyExc_ValueError,
1167 "stop longer than data length in fm1readmarkers");
1190 "stop longer than data length in fm1readmarkers");
1168 return NULL;
1191 return NULL;
1169 }
1192 }
1170 dataend = data + datalen;
1193 dataend = data + datalen;
1171 data += offset;
1194 data += offset;
1172 markers = PyList_New(0);
1195 markers = PyList_New(0);
1173 if (!markers) {
1196 if (!markers) {
1174 return NULL;
1197 return NULL;
1175 }
1198 }
1176 while (offset < stop) {
1199 while (offset < stop) {
1177 uint32_t msize;
1200 uint32_t msize;
1178 int error;
1201 int error;
1179 PyObject *record = fm1readmarker(data, dataend, &msize);
1202 PyObject *record = fm1readmarker(data, dataend, &msize);
1180 if (!record) {
1203 if (!record) {
1181 goto bail;
1204 goto bail;
1182 }
1205 }
1183 error = PyList_Append(markers, record);
1206 error = PyList_Append(markers, record);
1184 Py_DECREF(record);
1207 Py_DECREF(record);
1185 if (error) {
1208 if (error) {
1186 goto bail;
1209 goto bail;
1187 }
1210 }
1188 data += msize;
1211 data += msize;
1189 offset += msize;
1212 offset += msize;
1190 }
1213 }
1191 return markers;
1214 return markers;
1192 bail:
1215 bail:
1193 Py_DECREF(markers);
1216 Py_DECREF(markers);
1194 return NULL;
1217 return NULL;
1195 }
1218 }
1196
1219
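For reference, the FM1 marker header parsed by fm1readmarker above is FM1_HEADER_SIZE = 19 bytes: a big-endian uint32 total size, a float64 date, an int16 timezone field, a uint16 flags word, and three one-byte counts (successors, parents, metadata pairs). A rough struct-based sketch of that header read, assuming only what the C code above decodes:

import struct

# ">IdhH3B": uint32 size, float64 date, int16 tz, uint16 flags, then the
# nsuccs/nparents/nmetadata counts -- 19 bytes in total.
_FM1_HEADER = struct.Struct(">IdhH3B")

def read_fm1_header(data, offset=0):
    size, date, tz, flags, nsuccs, nparents, nmeta = _FM1_HEADER.unpack_from(
        data, offset
    )
    hashwidth = 32 if flags & 2 else 20  # USING_SHA_256
    return size, date, tz, flags, nsuccs, nparents, nmeta, hashwidth
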
1197 static char parsers_doc[] = "Efficient content parsing.";
1220 static char parsers_doc[] = "Efficient content parsing.";
1198
1221
1199 PyObject *encodedir(PyObject *self, PyObject *args);
1222 PyObject *encodedir(PyObject *self, PyObject *args);
1200 PyObject *pathencode(PyObject *self, PyObject *args);
1223 PyObject *pathencode(PyObject *self, PyObject *args);
1201 PyObject *lowerencode(PyObject *self, PyObject *args);
1224 PyObject *lowerencode(PyObject *self, PyObject *args);
1202 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1225 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
1203
1226
1204 static PyMethodDef methods[] = {
1227 static PyMethodDef methods[] = {
1205 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1228 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
1206 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1229 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
1207 "create a set containing non-normal and other parent entries of given "
1230 "create a set containing non-normal and other parent entries of given "
1208 "dirstate\n"},
1231 "dirstate\n"},
1209 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1232 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
1210 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1233 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
1211 "parse a revlog index\n"},
1234 "parse a revlog index\n"},
1212 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1235 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
1213 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1236 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
1214 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1237 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
1215 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1238 {"dict_new_presized", dict_new_presized, METH_VARARGS,
1216 "construct a dict with an expected size\n"},
1239 "construct a dict with an expected size\n"},
1217 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1240 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
1218 "make file foldmap\n"},
1241 "make file foldmap\n"},
1219 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1242 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
1220 "escape a UTF-8 byte string to JSON (fast path)\n"},
1243 "escape a UTF-8 byte string to JSON (fast path)\n"},
1221 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1244 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
1222 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1245 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
1223 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1246 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
1224 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1247 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
1225 "parse v1 obsolete markers\n"},
1248 "parse v1 obsolete markers\n"},
1226 {NULL, NULL}};
1249 {NULL, NULL}};
1227
1250
1228 void dirs_module_init(PyObject *mod);
1251 void dirs_module_init(PyObject *mod);
1229 void manifest_module_init(PyObject *mod);
1252 void manifest_module_init(PyObject *mod);
1230 void revlog_module_init(PyObject *mod);
1253 void revlog_module_init(PyObject *mod);
1231
1254
1232 static const int version = 20;
1255 static const int version = 20;
1233
1256
1234 static void module_init(PyObject *mod)
1257 static void module_init(PyObject *mod)
1235 {
1258 {
1236 PyModule_AddIntConstant(mod, "version", version);
1259 PyModule_AddIntConstant(mod, "version", version);
1237
1260
1238 /* This module constant has two purposes. First, it lets us unit test
1261 /* This module constant has two purposes. First, it lets us unit test
1239 * the ImportError raised without hard-coding any error text. This
1262 * the ImportError raised without hard-coding any error text. This
1240 * means we can change the text in the future without breaking tests,
1263 * means we can change the text in the future without breaking tests,
1241 * even across changesets without a recompile. Second, its presence
1264 * even across changesets without a recompile. Second, its presence
1242 * can be used to determine whether the version-checking logic is
1265 * can be used to determine whether the version-checking logic is
1243 * present, which also helps in testing across changesets without a
1266 * present, which also helps in testing across changesets without a
1244 * recompile. Note that this means the pure-Python version of parsers
1267 * recompile. Note that this means the pure-Python version of parsers
1245 * should not have this module constant. */
1268 * should not have this module constant. */
1246 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1269 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
1247
1270
1248 dirs_module_init(mod);
1271 dirs_module_init(mod);
1249 manifest_module_init(mod);
1272 manifest_module_init(mod);
1250 revlog_module_init(mod);
1273 revlog_module_init(mod);
1251
1274
1252 if (PyType_Ready(&dirstateItemType) < 0) {
1275 if (PyType_Ready(&dirstateItemType) < 0) {
1253 return;
1276 return;
1254 }
1277 }
1255 Py_INCREF(&dirstateItemType);
1278 Py_INCREF(&dirstateItemType);
1256 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1279 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
1257 }
1280 }
1258
1281
1259 static int check_python_version(void)
1282 static int check_python_version(void)
1260 {
1283 {
1261 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1284 PyObject *sys = PyImport_ImportModule("sys"), *ver;
1262 long hexversion;
1285 long hexversion;
1263 if (!sys) {
1286 if (!sys) {
1264 return -1;
1287 return -1;
1265 }
1288 }
1266 ver = PyObject_GetAttrString(sys, "hexversion");
1289 ver = PyObject_GetAttrString(sys, "hexversion");
1267 Py_DECREF(sys);
1290 Py_DECREF(sys);
1268 if (!ver) {
1291 if (!ver) {
1269 return -1;
1292 return -1;
1270 }
1293 }
1271 hexversion = PyInt_AsLong(ver);
1294 hexversion = PyInt_AsLong(ver);
1272 Py_DECREF(ver);
1295 Py_DECREF(ver);
1273 /* sys.hexversion is a 32-bit number by default, so the -1 case
1296 /* sys.hexversion is a 32-bit number by default, so the -1 case
1274 * should only occur in unusual circumstances (e.g. if sys.hexversion
1297 * should only occur in unusual circumstances (e.g. if sys.hexversion
1275 * is manually set to an invalid value). */
1298 * is manually set to an invalid value). */
1276 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1299 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
1277 PyErr_Format(PyExc_ImportError,
1300 PyErr_Format(PyExc_ImportError,
1278 "%s: The Mercurial extension "
1301 "%s: The Mercurial extension "
1279 "modules were compiled with Python " PY_VERSION
1302 "modules were compiled with Python " PY_VERSION
1280 ", but "
1303 ", but "
1281 "Mercurial is currently using Python with "
1304 "Mercurial is currently using Python with "
1282 "sys.hexversion=%ld: "
1305 "sys.hexversion=%ld: "
1283 "Python %s\n at: %s",
1306 "Python %s\n at: %s",
1284 versionerrortext, hexversion, Py_GetVersion(),
1307 versionerrortext, hexversion, Py_GetVersion(),
1285 Py_GetProgramFullPath());
1308 Py_GetProgramFullPath());
1286 return -1;
1309 return -1;
1287 }
1310 }
1288 return 0;
1311 return 0;
1289 }
1312 }
1290
1313
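check_python_version above compares only the major/minor part of sys.hexversion against the PY_VERSION_HEX the extension was compiled with. A one-line Python equivalent of that comparison (illustrative only):

import sys

def same_major_minor(compiled_hexversion):
    # sys.hexversion packs the version as 0xMMNNPPss; shifting out the
    # low 16 bits leaves just the major and minor components.
    return (sys.hexversion >> 16) == (compiled_hexversion >> 16)
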
1291 #ifdef IS_PY3K
1314 #ifdef IS_PY3K
1292 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1315 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1293 parsers_doc, -1, methods};
1316 parsers_doc, -1, methods};
1294
1317
1295 PyMODINIT_FUNC PyInit_parsers(void)
1318 PyMODINIT_FUNC PyInit_parsers(void)
1296 {
1319 {
1297 PyObject *mod;
1320 PyObject *mod;
1298
1321
1299 if (check_python_version() == -1)
1322 if (check_python_version() == -1)
1300 return NULL;
1323 return NULL;
1301 mod = PyModule_Create(&parsers_module);
1324 mod = PyModule_Create(&parsers_module);
1302 module_init(mod);
1325 module_init(mod);
1303 return mod;
1326 return mod;
1304 }
1327 }
1305 #else
1328 #else
1306 PyMODINIT_FUNC initparsers(void)
1329 PyMODINIT_FUNC initparsers(void)
1307 {
1330 {
1308 PyObject *mod;
1331 PyObject *mod;
1309
1332
1310 if (check_python_version() == -1) {
1333 if (check_python_version() == -1) {
1311 return;
1334 return;
1312 }
1335 }
1313 mod = Py_InitModule3("parsers", methods, parsers_doc);
1336 mod = Py_InitModule3("parsers", methods, parsers_doc);
1314 module_init(mod);
1337 module_init(mod);
1315 }
1338 }
1316 #endif
1339 #endif
@@ -1,1021 +1,1006 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 if rustmod is None:
30 if rustmod is None:
31 DirstateItem = parsers.DirstateItem
31 DirstateItem = parsers.DirstateItem
32 else:
32 else:
33 DirstateItem = rustmod.DirstateItem
33 DirstateItem = rustmod.DirstateItem
34
34
35 rangemask = 0x7FFFFFFF
35 rangemask = 0x7FFFFFFF
36
36
37
37
38 class dirstatemap(object):
38 class dirstatemap(object):
39 """Map encapsulating the dirstate's contents.
39 """Map encapsulating the dirstate's contents.
40
40
41 The dirstate contains the following state:
41 The dirstate contains the following state:
42
42
43 - `identity` is the identity of the dirstate file, which can be used to
43 - `identity` is the identity of the dirstate file, which can be used to
44 detect when changes have occurred to the dirstate file.
44 detect when changes have occurred to the dirstate file.
45
45
46 - `parents` is a pair containing the parents of the working copy. The
46 - `parents` is a pair containing the parents of the working copy. The
47 parents are updated by calling `setparents`.
47 parents are updated by calling `setparents`.
48
48
49 - the state map maps filenames to tuples of (state, mode, size, mtime),
49 - the state map maps filenames to tuples of (state, mode, size, mtime),
50 where state is a single character representing 'normal', 'added',
50 where state is a single character representing 'normal', 'added',
51 'removed', or 'merged'. It is read by treating the dirstate as a
51 'removed', or 'merged'. It is read by treating the dirstate as a
52 dict. File state is updated by calling various methods (see each
52 dict. File state is updated by calling various methods (see each
53 documentation for details):
53 documentation for details):
54
54
55 - `reset_state`,
55 - `reset_state`,
56 - `set_tracked`
56 - `set_tracked`
57 - `set_untracked`
57 - `set_untracked`
58 - `set_clean`
58 - `set_clean`
59 - `set_possibly_dirty`
59 - `set_possibly_dirty`
60
60
61 - `copymap` maps destination filenames to their source filename.
61 - `copymap` maps destination filenames to their source filename.
62
62
63 The dirstate also provides the following views onto the state:
63 The dirstate also provides the following views onto the state:
64
64
65 - `nonnormalset` is a set of the filenames that have state other
65 - `nonnormalset` is a set of the filenames that have state other
66 than 'normal', or are normal but have an mtime of -1 ('normallookup').
66 than 'normal', or are normal but have an mtime of -1 ('normallookup').
67
67
68 - `otherparentset` is a set of the filenames that are marked as coming
68 - `otherparentset` is a set of the filenames that are marked as coming
69 from the second parent when the dirstate is currently being merged.
69 from the second parent when the dirstate is currently being merged.
70
70
71 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
71 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
72 form that they appear as in the dirstate.
72 form that they appear as in the dirstate.
73
73
74 - `dirfoldmap` is a dict mapping normalized directory names to the
74 - `dirfoldmap` is a dict mapping normalized directory names to the
75 denormalized form that they appear as in the dirstate.
75 denormalized form that they appear as in the dirstate.
76 """
76 """
77
77
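A short usage sketch of the views documented in the docstring above; `dmap` stands for an already-loaded dirstatemap instance, and the surrounding setup is assumed rather than part of this change:

def summarize(dmap):
    p1, p2 = dmap.parents()
    tracked = [f for f, item in dmap.items() if item.tracked]
    copies = dict(dmap.copymap)        # destination -> source
    dirty = set(dmap.nonnormalset)     # entries needing a status re-check
    return p1, p2, tracked, copies, dirty
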
78 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
78 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
79 self._ui = ui
79 self._ui = ui
80 self._opener = opener
80 self._opener = opener
81 self._root = root
81 self._root = root
82 self._filename = b'dirstate'
82 self._filename = b'dirstate'
83 self._nodelen = 20
83 self._nodelen = 20
84 self._nodeconstants = nodeconstants
84 self._nodeconstants = nodeconstants
85 assert (
85 assert (
86 not use_dirstate_v2
86 not use_dirstate_v2
87 ), "should have detected unsupported requirement"
87 ), "should have detected unsupported requirement"
88
88
89 self._parents = None
89 self._parents = None
90 self._dirtyparents = False
90 self._dirtyparents = False
91
91
92 # for consistent view between _pl() and _read() invocations
92 # for consistent view between _pl() and _read() invocations
93 self._pendingmode = None
93 self._pendingmode = None
94
94
95 @propertycache
95 @propertycache
96 def _map(self):
96 def _map(self):
97 self._map = {}
97 self._map = {}
98 self.read()
98 self.read()
99 return self._map
99 return self._map
100
100
101 @propertycache
101 @propertycache
102 def copymap(self):
102 def copymap(self):
103 self.copymap = {}
103 self.copymap = {}
104 self._map
104 self._map
105 return self.copymap
105 return self.copymap
106
106
107 def clear(self):
107 def clear(self):
108 self._map.clear()
108 self._map.clear()
109 self.copymap.clear()
109 self.copymap.clear()
110 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
110 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
111 util.clearcachedproperty(self, b"_dirs")
111 util.clearcachedproperty(self, b"_dirs")
112 util.clearcachedproperty(self, b"_alldirs")
112 util.clearcachedproperty(self, b"_alldirs")
113 util.clearcachedproperty(self, b"filefoldmap")
113 util.clearcachedproperty(self, b"filefoldmap")
114 util.clearcachedproperty(self, b"dirfoldmap")
114 util.clearcachedproperty(self, b"dirfoldmap")
115 util.clearcachedproperty(self, b"nonnormalset")
115 util.clearcachedproperty(self, b"nonnormalset")
116 util.clearcachedproperty(self, b"otherparentset")
116 util.clearcachedproperty(self, b"otherparentset")
117
117
118 def items(self):
118 def items(self):
119 return pycompat.iteritems(self._map)
119 return pycompat.iteritems(self._map)
120
120
121 # forward for python2,3 compat
121 # forward for python2,3 compat
122 iteritems = items
122 iteritems = items
123
123
124 def debug_iter(self, all):
124 def debug_iter(self, all):
125 """
125 """
126 Return an iterator of (filename, state, mode, size, mtime) tuples
126 Return an iterator of (filename, state, mode, size, mtime) tuples
127
127
128 `all` is unused when Rust is not enabled
128 `all` is unused when Rust is not enabled
129 """
129 """
130 for (filename, item) in self.items():
130 for (filename, item) in self.items():
131 yield (filename, item.state, item.mode, item.size, item.mtime)
131 yield (filename, item.state, item.mode, item.size, item.mtime)
132
132
133 def __len__(self):
133 def __len__(self):
134 return len(self._map)
134 return len(self._map)
135
135
136 def __iter__(self):
136 def __iter__(self):
137 return iter(self._map)
137 return iter(self._map)
138
138
139 def get(self, key, default=None):
139 def get(self, key, default=None):
140 return self._map.get(key, default)
140 return self._map.get(key, default)
141
141
142 def __contains__(self, key):
142 def __contains__(self, key):
143 return key in self._map
143 return key in self._map
144
144
145 def __getitem__(self, key):
145 def __getitem__(self, key):
146 return self._map[key]
146 return self._map[key]
147
147
148 def keys(self):
148 def keys(self):
149 return self._map.keys()
149 return self._map.keys()
150
150
151 def preload(self):
151 def preload(self):
152 """Loads the underlying data, if it's not already loaded"""
152 """Loads the underlying data, if it's not already loaded"""
153 self._map
153 self._map
154
154
155 def _dirs_incr(self, filename, old_entry=None):
155 def _dirs_incr(self, filename, old_entry=None):
156 """incremente the dirstate counter if applicable"""
156 """incremente the dirstate counter if applicable"""
157 if (
157 if (
158 old_entry is None or old_entry.removed
158 old_entry is None or old_entry.removed
159 ) and "_dirs" in self.__dict__:
159 ) and "_dirs" in self.__dict__:
160 self._dirs.addpath(filename)
160 self._dirs.addpath(filename)
161 if old_entry is None and "_alldirs" in self.__dict__:
161 if old_entry is None and "_alldirs" in self.__dict__:
162 self._alldirs.addpath(filename)
162 self._alldirs.addpath(filename)
163
163
164 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
164 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
165 """decremente the dirstate counter if applicable"""
165 """decremente the dirstate counter if applicable"""
166 if old_entry is not None:
166 if old_entry is not None:
167 if "_dirs" in self.__dict__ and not old_entry.removed:
167 if "_dirs" in self.__dict__ and not old_entry.removed:
168 self._dirs.delpath(filename)
168 self._dirs.delpath(filename)
169 if "_alldirs" in self.__dict__ and not remove_variant:
169 if "_alldirs" in self.__dict__ and not remove_variant:
170 self._alldirs.delpath(filename)
170 self._alldirs.delpath(filename)
171 elif remove_variant and "_alldirs" in self.__dict__:
171 elif remove_variant and "_alldirs" in self.__dict__:
172 self._alldirs.addpath(filename)
172 self._alldirs.addpath(filename)
173 if "filefoldmap" in self.__dict__:
173 if "filefoldmap" in self.__dict__:
174 normed = util.normcase(filename)
174 normed = util.normcase(filename)
175 self.filefoldmap.pop(normed, None)
175 self.filefoldmap.pop(normed, None)
176
176
177 def set_possibly_dirty(self, filename):
177 def set_possibly_dirty(self, filename):
178 """record that the current state of the file on disk is unknown"""
178 """record that the current state of the file on disk is unknown"""
179 self[filename].set_possibly_dirty()
179 self[filename].set_possibly_dirty()
180
180
181 def set_clean(self, filename, mode, size, mtime):
181 def set_clean(self, filename, mode, size, mtime):
182 """mark a file as back to a clean state"""
182 """mark a file as back to a clean state"""
183 entry = self[filename]
183 entry = self[filename]
184 mtime = mtime & rangemask
184 mtime = mtime & rangemask
185 size = size & rangemask
185 size = size & rangemask
186 entry.set_clean(mode, size, mtime)
186 entry.set_clean(mode, size, mtime)
187 self.copymap.pop(filename, None)
187 self.copymap.pop(filename, None)
188 self.nonnormalset.discard(filename)
188 self.nonnormalset.discard(filename)
189
189
190 def reset_state(
190 def reset_state(
191 self,
191 self,
192 filename,
192 filename,
193 wc_tracked=False,
193 wc_tracked=False,
194 p1_tracked=False,
194 p1_tracked=False,
195 p2_tracked=False,
195 p2_tracked=False,
196 merged=False,
196 merged=False,
197 clean_p1=False,
197 clean_p1=False,
198 clean_p2=False,
198 clean_p2=False,
199 possibly_dirty=False,
199 possibly_dirty=False,
200 parentfiledata=None,
200 parentfiledata=None,
201 ):
201 ):
202 """Set a entry to a given state, diregarding all previous state
202 """Set a entry to a given state, diregarding all previous state
203
203
204 This is to be used by the part of the dirstate API dedicated to
204 This is to be used by the part of the dirstate API dedicated to
205 adjusting the dirstate after an update/merge.
205 adjusting the dirstate after an update/merge.
206
206
207 note: calling this might result in no entry existing at all if the
207 note: calling this might result in no entry existing at all if the
208 dirstate map does not see any point in having one for this file
208 dirstate map does not see any point in having one for this file
209 anymore.
209 anymore.
210 """
210 """
211 if merged and (clean_p1 or clean_p2):
211 if merged and (clean_p1 or clean_p2):
212 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
212 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
213 raise error.ProgrammingError(msg)
213 raise error.ProgrammingError(msg)
214 # copy information is now outdated
214 # copy information is now outdated
215 # (maybe new information should be passed directly to this function)
215 # (maybe new information should be passed directly to this function)
216 self.copymap.pop(filename, None)
216 self.copymap.pop(filename, None)
217
217
218 if not (p1_tracked or p2_tracked or wc_tracked):
218 if not (p1_tracked or p2_tracked or wc_tracked):
219 old_entry = self._map.pop(filename, None)
219 old_entry = self._map.pop(filename, None)
220 self._dirs_decr(filename, old_entry=old_entry)
220 self._dirs_decr(filename, old_entry=old_entry)
221 self.nonnormalset.discard(filename)
221 self.nonnormalset.discard(filename)
222 self.copymap.pop(filename, None)
222 self.copymap.pop(filename, None)
223 return
223 return
224 elif merged:
224 elif merged:
225 # XXX might be merged and removed ?
225 # XXX might be merged and removed ?
226 entry = self.get(filename)
226 entry = self.get(filename)
227 if entry is None or not entry.tracked:
227 if entry is None or not entry.tracked:
228 # XXX mostly replicates dirstate.other parent. We should get
228 # XXX mostly replicates dirstate.other parent. We should get
229 # the higher layer to pass us more reliable data where `merged`
229 # the higher layer to pass us more reliable data where `merged`
230 # actually means merged. Dropping this clause will show failure
230 # actually means merged. Dropping this clause will show failure
231 # in `test-graft.t`
231 # in `test-graft.t`
232 merged = False
232 merged = False
233 clean_p2 = True
233 clean_p2 = True
234 elif not (p1_tracked or p2_tracked) and wc_tracked:
234 elif not (p1_tracked or p2_tracked) and wc_tracked:
235 pass # file is added, nothing special to adjust
235 pass # file is added, nothing special to adjust
236 elif (p1_tracked or p2_tracked) and not wc_tracked:
236 elif (p1_tracked or p2_tracked) and not wc_tracked:
237 pass
237 pass
238 elif clean_p2 and wc_tracked:
238 elif clean_p2 and wc_tracked:
239 if p1_tracked or self.get(filename) is not None:
239 if p1_tracked or self.get(filename) is not None:
240 # XXX the `self.get` call is catching some case in
240 # XXX the `self.get` call is catching some case in
241 # `test-merge-remove.t` where the file is tracked in p1, but the
241 # `test-merge-remove.t` where the file is tracked in p1, but the
242 # p1_tracked argument is False.
242 # p1_tracked argument is False.
243 #
243 #
244 # In addition, this seems to be a case where the file is marked
244 # In addition, this seems to be a case where the file is marked
245 # as merged without actually being the result of a merge
245 # as merged without actually being the result of a merge
246 # action. So things are not ideal here.
246 # action. So things are not ideal here.
247 merged = True
247 merged = True
248 clean_p2 = False
248 clean_p2 = False
249 elif not p1_tracked and p2_tracked and wc_tracked:
249 elif not p1_tracked and p2_tracked and wc_tracked:
250 clean_p2 = True
250 clean_p2 = True
251 elif possibly_dirty:
251 elif possibly_dirty:
252 pass
252 pass
253 elif wc_tracked:
253 elif wc_tracked:
254 # this is a "normal" file
254 # this is a "normal" file
255 if parentfiledata is None:
255 if parentfiledata is None:
256 msg = b'failed to pass parentfiledata for a normal file: %s'
256 msg = b'failed to pass parentfiledata for a normal file: %s'
257 msg %= filename
257 msg %= filename
258 raise error.ProgrammingError(msg)
258 raise error.ProgrammingError(msg)
259 else:
259 else:
260 assert False, 'unreachable'
260 assert False, 'unreachable'
261
261
262 old_entry = self._map.get(filename)
262 old_entry = self._map.get(filename)
263 self._dirs_incr(filename, old_entry)
263 self._dirs_incr(filename, old_entry)
264 entry = DirstateItem(
264 entry = DirstateItem(
265 wc_tracked=wc_tracked,
265 wc_tracked=wc_tracked,
266 p1_tracked=p1_tracked,
266 p1_tracked=p1_tracked,
267 p2_tracked=p2_tracked,
267 p2_tracked=p2_tracked,
268 merged=merged,
268 merged=merged,
269 clean_p1=clean_p1,
269 clean_p1=clean_p1,
270 clean_p2=clean_p2,
270 clean_p2=clean_p2,
271 possibly_dirty=possibly_dirty,
271 possibly_dirty=possibly_dirty,
272 parentfiledata=parentfiledata,
272 parentfiledata=parentfiledata,
273 )
273 )
274 if entry.dm_nonnormal:
274 if entry.dm_nonnormal:
275 self.nonnormalset.add(filename)
275 self.nonnormalset.add(filename)
276 else:
276 else:
277 self.nonnormalset.discard(filename)
277 self.nonnormalset.discard(filename)
278 if entry.dm_otherparent:
278 if entry.dm_otherparent:
279 self.otherparentset.add(filename)
279 self.otherparentset.add(filename)
280 else:
280 else:
281 self.otherparentset.discard(filename)
281 self.otherparentset.discard(filename)
282 self._map[filename] = entry
282 self._map[filename] = entry
283
283
284 def set_tracked(self, filename):
284 def set_tracked(self, filename):
285 new = False
285 new = False
286 entry = self.get(filename)
286 entry = self.get(filename)
287 if entry is None:
287 if entry is None:
288 self._dirs_incr(filename)
288 self._dirs_incr(filename)
289 entry = DirstateItem(
289 entry = DirstateItem(
290 p1_tracked=False,
290 p1_tracked=False,
291 p2_tracked=False,
291 p2_tracked=False,
292 wc_tracked=True,
292 wc_tracked=True,
293 merged=False,
293 merged=False,
294 clean_p1=False,
294 clean_p1=False,
295 clean_p2=False,
295 clean_p2=False,
296 possibly_dirty=False,
296 possibly_dirty=False,
297 parentfiledata=None,
297 parentfiledata=None,
298 )
298 )
299 self._map[filename] = entry
299 self._map[filename] = entry
300 if entry.dm_nonnormal:
300 if entry.dm_nonnormal:
301 self.nonnormalset.add(filename)
301 self.nonnormalset.add(filename)
302 new = True
302 new = True
303 elif not entry.tracked:
303 elif not entry.tracked:
304 self._dirs_incr(filename, entry)
304 self._dirs_incr(filename, entry)
305 entry.set_tracked()
305 entry.set_tracked()
306 new = True
306 new = True
307 else:
307 else:
308 # XXX This is probably overkill for most cases, but we need this to
308 # XXX This is probably overkill for most cases, but we need this to
309 # fully replace the `normallookup` call with the `set_tracked` one.
309 # fully replace the `normallookup` call with the `set_tracked` one.
310 # Consider smoothing this in the future.
310 # Consider smoothing this in the future.
311 self.set_possibly_dirty(filename)
311 self.set_possibly_dirty(filename)
312 return new
312 return new
313
313
314 def set_untracked(self, f):
314 def set_untracked(self, f):
315 """Mark a file as no longer tracked in the dirstate map"""
315 """Mark a file as no longer tracked in the dirstate map"""
316 entry = self.get(f)
316 entry = self.get(f)
317 if entry is None:
317 if entry is None:
318 return False
318 return False
319 else:
319 else:
320 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
320 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
321 if not entry.merged:
321 if not entry.merged:
322 self.copymap.pop(f, None)
322 self.copymap.pop(f, None)
323 if entry.added:
323 if entry.added:
324 self.nonnormalset.discard(f)
324 self.nonnormalset.discard(f)
325 self._map.pop(f, None)
325 self._map.pop(f, None)
326 else:
326 else:
327 self.nonnormalset.add(f)
327 self.nonnormalset.add(f)
328 if entry.from_p2:
328 if entry.from_p2:
329 self.otherparentset.add(f)
329 self.otherparentset.add(f)
330 entry.set_untracked()
330 entry.set_untracked()
331 return True
331 return True
332
332
333 def nonnormalentries(self):
333 def nonnormalentries(self):
334 '''Compute the nonnormal dirstate entries from the dmap'''
334 '''Compute the nonnormal dirstate entries from the dmap'''
335 try:
335 try:
336 return parsers.nonnormalotherparententries(self._map)
336 return parsers.nonnormalotherparententries(self._map)
337 except AttributeError:
337 except AttributeError:
338 nonnorm = set()
338 nonnorm = set()
339 otherparent = set()
339 otherparent = set()
340 for fname, e in pycompat.iteritems(self._map):
340 for fname, e in pycompat.iteritems(self._map):
341 if e.dm_nonnormal:
341 if e.dm_nonnormal:
342 nonnorm.add(fname)
342 nonnorm.add(fname)
343 if e.from_p2:
343 if e.from_p2:
344 otherparent.add(fname)
344 otherparent.add(fname)
345 return nonnorm, otherparent
345 return nonnorm, otherparent
346
346
347 @propertycache
347 @propertycache
348 def filefoldmap(self):
348 def filefoldmap(self):
349 """Returns a dictionary mapping normalized case paths to their
349 """Returns a dictionary mapping normalized case paths to their
350 non-normalized versions.
350 non-normalized versions.
351 """
351 """
352 try:
352 try:
353 makefilefoldmap = parsers.make_file_foldmap
353 makefilefoldmap = parsers.make_file_foldmap
354 except AttributeError:
354 except AttributeError:
355 pass
355 pass
356 else:
356 else:
357 return makefilefoldmap(
357 return makefilefoldmap(
358 self._map, util.normcasespec, util.normcasefallback
358 self._map, util.normcasespec, util.normcasefallback
359 )
359 )
360
360
361 f = {}
361 f = {}
362 normcase = util.normcase
362 normcase = util.normcase
363 for name, s in pycompat.iteritems(self._map):
363 for name, s in pycompat.iteritems(self._map):
364 if not s.removed:
364 if not s.removed:
365 f[normcase(name)] = name
365 f[normcase(name)] = name
366 f[b'.'] = b'.' # prevents useless util.fspath() invocation
366 f[b'.'] = b'.' # prevents useless util.fspath() invocation
367 return f
367 return f
368
368
369 def hastrackeddir(self, d):
369 def hastrackeddir(self, d):
370 """
370 """
371 Returns True if the dirstate contains a tracked (not removed) file
371 Returns True if the dirstate contains a tracked (not removed) file
372 in this directory.
372 in this directory.
373 """
373 """
374 return d in self._dirs
374 return d in self._dirs
375
375
376 def hasdir(self, d):
376 def hasdir(self, d):
377 """
377 """
378 Returns True if the dirstate contains a file (tracked or removed)
378 Returns True if the dirstate contains a file (tracked or removed)
379 in this directory.
379 in this directory.
380 """
380 """
381 return d in self._alldirs
381 return d in self._alldirs
382
382
383 @propertycache
383 @propertycache
384 def _dirs(self):
384 def _dirs(self):
385 return pathutil.dirs(self._map, only_tracked=True)
385 return pathutil.dirs(self._map, only_tracked=True)
386
386
387 @propertycache
387 @propertycache
388 def _alldirs(self):
388 def _alldirs(self):
389 return pathutil.dirs(self._map)
389 return pathutil.dirs(self._map)
390
390
391 def _opendirstatefile(self):
391 def _opendirstatefile(self):
392 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
392 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
393 if self._pendingmode is not None and self._pendingmode != mode:
393 if self._pendingmode is not None and self._pendingmode != mode:
394 fp.close()
394 fp.close()
395 raise error.Abort(
395 raise error.Abort(
396 _(b'working directory state may be changed parallelly')
396 _(b'working directory state may be changed parallelly')
397 )
397 )
398 self._pendingmode = mode
398 self._pendingmode = mode
399 return fp
399 return fp
400
400
401 def parents(self):
401 def parents(self):
402 if not self._parents:
402 if not self._parents:
403 try:
403 try:
404 fp = self._opendirstatefile()
404 fp = self._opendirstatefile()
405 st = fp.read(2 * self._nodelen)
405 st = fp.read(2 * self._nodelen)
406 fp.close()
406 fp.close()
407 except IOError as err:
407 except IOError as err:
408 if err.errno != errno.ENOENT:
408 if err.errno != errno.ENOENT:
409 raise
409 raise
410 # File doesn't exist, so the current state is empty
410 # File doesn't exist, so the current state is empty
411 st = b''
411 st = b''
412
412
413 l = len(st)
413 l = len(st)
414 if l == self._nodelen * 2:
414 if l == self._nodelen * 2:
415 self._parents = (
415 self._parents = (
416 st[: self._nodelen],
416 st[: self._nodelen],
417 st[self._nodelen : 2 * self._nodelen],
417 st[self._nodelen : 2 * self._nodelen],
418 )
418 )
419 elif l == 0:
419 elif l == 0:
420 self._parents = (
420 self._parents = (
421 self._nodeconstants.nullid,
421 self._nodeconstants.nullid,
422 self._nodeconstants.nullid,
422 self._nodeconstants.nullid,
423 )
423 )
424 else:
424 else:
425 raise error.Abort(
425 raise error.Abort(
426 _(b'working directory state appears damaged!')
426 _(b'working directory state appears damaged!')
427 )
427 )
428
428
429 return self._parents
429 return self._parents
430
430
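parents() above reads only the fixed-size header of the v1 dirstate file: the first 2 * 20 bytes are the two parent node ids, an empty or missing file means both parents are null, and any other length is treated as corruption. A standalone sketch of just that header read, under those assumptions and with the surrounding opener/txn machinery left out:

import errno

def read_dirstate_parents(path, nodelen=20):
    try:
        with open(path, 'rb') as fp:
            header = fp.read(2 * nodelen)
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
        header = b''  # no dirstate file yet: both parents are null
    if not header:
        return b'\0' * nodelen, b'\0' * nodelen
    if len(header) != 2 * nodelen:
        raise ValueError('working directory state appears damaged!')
    return header[:nodelen], header[nodelen:2 * nodelen]
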
431 def setparents(self, p1, p2, fold_p2=False):
431 def setparents(self, p1, p2, fold_p2=False):
432 self._parents = (p1, p2)
432 self._parents = (p1, p2)
433 self._dirtyparents = True
433 self._dirtyparents = True
434 copies = {}
434 copies = {}
435 if fold_p2:
435 if fold_p2:
436 candidatefiles = self.non_normal_or_other_parent_paths()
436 candidatefiles = self.non_normal_or_other_parent_paths()
437
437
438 for f in candidatefiles:
438 for f in candidatefiles:
439 s = self.get(f)
439 s = self.get(f)
440 if s is None:
440 if s is None:
441 continue
441 continue
442
442
443 # Discard "merged" markers when moving away from a merge state
443 # Discard "merged" markers when moving away from a merge state
444 if s.merged:
444 if s.merged or s.from_p2:
445 source = self.copymap.get(f)
445 source = self.copymap.pop(f, None)
446 if source:
446 if source:
447 copies[f] = source
447 copies[f] = source
448 self.reset_state(
448 s.drop_merge_data()
449 f,
450 wc_tracked=True,
451 p1_tracked=True,
452 possibly_dirty=True,
453 )
454 # Also fix up otherparent markers
455 elif s.from_p2:
456 source = self.copymap.get(f)
457 if source:
458 copies[f] = source
459 self.reset_state(
460 f,
461 p1_tracked=False,
462 wc_tracked=True,
463 )
464 return copies
449 return copies
465
450
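The hunk above is the point of this changeset: when setparents() folds the second parent away (fold_p2), entries that were merged or came from p2 keep their copy source in `copies`, have it popped from copymap, and are adjusted in place with the new DirstateItem.drop_merge_data() instead of being rebuilt through reset_state(). A minimal stand-in showing what drop_merge_data is expected to leave behind; the real method lives on DirstateItem in the C, Python and Rust implementations and may differ in detail:

class _ItemSketch(object):
    # Stand-in for DirstateItem, covering only the bits fold_p2 cares about.
    def __init__(self, merged=False, from_p2=False):
        self.merged = merged
        self.from_p2 = from_p2
        self.possibly_dirty = False

    def drop_merge_data(self):
        # Leaving the merge: the file stays tracked, but any p2/merge
        # provenance is forgotten and its on-disk state must be re-checked.
        self.merged = False
        self.from_p2 = False
        self.possibly_dirty = True
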
466 def read(self):
451 def read(self):
467 # ignore HG_PENDING because identity is used only for writing
452 # ignore HG_PENDING because identity is used only for writing
468 self.identity = util.filestat.frompath(
453 self.identity = util.filestat.frompath(
469 self._opener.join(self._filename)
454 self._opener.join(self._filename)
470 )
455 )
471
456
472 try:
457 try:
473 fp = self._opendirstatefile()
458 fp = self._opendirstatefile()
474 try:
459 try:
475 st = fp.read()
460 st = fp.read()
476 finally:
461 finally:
477 fp.close()
462 fp.close()
478 except IOError as err:
463 except IOError as err:
479 if err.errno != errno.ENOENT:
464 if err.errno != errno.ENOENT:
480 raise
465 raise
481 return
466 return
482 if not st:
467 if not st:
483 return
468 return
484
469
485 if util.safehasattr(parsers, b'dict_new_presized'):
470 if util.safehasattr(parsers, b'dict_new_presized'):
486 # Make an estimate of the number of files in the dirstate based on
471 # Make an estimate of the number of files in the dirstate based on
487 # its size. This trades wasting some memory for avoiding costly
472 # its size. This trades wasting some memory for avoiding costly
488 # resizes. Each entry has a prefix of 17 bytes followed by one or
473 # resizes. Each entry has a prefix of 17 bytes followed by one or
489 # two path names. Studies on various large-scale real-world repositories
474 # two path names. Studies on various large-scale real-world repositories
490 # found 54 bytes a reasonable upper limit for the average path names.
475 # found 54 bytes a reasonable upper limit for the average path names.
491 # Copy entries are ignored for the sake of this estimate.
476 # Copy entries are ignored for the sake of this estimate.
492 self._map = parsers.dict_new_presized(len(st) // 71)
477 self._map = parsers.dict_new_presized(len(st) // 71)
493
478
494 # Python's garbage collector triggers a GC each time a certain number
479 # Python's garbage collector triggers a GC each time a certain number
495 # of container objects (the number being defined by
480 # of container objects (the number being defined by
496 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
481 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
497 # for each file in the dirstate. The C version then immediately marks
482 # for each file in the dirstate. The C version then immediately marks
498 # them as not to be tracked by the collector. However, this has no
483 # them as not to be tracked by the collector. However, this has no
499 # effect on when GCs are triggered, only on what objects the GC looks
484 # effect on when GCs are triggered, only on what objects the GC looks
500 # into. This means that O(number of files) GCs are unavoidable.
485 # into. This means that O(number of files) GCs are unavoidable.
501 # Depending on when in the process's lifetime the dirstate is parsed,
486 # Depending on when in the process's lifetime the dirstate is parsed,
502 # this can get very expensive. As a workaround, disable GC while
487 # this can get very expensive. As a workaround, disable GC while
503 # parsing the dirstate.
488 # parsing the dirstate.
504 #
489 #
505 # (we cannot decorate the function directly since it is in a C module)
490 # (we cannot decorate the function directly since it is in a C module)
506 parse_dirstate = util.nogc(parsers.parse_dirstate)
491 parse_dirstate = util.nogc(parsers.parse_dirstate)
507 p = parse_dirstate(self._map, self.copymap, st)
492 p = parse_dirstate(self._map, self.copymap, st)
508 if not self._dirtyparents:
493 if not self._dirtyparents:
509 self.setparents(*p)
494 self.setparents(*p)
510
495
511 # Avoid excess attribute lookups by fast pathing certain checks
496 # Avoid excess attribute lookups by fast pathing certain checks
512 self.__contains__ = self._map.__contains__
497 self.__contains__ = self._map.__contains__
513 self.__getitem__ = self._map.__getitem__
498 self.__getitem__ = self._map.__getitem__
514 self.get = self._map.get
499 self.get = self._map.get
515
500
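The `len(st) // 71` presizing in read() above comes straight from the arithmetic in its comment: 17 fixed bytes per entry plus an assumed 54-byte average path. Restated as a small helper, using only the constants quoted in that comment:

def estimated_entry_count(dirstate_bytes):
    ENTRY_PREFIX = 17    # state byte + mode + size + mtime + length word
    AVG_PATH_BYTES = 54  # empirical upper bound cited in the comment above
    return len(dirstate_bytes) // (ENTRY_PREFIX + AVG_PATH_BYTES)
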
516 def write(self, _tr, st, now):
501 def write(self, _tr, st, now):
517 st.write(
502 st.write(
518 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
503 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
519 )
504 )
520 st.close()
505 st.close()
521 self._dirtyparents = False
506 self._dirtyparents = False
522 self.nonnormalset, self.otherparentset = self.nonnormalentries()
507 self.nonnormalset, self.otherparentset = self.nonnormalentries()
523
508
524 @propertycache
509 @propertycache
525 def nonnormalset(self):
510 def nonnormalset(self):
526 nonnorm, otherparents = self.nonnormalentries()
511 nonnorm, otherparents = self.nonnormalentries()
527 self.otherparentset = otherparents
512 self.otherparentset = otherparents
528 return nonnorm
513 return nonnorm
529
514
530 @propertycache
515 @propertycache
531 def otherparentset(self):
516 def otherparentset(self):
532 nonnorm, otherparents = self.nonnormalentries()
517 nonnorm, otherparents = self.nonnormalentries()
533 self.nonnormalset = nonnorm
518 self.nonnormalset = nonnorm
534 return otherparents
519 return otherparents
535
520
536 def non_normal_or_other_parent_paths(self):
521 def non_normal_or_other_parent_paths(self):
537 return self.nonnormalset.union(self.otherparentset)
522 return self.nonnormalset.union(self.otherparentset)
538
523
539 @propertycache
524 @propertycache
540 def identity(self):
525 def identity(self):
541 self._map
526 self._map
542 return self.identity
527 return self.identity
543
528
544 @propertycache
529 @propertycache
545 def dirfoldmap(self):
530 def dirfoldmap(self):
546 f = {}
531 f = {}
547 normcase = util.normcase
532 normcase = util.normcase
548 for name in self._dirs:
533 for name in self._dirs:
549 f[normcase(name)] = name
534 f[normcase(name)] = name
550 return f
535 return f
551
536
552
537
553 if rustmod is not None:
538 if rustmod is not None:
554
539
555 class dirstatemap(object):
540 class dirstatemap(object):
556 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
541 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
557 self._use_dirstate_v2 = use_dirstate_v2
542 self._use_dirstate_v2 = use_dirstate_v2
558 self._nodeconstants = nodeconstants
543 self._nodeconstants = nodeconstants
559 self._ui = ui
544 self._ui = ui
560 self._opener = opener
545 self._opener = opener
561 self._root = root
546 self._root = root
562 self._filename = b'dirstate'
547 self._filename = b'dirstate'
563 self._nodelen = 20 # Also update Rust code when changing this!
548 self._nodelen = 20 # Also update Rust code when changing this!
564 self._parents = None
549 self._parents = None
565 self._dirtyparents = False
550 self._dirtyparents = False
566 self._docket = None
551 self._docket = None
567
552
568 # for consistent view between _pl() and _read() invocations
553 # for consistent view between _pl() and _read() invocations
569 self._pendingmode = None
554 self._pendingmode = None
570
555
571 self._use_dirstate_tree = self._ui.configbool(
556 self._use_dirstate_tree = self._ui.configbool(
572 b"experimental",
557 b"experimental",
573 b"dirstate-tree.in-memory",
558 b"dirstate-tree.in-memory",
574 False,
559 False,
575 )
560 )
576
561
577 def addfile(
562 def addfile(
578 self,
563 self,
579 f,
564 f,
580 mode=0,
565 mode=0,
581 size=None,
566 size=None,
582 mtime=None,
567 mtime=None,
583 added=False,
568 added=False,
584 merged=False,
569 merged=False,
585 from_p2=False,
570 from_p2=False,
586 possibly_dirty=False,
571 possibly_dirty=False,
587 ):
572 ):
588 if added:
573 if added:
589 assert not possibly_dirty
574 assert not possibly_dirty
590 assert not from_p2
575 assert not from_p2
591 item = DirstateItem.new_added()
576 item = DirstateItem.new_added()
592 elif merged:
577 elif merged:
593 assert not possibly_dirty
578 assert not possibly_dirty
594 assert not from_p2
579 assert not from_p2
595 item = DirstateItem.new_merged()
580 item = DirstateItem.new_merged()
596 elif from_p2:
581 elif from_p2:
597 assert not possibly_dirty
582 assert not possibly_dirty
598 item = DirstateItem.new_from_p2()
583 item = DirstateItem.new_from_p2()
599 elif possibly_dirty:
584 elif possibly_dirty:
600 item = DirstateItem.new_possibly_dirty()
585 item = DirstateItem.new_possibly_dirty()
601 else:
586 else:
602 assert size is not None
587 assert size is not None
603 assert mtime is not None
588 assert mtime is not None
604 size = size & rangemask
589 size = size & rangemask
605 mtime = mtime & rangemask
590 mtime = mtime & rangemask
606 item = DirstateItem.new_normal(mode, size, mtime)
591 item = DirstateItem.new_normal(mode, size, mtime)
607 self._rustmap.addfile(f, item)
592 self._rustmap.addfile(f, item)
608 if added:
593 if added:
609 self.copymap.pop(f, None)
594 self.copymap.pop(f, None)
610
595
611 def reset_state(
596 def reset_state(
612 self,
597 self,
613 filename,
598 filename,
614 wc_tracked=False,
599 wc_tracked=False,
615 p1_tracked=False,
600 p1_tracked=False,
616 p2_tracked=False,
601 p2_tracked=False,
617 merged=False,
602 merged=False,
618 clean_p1=False,
603 clean_p1=False,
619 clean_p2=False,
604 clean_p2=False,
620 possibly_dirty=False,
605 possibly_dirty=False,
621 parentfiledata=None,
606 parentfiledata=None,
622 ):
607 ):
623 """Set a entry to a given state, disregarding all previous state
608 """Set a entry to a given state, disregarding all previous state
624
609
625 This is to be used by the part of the dirstate API dedicated to
610 This is to be used by the part of the dirstate API dedicated to
626 adjusting the dirstate after an update/merge.
611 adjusting the dirstate after an update/merge.
627
612
628 note: calling this might result in no entry existing at all if the
613 note: calling this might result in no entry existing at all if the
629 dirstate map does not see any point in having one for this file
614 dirstate map does not see any point in having one for this file
630 anymore.
615 anymore.
631 """
616 """
632 if merged and (clean_p1 or clean_p2):
617 if merged and (clean_p1 or clean_p2):
633 msg = (
618 msg = (
634 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
619 b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
635 )
620 )
636 raise error.ProgrammingError(msg)
621 raise error.ProgrammingError(msg)
637 # copy information is now outdated
622 # copy information is now outdated
638 # (maybe new information should be directly passed to this function)
623 # (maybe new information should be directly passed to this function)
639 self.copymap.pop(filename, None)
624 self.copymap.pop(filename, None)
640
625
641 if not (p1_tracked or p2_tracked or wc_tracked):
626 if not (p1_tracked or p2_tracked or wc_tracked):
642 self._rustmap.drop_item_and_copy_source(filename)
627 self._rustmap.drop_item_and_copy_source(filename)
643 elif merged:
628 elif merged:
644 # XXX might be merged and removed ?
629 # XXX might be merged and removed ?
645 entry = self.get(filename)
630 entry = self.get(filename)
646 if entry is not None and entry.tracked:
631 if entry is not None and entry.tracked:
647 # XXX mostly replicates dirstate.otherparent. We should get
632 # XXX mostly replicates dirstate.otherparent. We should get
648 # the higher layer to pass us more reliable data where `merged`
633 # the higher layer to pass us more reliable data where `merged`
649 # actually means merged. Dropping the else clause will show
634 # actually means merged. Dropping the else clause will show
650 # failures in `test-graft.t`
635 # failures in `test-graft.t`
651 self.addfile(filename, merged=True)
636 self.addfile(filename, merged=True)
652 else:
637 else:
653 self.addfile(filename, from_p2=True)
638 self.addfile(filename, from_p2=True)
654 elif not (p1_tracked or p2_tracked) and wc_tracked:
639 elif not (p1_tracked or p2_tracked) and wc_tracked:
655 self.addfile(
640 self.addfile(
656 filename, added=True, possibly_dirty=possibly_dirty
641 filename, added=True, possibly_dirty=possibly_dirty
657 )
642 )
658 elif (p1_tracked or p2_tracked) and not wc_tracked:
643 elif (p1_tracked or p2_tracked) and not wc_tracked:
659 # XXX might be merged and removed ?
644 # XXX might be merged and removed ?
660 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
645 self[filename] = DirstateItem.from_v1_data(b'r', 0, 0, 0)
661 self.nonnormalset.add(filename)
646 self.nonnormalset.add(filename)
662 elif clean_p2 and wc_tracked:
647 elif clean_p2 and wc_tracked:
663 if p1_tracked or self.get(filename) is not None:
648 if p1_tracked or self.get(filename) is not None:
664 # XXX the `self.get` call is catching some cases in
649 # XXX the `self.get` call is catching some cases in
665 # `test-merge-remove.t` where the file is tracked in p1, but the
650 # `test-merge-remove.t` where the file is tracked in p1, but the
666 # p1_tracked argument is False.
651 # p1_tracked argument is False.
667 #
652 #
668 # In addition, this seems to be a case where the file is marked
653 # In addition, this seems to be a case where the file is marked
669 # as merged without actually being the result of a merge
654 # as merged without actually being the result of a merge
670 # action. So things are not ideal here.
655 # action. So things are not ideal here.
671 self.addfile(filename, merged=True)
656 self.addfile(filename, merged=True)
672 else:
657 else:
673 self.addfile(filename, from_p2=True)
658 self.addfile(filename, from_p2=True)
674 elif not p1_tracked and p2_tracked and wc_tracked:
659 elif not p1_tracked and p2_tracked and wc_tracked:
675 self.addfile(
660 self.addfile(
676 filename, from_p2=True, possibly_dirty=possibly_dirty
661 filename, from_p2=True, possibly_dirty=possibly_dirty
677 )
662 )
678 elif possibly_dirty:
663 elif possibly_dirty:
679 self.addfile(filename, possibly_dirty=possibly_dirty)
664 self.addfile(filename, possibly_dirty=possibly_dirty)
680 elif wc_tracked:
665 elif wc_tracked:
681 # this is a "normal" file
666 # this is a "normal" file
682 if parentfiledata is None:
667 if parentfiledata is None:
683 msg = b'failed to pass parentfiledata for a normal file: %s'
668 msg = b'failed to pass parentfiledata for a normal file: %s'
684 msg %= filename
669 msg %= filename
685 raise error.ProgrammingError(msg)
670 raise error.ProgrammingError(msg)
686 mode, size, mtime = parentfiledata
671 mode, size, mtime = parentfiledata
687 self.addfile(filename, mode=mode, size=size, mtime=mtime)
672 self.addfile(filename, mode=mode, size=size, mtime=mtime)
688 self.nonnormalset.discard(filename)
673 self.nonnormalset.discard(filename)
689 else:
674 else:
690 assert False, 'unreachable'
675 assert False, 'unreachable'
691
676
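# Editor's note: illustrative sketch, not part of this changeset. Typical
# `reset_state` calls the update/merge code could issue against a hypothetical
# instance `dmap` of this class:
#
#     # kept from p1, content not re-checked yet -> "possibly dirty" entry
#     dmap.reset_state(b'a.txt', wc_tracked=True, p1_tracked=True,
#                      possibly_dirty=True)
#     # tracked in p1 but gone from the working copy -> removed entry
#     dmap.reset_state(b'gone.txt', wc_tracked=False, p1_tracked=True)
#     # result of merging both parents -> merged entry
#     dmap.reset_state(b'm.txt', wc_tracked=True, p1_tracked=True,
#                      p2_tracked=True, merged=True)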
692 def set_tracked(self, filename):
677 def set_tracked(self, filename):
693 new = False
678 new = False
694 entry = self.get(filename)
679 entry = self.get(filename)
695 if entry is None:
680 if entry is None:
696 self.addfile(filename, added=True)
681 self.addfile(filename, added=True)
697 new = True
682 new = True
698 elif not entry.tracked:
683 elif not entry.tracked:
699 entry.set_tracked()
684 entry.set_tracked()
700 self._rustmap.set_dirstate_item(filename, entry)
685 self._rustmap.set_dirstate_item(filename, entry)
701 new = True
686 new = True
702 else:
687 else:
703 # XXX This is probably overkill for most cases, but we need this to
688 # XXX This is probably overkill for most cases, but we need this to
704 # fully replace the `normallookup` call with the `set_tracked` one.
689 # fully replace the `normallookup` call with the `set_tracked` one.
705 # Consider smoothing this in the future.
690 # Consider smoothing this in the future.
706 self.set_possibly_dirty(filename)
691 self.set_possibly_dirty(filename)
707 return new
692 return new
708
693
709 def set_untracked(self, f):
694 def set_untracked(self, f):
710 """Mark a file as no longer tracked in the dirstate map"""
695 """Mark a file as no longer tracked in the dirstate map"""
711 # `in_merge` only triggers more logic, so it is fine to pass it.
696 # `in_merge` only triggers more logic, so it is fine to pass it.
712 #
697 #
713 # the inner rust dirstate map code needs to be adjusted once the API
698 # the inner rust dirstate map code needs to be adjusted once the API
714 # for dirstate/dirstatemap/DirstateItem is a bit more settled
699 # for dirstate/dirstatemap/DirstateItem is a bit more settled
715 entry = self.get(f)
700 entry = self.get(f)
716 if entry is None:
701 if entry is None:
717 return False
702 return False
718 else:
703 else:
719 if entry.added:
704 if entry.added:
720 self._rustmap.drop_item_and_copy_source(f)
705 self._rustmap.drop_item_and_copy_source(f)
721 else:
706 else:
722 self._rustmap.removefile(f, in_merge=True)
707 self._rustmap.removefile(f, in_merge=True)
723 return True
708 return True
724
709
725 def removefile(self, *args, **kwargs):
710 def removefile(self, *args, **kwargs):
726 return self._rustmap.removefile(*args, **kwargs)
711 return self._rustmap.removefile(*args, **kwargs)
727
712
728 def nonnormalentries(self):
713 def nonnormalentries(self):
729 return self._rustmap.nonnormalentries()
714 return self._rustmap.nonnormalentries()
730
715
731 def get(self, *args, **kwargs):
716 def get(self, *args, **kwargs):
732 return self._rustmap.get(*args, **kwargs)
717 return self._rustmap.get(*args, **kwargs)
733
718
734 @property
719 @property
735 def copymap(self):
720 def copymap(self):
736 return self._rustmap.copymap()
721 return self._rustmap.copymap()
737
722
738 def debug_iter(self, all):
723 def debug_iter(self, all):
739 """
724 """
740 Return an iterator of (filename, state, mode, size, mtime) tuples
725 Return an iterator of (filename, state, mode, size, mtime) tuples
741
726
742 `all`: also include dirstate tree nodes that don't have an associated
727 `all`: also include dirstate tree nodes that don't have an associated
743 `DirstateItem`; such nodes are reported with `state == b' '`.
728 `DirstateItem`; such nodes are reported with `state == b' '`.
744
729
745 """
730 """
746 return self._rustmap.debug_iter(all)
731 return self._rustmap.debug_iter(all)
747
732
748 def preload(self):
733 def preload(self):
749 self._rustmap
734 self._rustmap
750
735
751 def clear(self):
736 def clear(self):
752 self._rustmap.clear()
737 self._rustmap.clear()
753 self.setparents(
738 self.setparents(
754 self._nodeconstants.nullid, self._nodeconstants.nullid
739 self._nodeconstants.nullid, self._nodeconstants.nullid
755 )
740 )
756 util.clearcachedproperty(self, b"_dirs")
741 util.clearcachedproperty(self, b"_dirs")
757 util.clearcachedproperty(self, b"_alldirs")
742 util.clearcachedproperty(self, b"_alldirs")
758 util.clearcachedproperty(self, b"dirfoldmap")
743 util.clearcachedproperty(self, b"dirfoldmap")
759
744
760 def items(self):
745 def items(self):
761 return self._rustmap.items()
746 return self._rustmap.items()
762
747
763 def keys(self):
748 def keys(self):
764 return iter(self._rustmap)
749 return iter(self._rustmap)
765
750
766 def __contains__(self, key):
751 def __contains__(self, key):
767 return key in self._rustmap
752 return key in self._rustmap
768
753
769 def __getitem__(self, item):
754 def __getitem__(self, item):
770 return self._rustmap[item]
755 return self._rustmap[item]
771
756
772 def __len__(self):
757 def __len__(self):
773 return len(self._rustmap)
758 return len(self._rustmap)
774
759
775 def __iter__(self):
760 def __iter__(self):
776 return iter(self._rustmap)
761 return iter(self._rustmap)
777
762
778 # forward for python2,3 compat
763 # forward for python2,3 compat
779 iteritems = items
764 iteritems = items
780
765
781 def _opendirstatefile(self):
766 def _opendirstatefile(self):
782 fp, mode = txnutil.trypending(
767 fp, mode = txnutil.trypending(
783 self._root, self._opener, self._filename
768 self._root, self._opener, self._filename
784 )
769 )
785 if self._pendingmode is not None and self._pendingmode != mode:
770 if self._pendingmode is not None and self._pendingmode != mode:
786 fp.close()
771 fp.close()
787 raise error.Abort(
772 raise error.Abort(
788 _(b'working directory state may be changed parallelly')
773 _(b'working directory state may be changed parallelly')
789 )
774 )
790 self._pendingmode = mode
775 self._pendingmode = mode
791 return fp
776 return fp
792
777
793 def _readdirstatefile(self, size=-1):
778 def _readdirstatefile(self, size=-1):
794 try:
779 try:
795 with self._opendirstatefile() as fp:
780 with self._opendirstatefile() as fp:
796 return fp.read(size)
781 return fp.read(size)
797 except IOError as err:
782 except IOError as err:
798 if err.errno != errno.ENOENT:
783 if err.errno != errno.ENOENT:
799 raise
784 raise
800 # File doesn't exist, so the current state is empty
785 # File doesn't exist, so the current state is empty
801 return b''
786 return b''
802
787
803 def setparents(self, p1, p2, fold_p2=False):
788 def setparents(self, p1, p2, fold_p2=False):
804 self._parents = (p1, p2)
789 self._parents = (p1, p2)
805 self._dirtyparents = True
790 self._dirtyparents = True
806 copies = {}
791 copies = {}
807 if fold_p2:
792 if fold_p2:
808 candidatefiles = self.non_normal_or_other_parent_paths()
793 candidatefiles = self.non_normal_or_other_parent_paths()
809
794
810 for f in candidatefiles:
795 for f in candidatefiles:
811 s = self.get(f)
796 s = self.get(f)
812 if s is None:
797 if s is None:
813 continue
798 continue
814
799
815 # Discard "merged" markers when moving away from a merge state
800 # Discard "merged" markers when moving away from a merge state
816 if s.merged:
801 if s.merged:
817 source = self.copymap.get(f)
802 source = self.copymap.get(f)
818 if source:
803 if source:
819 copies[f] = source
804 copies[f] = source
820 self.reset_state(
805 self.reset_state(
821 f,
806 f,
822 wc_tracked=True,
807 wc_tracked=True,
823 p1_tracked=True,
808 p1_tracked=True,
824 possibly_dirty=True,
809 possibly_dirty=True,
825 )
810 )
826 # Also fix up otherparent markers
811 # Also fix up otherparent markers
827 elif s.from_p2:
812 elif s.from_p2:
828 source = self.copymap.get(f)
813 source = self.copymap.get(f)
829 if source:
814 if source:
830 copies[f] = source
815 copies[f] = source
831 self.reset_state(
816 self.reset_state(
832 f,
817 f,
833 p1_tracked=False,
818 p1_tracked=False,
834 wc_tracked=True,
819 wc_tracked=True,
835 )
820 )
836 return copies
821 return copies
837
822
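# Editor's note: illustrative sketch, not part of this changeset. When the
# second parent is dropped, callers pass `fold_p2=True` so that merged and
# from_p2 entries are folded back into plain p1-tracked entries; the returned
# dict maps each such file to its recorded copy source so the caller can
# re-register it. One plausible caller-side use (names are hypothetical):
#
#     copies = dmap.setparents(new_p1, nullid, fold_p2=True)
#     for dest, source in copies.items():
#         dmap.copymap[dest] = source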
838 def parents(self):
823 def parents(self):
839 if not self._parents:
824 if not self._parents:
840 if self._use_dirstate_v2:
825 if self._use_dirstate_v2:
841 self._parents = self.docket.parents
826 self._parents = self.docket.parents
842 else:
827 else:
843 read_len = self._nodelen * 2
828 read_len = self._nodelen * 2
844 st = self._readdirstatefile(read_len)
829 st = self._readdirstatefile(read_len)
845 l = len(st)
830 l = len(st)
846 if l == read_len:
831 if l == read_len:
847 self._parents = (
832 self._parents = (
848 st[: self._nodelen],
833 st[: self._nodelen],
849 st[self._nodelen : 2 * self._nodelen],
834 st[self._nodelen : 2 * self._nodelen],
850 )
835 )
851 elif l == 0:
836 elif l == 0:
852 self._parents = (
837 self._parents = (
853 self._nodeconstants.nullid,
838 self._nodeconstants.nullid,
854 self._nodeconstants.nullid,
839 self._nodeconstants.nullid,
855 )
840 )
856 else:
841 else:
857 raise error.Abort(
842 raise error.Abort(
858 _(b'working directory state appears damaged!')
843 _(b'working directory state appears damaged!')
859 )
844 )
860
845
861 return self._parents
846 return self._parents
862
847
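# Editor's note: illustrative sketch, not part of this changeset. In the v1
# format the parents are simply the first 2 * nodelen bytes of the dirstate
# file, exactly as `parents()` slices them above; a standalone illustration
# with 20-byte sha1 nodes:
#
#     nodelen = 20
#     p1 = b'\x11' * nodelen
#     p2 = b'\x00' * nodelen                # null id: second parent unset
#     header = p1 + p2                      # what _readdirstatefile(40) yields
#     assert (header[:nodelen], header[nodelen : 2 * nodelen]) == (p1, p2)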
863 @property
848 @property
864 def docket(self):
849 def docket(self):
865 if not self._docket:
850 if not self._docket:
866 if not self._use_dirstate_v2:
851 if not self._use_dirstate_v2:
867 raise error.ProgrammingError(
852 raise error.ProgrammingError(
868 b'dirstate only has a docket in v2 format'
853 b'dirstate only has a docket in v2 format'
869 )
854 )
870 self._docket = docketmod.DirstateDocket.parse(
855 self._docket = docketmod.DirstateDocket.parse(
871 self._readdirstatefile(), self._nodeconstants
856 self._readdirstatefile(), self._nodeconstants
872 )
857 )
873 return self._docket
858 return self._docket
874
859
875 @propertycache
860 @propertycache
876 def _rustmap(self):
861 def _rustmap(self):
877 """
862 """
878 Fills the Dirstatemap when called.
863 Fills the Dirstatemap when called.
879 """
864 """
880 # ignore HG_PENDING because identity is used only for writing
865 # ignore HG_PENDING because identity is used only for writing
881 self.identity = util.filestat.frompath(
866 self.identity = util.filestat.frompath(
882 self._opener.join(self._filename)
867 self._opener.join(self._filename)
883 )
868 )
884
869
885 if self._use_dirstate_v2:
870 if self._use_dirstate_v2:
886 if self.docket.uuid:
871 if self.docket.uuid:
887 # TODO: use mmap when possible
872 # TODO: use mmap when possible
888 data = self._opener.read(self.docket.data_filename())
873 data = self._opener.read(self.docket.data_filename())
889 else:
874 else:
890 data = b''
875 data = b''
891 self._rustmap = rustmod.DirstateMap.new_v2(
876 self._rustmap = rustmod.DirstateMap.new_v2(
892 data, self.docket.data_size, self.docket.tree_metadata
877 data, self.docket.data_size, self.docket.tree_metadata
893 )
878 )
894 parents = self.docket.parents
879 parents = self.docket.parents
895 else:
880 else:
896 self._rustmap, parents = rustmod.DirstateMap.new_v1(
881 self._rustmap, parents = rustmod.DirstateMap.new_v1(
897 self._use_dirstate_tree, self._readdirstatefile()
882 self._use_dirstate_tree, self._readdirstatefile()
898 )
883 )
899
884
900 if parents and not self._dirtyparents:
885 if parents and not self._dirtyparents:
901 self.setparents(*parents)
886 self.setparents(*parents)
902
887
903 self.__contains__ = self._rustmap.__contains__
888 self.__contains__ = self._rustmap.__contains__
904 self.__getitem__ = self._rustmap.__getitem__
889 self.__getitem__ = self._rustmap.__getitem__
905 self.get = self._rustmap.get
890 self.get = self._rustmap.get
906 return self._rustmap
891 return self._rustmap
907
892
908 def write(self, tr, st, now):
893 def write(self, tr, st, now):
909 if not self._use_dirstate_v2:
894 if not self._use_dirstate_v2:
910 p1, p2 = self.parents()
895 p1, p2 = self.parents()
911 packed = self._rustmap.write_v1(p1, p2, now)
896 packed = self._rustmap.write_v1(p1, p2, now)
912 st.write(packed)
897 st.write(packed)
913 st.close()
898 st.close()
914 self._dirtyparents = False
899 self._dirtyparents = False
915 return
900 return
916
901
917 # We can only append to an existing data file if there is one
902 # We can only append to an existing data file if there is one
918 can_append = self.docket.uuid is not None
903 can_append = self.docket.uuid is not None
919 packed, meta, append = self._rustmap.write_v2(now, can_append)
904 packed, meta, append = self._rustmap.write_v2(now, can_append)
920 if append:
905 if append:
921 docket = self.docket
906 docket = self.docket
922 data_filename = docket.data_filename()
907 data_filename = docket.data_filename()
923 if tr:
908 if tr:
924 tr.add(data_filename, docket.data_size)
909 tr.add(data_filename, docket.data_size)
925 with self._opener(data_filename, b'r+b') as fp:
910 with self._opener(data_filename, b'r+b') as fp:
926 fp.seek(docket.data_size)
911 fp.seek(docket.data_size)
927 assert fp.tell() == docket.data_size
912 assert fp.tell() == docket.data_size
928 written = fp.write(packed)
913 written = fp.write(packed)
929 if written is not None: # py2 may return None
914 if written is not None: # py2 may return None
930 assert written == len(packed), (written, len(packed))
915 assert written == len(packed), (written, len(packed))
931 docket.data_size += len(packed)
916 docket.data_size += len(packed)
932 docket.parents = self.parents()
917 docket.parents = self.parents()
933 docket.tree_metadata = meta
918 docket.tree_metadata = meta
934 st.write(docket.serialize())
919 st.write(docket.serialize())
935 st.close()
920 st.close()
936 else:
921 else:
937 old_docket = self.docket
922 old_docket = self.docket
938 new_docket = docketmod.DirstateDocket.with_new_uuid(
923 new_docket = docketmod.DirstateDocket.with_new_uuid(
939 self.parents(), len(packed), meta
924 self.parents(), len(packed), meta
940 )
925 )
941 data_filename = new_docket.data_filename()
926 data_filename = new_docket.data_filename()
942 if tr:
927 if tr:
943 tr.add(data_filename, 0)
928 tr.add(data_filename, 0)
944 self._opener.write(data_filename, packed)
929 self._opener.write(data_filename, packed)
945 # Write the new docket after the new data file has been
930 # Write the new docket after the new data file has been
946 # written. Because `st` was opened with `atomictemp=True`,
931 # written. Because `st` was opened with `atomictemp=True`,
947 # the actual `.hg/dirstate` file is only affected on close.
932 # the actual `.hg/dirstate` file is only affected on close.
948 st.write(new_docket.serialize())
933 st.write(new_docket.serialize())
949 st.close()
934 st.close()
950 # Remove the old data file after the new docket pointing to
935 # Remove the old data file after the new docket pointing to
951 # the new data file was written.
936 # the new data file was written.
952 if old_docket.uuid:
937 if old_docket.uuid:
953 data_filename = old_docket.data_filename()
938 data_filename = old_docket.data_filename()
954 unlink = lambda _tr=None: self._opener.unlink(data_filename)
939 unlink = lambda _tr=None: self._opener.unlink(data_filename)
955 if tr:
940 if tr:
956 category = b"dirstate-v2-clean-" + old_docket.uuid
941 category = b"dirstate-v2-clean-" + old_docket.uuid
957 tr.addpostclose(category, unlink)
942 tr.addpostclose(category, unlink)
958 else:
943 else:
959 unlink()
944 unlink()
960 self._docket = new_docket
945 self._docket = new_docket
961 # Reload from the newly-written file
946 # Reload from the newly-written file
962 util.clearcachedproperty(self, b"_rustmap")
947 util.clearcachedproperty(self, b"_rustmap")
963 self._dirtyparents = False
948 self._dirtyparents = False
964
949
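# Editor's note: illustrative summary, not part of this changeset. The v2
# branch of `write()` above boils down to one decision:
#
#     if can_append and append:
#         # reuse the data file named by the current docket: seek to
#         # docket.data_size, append `packed`, then write a docket with the
#         # enlarged size
#         pass
#     else:
#         # write `packed` to a fresh data file under a new uuid, point a new
#         # docket at it, and unlink the old data file only after the docket
#         # switch, so readers never see a docket without its data
#         pass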
965 @propertycache
950 @propertycache
966 def filefoldmap(self):
951 def filefoldmap(self):
967 """Returns a dictionary mapping normalized case paths to their
952 """Returns a dictionary mapping normalized case paths to their
968 non-normalized versions.
953 non-normalized versions.
969 """
954 """
970 return self._rustmap.filefoldmapasdict()
955 return self._rustmap.filefoldmapasdict()
971
956
972 def hastrackeddir(self, d):
957 def hastrackeddir(self, d):
973 return self._rustmap.hastrackeddir(d)
958 return self._rustmap.hastrackeddir(d)
974
959
975 def hasdir(self, d):
960 def hasdir(self, d):
976 return self._rustmap.hasdir(d)
961 return self._rustmap.hasdir(d)
977
962
978 @propertycache
963 @propertycache
979 def identity(self):
964 def identity(self):
980 self._rustmap
965 self._rustmap
981 return self.identity
966 return self.identity
982
967
983 @property
968 @property
984 def nonnormalset(self):
969 def nonnormalset(self):
985 nonnorm = self._rustmap.non_normal_entries()
970 nonnorm = self._rustmap.non_normal_entries()
986 return nonnorm
971 return nonnorm
987
972
988 @propertycache
973 @propertycache
989 def otherparentset(self):
974 def otherparentset(self):
990 otherparents = self._rustmap.other_parent_entries()
975 otherparents = self._rustmap.other_parent_entries()
991 return otherparents
976 return otherparents
992
977
993 def non_normal_or_other_parent_paths(self):
978 def non_normal_or_other_parent_paths(self):
994 return self._rustmap.non_normal_or_other_parent_paths()
979 return self._rustmap.non_normal_or_other_parent_paths()
995
980
996 @propertycache
981 @propertycache
997 def dirfoldmap(self):
982 def dirfoldmap(self):
998 f = {}
983 f = {}
999 normcase = util.normcase
984 normcase = util.normcase
1000 for name in self._rustmap.tracked_dirs():
985 for name in self._rustmap.tracked_dirs():
1001 f[normcase(name)] = name
986 f[normcase(name)] = name
1002 return f
987 return f
1003
988
1004 def set_possibly_dirty(self, filename):
989 def set_possibly_dirty(self, filename):
1005 """record that the current state of the file on disk is unknown"""
990 """record that the current state of the file on disk is unknown"""
1006 entry = self[filename]
991 entry = self[filename]
1007 entry.set_possibly_dirty()
992 entry.set_possibly_dirty()
1008 self._rustmap.set_dirstate_item(filename, entry)
993 self._rustmap.set_dirstate_item(filename, entry)
1009
994
1010 def set_clean(self, filename, mode, size, mtime):
995 def set_clean(self, filename, mode, size, mtime):
1011 """mark a file as back to a clean state"""
996 """mark a file as back to a clean state"""
1012 entry = self[filename]
997 entry = self[filename]
1013 mtime = mtime & rangemask
998 mtime = mtime & rangemask
1014 size = size & rangemask
999 size = size & rangemask
1015 entry.set_clean(mode, size, mtime)
1000 entry.set_clean(mode, size, mtime)
1016 self._rustmap.set_dirstate_item(filename, entry)
1001 self._rustmap.set_dirstate_item(filename, entry)
1017 self._rustmap.copymap().pop(filename, None)
1002 self._rustmap.copymap().pop(filename, None)
1018
1003
1019 def __setitem__(self, key, value):
1004 def __setitem__(self, key, value):
1020 assert isinstance(value, DirstateItem)
1005 assert isinstance(value, DirstateItem)
1021 self._rustmap.set_dirstate_item(key, value)
1006 self._rustmap.set_dirstate_item(key, value)
@@ -1,822 +1,840 b''
1 # parsers.py - Python implementation of parsers.c
1 # parsers.py - Python implementation of parsers.c
2 #
2 #
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import struct
10 import struct
11 import zlib
11 import zlib
12
12
13 from ..node import (
13 from ..node import (
14 nullrev,
14 nullrev,
15 sha1nodeconstants,
15 sha1nodeconstants,
16 )
16 )
17 from ..thirdparty import attr
17 from ..thirdparty import attr
18 from .. import (
18 from .. import (
19 error,
19 error,
20 pycompat,
20 pycompat,
21 revlogutils,
21 revlogutils,
22 util,
22 util,
23 )
23 )
24
24
25 from ..revlogutils import nodemap as nodemaputil
25 from ..revlogutils import nodemap as nodemaputil
26 from ..revlogutils import constants as revlog_constants
26 from ..revlogutils import constants as revlog_constants
27
27
28 stringio = pycompat.bytesio
28 stringio = pycompat.bytesio
29
29
30
30
31 _pack = struct.pack
31 _pack = struct.pack
32 _unpack = struct.unpack
32 _unpack = struct.unpack
33 _compress = zlib.compress
33 _compress = zlib.compress
34 _decompress = zlib.decompress
34 _decompress = zlib.decompress
35
35
36
36
37 # a special value used internally for `size` if the file comes from the other parent
37 # a special value used internally for `size` if the file comes from the other parent
38 FROM_P2 = -2
38 FROM_P2 = -2
39
39
40 # a special value used internally for `size` if the file is modified/merged/added
40 # a special value used internally for `size` if the file is modified/merged/added
41 NONNORMAL = -1
41 NONNORMAL = -1
42
42
43 # a special value used internally for `time` if the time is ambiguous
43 # a special value used internally for `time` if the time is ambiguous
44 AMBIGUOUS_TIME = -1
44 AMBIGUOUS_TIME = -1
45
45
46
46
47 @attr.s(slots=True, init=False)
47 @attr.s(slots=True, init=False)
48 class DirstateItem(object):
48 class DirstateItem(object):
49 """represent a dirstate entry
49 """represent a dirstate entry
50
50
51 It contains:
51 It contains:
52
52
53 - state (one of 'n', 'a', 'r', 'm')
53 - state (one of 'n', 'a', 'r', 'm')
54 - mode,
54 - mode,
55 - size,
55 - size,
56 - mtime,
56 - mtime,
57 """
57 """
58
58
59 _wc_tracked = attr.ib()
59 _wc_tracked = attr.ib()
60 _p1_tracked = attr.ib()
60 _p1_tracked = attr.ib()
61 _p2_tracked = attr.ib()
61 _p2_tracked = attr.ib()
62 # the three items above should probably be combined
62 # the three items above should probably be combined
63 #
63 #
64 # However it is unclear if they properly cover some of the most advanced
64 # However it is unclear if they properly cover some of the most advanced
65 # merge cases. So we should probably wait on this to be settled.
65 # merge cases. So we should probably wait on this to be settled.
66 _merged = attr.ib()
66 _merged = attr.ib()
67 _clean_p1 = attr.ib()
67 _clean_p1 = attr.ib()
68 _clean_p2 = attr.ib()
68 _clean_p2 = attr.ib()
69 _possibly_dirty = attr.ib()
69 _possibly_dirty = attr.ib()
70 _mode = attr.ib()
70 _mode = attr.ib()
71 _size = attr.ib()
71 _size = attr.ib()
72 _mtime = attr.ib()
72 _mtime = attr.ib()
73
73
74 def __init__(
74 def __init__(
75 self,
75 self,
76 wc_tracked=False,
76 wc_tracked=False,
77 p1_tracked=False,
77 p1_tracked=False,
78 p2_tracked=False,
78 p2_tracked=False,
79 merged=False,
79 merged=False,
80 clean_p1=False,
80 clean_p1=False,
81 clean_p2=False,
81 clean_p2=False,
82 possibly_dirty=False,
82 possibly_dirty=False,
83 parentfiledata=None,
83 parentfiledata=None,
84 ):
84 ):
85 if merged and (clean_p1 or clean_p2):
85 if merged and (clean_p1 or clean_p2):
86 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
86 msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
87 raise error.ProgrammingError(msg)
87 raise error.ProgrammingError(msg)
88
88
89 self._wc_tracked = wc_tracked
89 self._wc_tracked = wc_tracked
90 self._p1_tracked = p1_tracked
90 self._p1_tracked = p1_tracked
91 self._p2_tracked = p2_tracked
91 self._p2_tracked = p2_tracked
92 self._merged = merged
92 self._merged = merged
93 self._clean_p1 = clean_p1
93 self._clean_p1 = clean_p1
94 self._clean_p2 = clean_p2
94 self._clean_p2 = clean_p2
95 self._possibly_dirty = possibly_dirty
95 self._possibly_dirty = possibly_dirty
96 if parentfiledata is None:
96 if parentfiledata is None:
97 self._mode = None
97 self._mode = None
98 self._size = None
98 self._size = None
99 self._mtime = None
99 self._mtime = None
100 else:
100 else:
101 self._mode = parentfiledata[0]
101 self._mode = parentfiledata[0]
102 self._size = parentfiledata[1]
102 self._size = parentfiledata[1]
103 self._mtime = parentfiledata[2]
103 self._mtime = parentfiledata[2]
104
104
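# Editor's note: illustrative sketch, not part of this changeset. Building an
# entry for a file that is clean against p1 and has cached stat data:
#
#     item = DirstateItem(
#         wc_tracked=True,
#         p1_tracked=True,
#         parentfiledata=(0o644, 12, 1630000000),  # mode, size, mtime
#     )
#     assert item.tracked and not item.added and not item.merged
#     assert item.state == b'n'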
105 @classmethod
105 @classmethod
106 def new_added(cls):
106 def new_added(cls):
107 """constructor to help legacy API to build a new "added" item
107 """constructor to help legacy API to build a new "added" item
108
108
109 Should eventually be removed
109 Should eventually be removed
110 """
110 """
111 instance = cls()
111 instance = cls()
112 instance._wc_tracked = True
112 instance._wc_tracked = True
113 instance._p1_tracked = False
113 instance._p1_tracked = False
114 instance._p2_tracked = False
114 instance._p2_tracked = False
115 return instance
115 return instance
116
116
117 @classmethod
117 @classmethod
118 def new_merged(cls):
118 def new_merged(cls):
119 """constructor to help legacy API to build a new "merged" item
119 """constructor to help legacy API to build a new "merged" item
120
120
121 Should eventually be removed
121 Should eventually be removed
122 """
122 """
123 instance = cls()
123 instance = cls()
124 instance._wc_tracked = True
124 instance._wc_tracked = True
125 instance._p1_tracked = True # might not be True because of rename ?
125 instance._p1_tracked = True # might not be True because of rename ?
126 instance._p2_tracked = True # might not be True because of rename ?
126 instance._p2_tracked = True # might not be True because of rename ?
127 instance._merged = True
127 instance._merged = True
128 return instance
128 return instance
129
129
130 @classmethod
130 @classmethod
131 def new_from_p2(cls):
131 def new_from_p2(cls):
132 """constructor to help legacy API to build a new "from_p2" item
132 """constructor to help legacy API to build a new "from_p2" item
133
133
134 Should eventually be removed
134 Should eventually be removed
135 """
135 """
136 instance = cls()
136 instance = cls()
137 instance._wc_tracked = True
137 instance._wc_tracked = True
138 instance._p1_tracked = False # might actually be True
138 instance._p1_tracked = False # might actually be True
139 instance._p2_tracked = True
139 instance._p2_tracked = True
140 instance._clean_p2 = True
140 instance._clean_p2 = True
141 return instance
141 return instance
142
142
143 @classmethod
143 @classmethod
144 def new_possibly_dirty(cls):
144 def new_possibly_dirty(cls):
145 """constructor to help legacy API to build a new "possibly_dirty" item
145 """constructor to help legacy API to build a new "possibly_dirty" item
146
146
147 Should eventually be removed
147 Should eventually be removed
148 """
148 """
149 instance = cls()
149 instance = cls()
150 instance._wc_tracked = True
150 instance._wc_tracked = True
151 instance._p1_tracked = True
151 instance._p1_tracked = True
152 instance._possibly_dirty = True
152 instance._possibly_dirty = True
153 return instance
153 return instance
154
154
155 @classmethod
155 @classmethod
156 def new_normal(cls, mode, size, mtime):
156 def new_normal(cls, mode, size, mtime):
157 """constructor to help legacy API to build a new "normal" item
157 """constructor to help legacy API to build a new "normal" item
158
158
159 Should eventually be removed
159 Should eventually be removed
160 """
160 """
161 assert size != FROM_P2
161 assert size != FROM_P2
162 assert size != NONNORMAL
162 assert size != NONNORMAL
163 instance = cls()
163 instance = cls()
164 instance._wc_tracked = True
164 instance._wc_tracked = True
165 instance._p1_tracked = True
165 instance._p1_tracked = True
166 instance._mode = mode
166 instance._mode = mode
167 instance._size = size
167 instance._size = size
168 instance._mtime = mtime
168 instance._mtime = mtime
169 return instance
169 return instance
170
170
171 @classmethod
171 @classmethod
172 def from_v1_data(cls, state, mode, size, mtime):
172 def from_v1_data(cls, state, mode, size, mtime):
173 """Build a new DirstateItem object from V1 data
173 """Build a new DirstateItem object from V1 data
174
174
175 Since the dirstate-v1 format is frozen, the signature of this function
175 Since the dirstate-v1 format is frozen, the signature of this function
176 is not expected to change, unlike the __init__ one.
176 is not expected to change, unlike the __init__ one.
177 """
177 """
178 if state == b'm':
178 if state == b'm':
179 return cls.new_merged()
179 return cls.new_merged()
180 elif state == b'a':
180 elif state == b'a':
181 return cls.new_added()
181 return cls.new_added()
182 elif state == b'r':
182 elif state == b'r':
183 instance = cls()
183 instance = cls()
184 instance._wc_tracked = False
184 instance._wc_tracked = False
185 if size == NONNORMAL:
185 if size == NONNORMAL:
186 instance._merged = True
186 instance._merged = True
187 instance._p1_tracked = (
187 instance._p1_tracked = (
188 True # might not be True because of rename ?
188 True # might not be True because of rename ?
189 )
189 )
190 instance._p2_tracked = (
190 instance._p2_tracked = (
191 True # might not be True because of rename ?
191 True # might not be True because of rename ?
192 )
192 )
193 elif size == FROM_P2:
193 elif size == FROM_P2:
194 instance._clean_p2 = True
194 instance._clean_p2 = True
195 instance._p1_tracked = (
195 instance._p1_tracked = (
196 False # We actually don't know (file history)
196 False # We actually don't know (file history)
197 )
197 )
198 instance._p2_tracked = True
198 instance._p2_tracked = True
199 else:
199 else:
200 instance._p1_tracked = True
200 instance._p1_tracked = True
201 return instance
201 return instance
202 elif state == b'n':
202 elif state == b'n':
203 if size == FROM_P2:
203 if size == FROM_P2:
204 return cls.new_from_p2()
204 return cls.new_from_p2()
205 elif size == NONNORMAL:
205 elif size == NONNORMAL:
206 return cls.new_possibly_dirty()
206 return cls.new_possibly_dirty()
207 elif mtime == AMBIGUOUS_TIME:
207 elif mtime == AMBIGUOUS_TIME:
208 instance = cls.new_normal(mode, size, 42)
208 instance = cls.new_normal(mode, size, 42)
209 instance._mtime = None
209 instance._mtime = None
210 instance._possibly_dirty = True
210 instance._possibly_dirty = True
211 return instance
211 return instance
212 else:
212 else:
213 return cls.new_normal(mode, size, mtime)
213 return cls.new_normal(mode, size, mtime)
214 else:
214 else:
215 raise RuntimeError(b'unknown state: %s' % state)
215 raise RuntimeError(b'unknown state: %s' % state)
216
216
217 def set_possibly_dirty(self):
217 def set_possibly_dirty(self):
218 """Mark a file as "possibly dirty"
218 """Mark a file as "possibly dirty"
219
219
220 This means the next status call will have to actually check its content
220 This means the next status call will have to actually check its content
221 to make sure it is correct.
221 to make sure it is correct.
222 """
222 """
223 self._possibly_dirty = True
223 self._possibly_dirty = True
224
224
225 def set_clean(self, mode, size, mtime):
225 def set_clean(self, mode, size, mtime):
226 """mark a file as "clean" cancelling potential "possibly dirty call"
226 """mark a file as "clean" cancelling potential "possibly dirty call"
227
227
228 Note: this function is a descendant of `dirstate.normal` and is
228 Note: this function is a descendant of `dirstate.normal` and is
229 currently expected to be called on "normal" entries only. There is no
229 currently expected to be called on "normal" entries only. There is no
230 reason for this not to change in the future as long as the code is
230 reason for this not to change in the future as long as the code is
231 updated to preserve the proper state of the non-normal files.
231 updated to preserve the proper state of the non-normal files.
232 """
232 """
233 self._wc_tracked = True
233 self._wc_tracked = True
234 self._p1_tracked = True
234 self._p1_tracked = True
235 self._p2_tracked = False # this might be wrong
235 self._p2_tracked = False # this might be wrong
236 self._merged = False
236 self._merged = False
237 self._clean_p2 = False
237 self._clean_p2 = False
238 self._possibly_dirty = False
238 self._possibly_dirty = False
239 self._mode = mode
239 self._mode = mode
240 self._size = size
240 self._size = size
241 self._mtime = mtime
241 self._mtime = mtime
242
242
243 def set_tracked(self):
243 def set_tracked(self):
244 """mark a file as tracked in the working copy
244 """mark a file as tracked in the working copy
245
245
246 This will ultimately be called by commands like `hg add`.
246 This will ultimately be called by commands like `hg add`.
247 """
247 """
248 self._wc_tracked = True
248 self._wc_tracked = True
249 # `set_tracked` is replacing various `normallookup` calls. So we set
249 # `set_tracked` is replacing various `normallookup` calls. So we set
250 # "possibly dirty" to stay on the safe side.
250 # "possibly dirty" to stay on the safe side.
251 #
251 #
252 # Consider dropping this in the future in favor of something less broad.
252 # Consider dropping this in the future in favor of something less broad.
253 self._possibly_dirty = True
253 self._possibly_dirty = True
254
254
255 def set_untracked(self):
255 def set_untracked(self):
256 """mark a file as untracked in the working copy
256 """mark a file as untracked in the working copy
257
257
258 This will ultimately be called by commands like `hg remove`.
258 This will ultimately be called by commands like `hg remove`.
259 """
259 """
260 # backup the previous state (useful for merge)
260 # backup the previous state (useful for merge)
261 self._wc_tracked = False
261 self._wc_tracked = False
262 self._mode = None
262 self._mode = None
263 self._size = None
263 self._size = None
264 self._mtime = None
264 self._mtime = None
265
265
266 def drop_merge_data(self):
267 """remove all "merge-only" from a DirstateItem
268
269 This is to be call by the dirstatemap code when the second parent is dropped
270 """
271 if not (self.merged or self.from_p2):
272 return
273 self._p1_tracked = self.merged # why is this not already properly set ?
274
275 self._merged = False
276 self._clean_p1 = False
277 self._clean_p2 = False
278 self._p2_tracked = False
279 self._possibly_dirty = True
280 self._mode = None
281 self._size = None
282 self._mtime = None
283
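# Editor's note: illustrative sketch, not part of this changeset. After the
# second parent is dropped, a previously "merged" entry degrades to a plain
# p1-tracked entry whose on-disk content still has to be re-checked:
#
#     item = DirstateItem.new_merged()
#     item.drop_merge_data()
#     assert not item.merged and not item.from_p2
#     assert item.tracked and item.state == b'n'
#     assert item.v1_mtime() == AMBIGUOUS_TIME  # i.e. possibly dirty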
266 @property
284 @property
267 def mode(self):
285 def mode(self):
268 return self.v1_mode()
286 return self.v1_mode()
269
287
270 @property
288 @property
271 def size(self):
289 def size(self):
272 return self.v1_size()
290 return self.v1_size()
273
291
274 @property
292 @property
275 def mtime(self):
293 def mtime(self):
276 return self.v1_mtime()
294 return self.v1_mtime()
277
295
278 @property
296 @property
279 def state(self):
297 def state(self):
280 """
298 """
281 States are:
299 States are:
282 n normal
300 n normal
283 m needs merging
301 m needs merging
284 r marked for removal
302 r marked for removal
285 a marked for addition
303 a marked for addition
286
304
287 XXX This "state" is a bit obscure and mostly a direct expression of the
305 XXX This "state" is a bit obscure and mostly a direct expression of the
288 dirstatev1 format. It would make sense to ultimately deprecate it in
306 dirstatev1 format. It would make sense to ultimately deprecate it in
289 favor of the more "semantic" attributes.
307 favor of the more "semantic" attributes.
290 """
308 """
291 return self.v1_state()
309 return self.v1_state()
292
310
293 @property
311 @property
294 def tracked(self):
312 def tracked(self):
295 """True is the file is tracked in the working copy"""
313 """True is the file is tracked in the working copy"""
296 return self._wc_tracked
314 return self._wc_tracked
297
315
298 @property
316 @property
299 def added(self):
317 def added(self):
300 """True if the file has been added"""
318 """True if the file has been added"""
301 return self._wc_tracked and not (self._p1_tracked or self._p2_tracked)
319 return self._wc_tracked and not (self._p1_tracked or self._p2_tracked)
302
320
303 @property
321 @property
304 def merged(self):
322 def merged(self):
305 """True if the file has been merged
323 """True if the file has been merged
306
324
307 Should only be set if a merge is in progress in the dirstate
325 Should only be set if a merge is in progress in the dirstate
308 """
326 """
309 return self._wc_tracked and self._merged
327 return self._wc_tracked and self._merged
310
328
311 @property
329 @property
312 def from_p2(self):
330 def from_p2(self):
313 """True if the file have been fetched from p2 during the current merge
331 """True if the file have been fetched from p2 during the current merge
314
332
315 This is only True is the file is currently tracked.
333 This is only True is the file is currently tracked.
316
334
317 Should only be set if a merge is in progress in the dirstate
335 Should only be set if a merge is in progress in the dirstate
318 """
336 """
319 if not self._wc_tracked:
337 if not self._wc_tracked:
320 return False
338 return False
321 return self._clean_p2 or (not self._p1_tracked and self._p2_tracked)
339 return self._clean_p2 or (not self._p1_tracked and self._p2_tracked)
322
340
323 @property
341 @property
324 def from_p2_removed(self):
342 def from_p2_removed(self):
325 """True if the file has been removed, but was "from_p2" initially
343 """True if the file has been removed, but was "from_p2" initially
326
344
327 This property seems like an abstraction leakage and should probably be
345 This property seems like an abstraction leakage and should probably be
328 dealt with in this class (or maybe the dirstatemap) directly.
346 dealt with in this class (or maybe the dirstatemap) directly.
329 """
347 """
330 return self.removed and self._clean_p2
348 return self.removed and self._clean_p2
331
349
332 @property
350 @property
333 def removed(self):
351 def removed(self):
334 """True if the file has been removed"""
352 """True if the file has been removed"""
335 return not self._wc_tracked and (self._p1_tracked or self._p2_tracked)
353 return not self._wc_tracked and (self._p1_tracked or self._p2_tracked)
336
354
337 @property
355 @property
338 def merged_removed(self):
356 def merged_removed(self):
339 """True if the file has been removed, but was "merged" initially
357 """True if the file has been removed, but was "merged" initially
340
358
341 This property seems like an abstraction leakage and should probably be
359 This property seems like an abstraction leakage and should probably be
342 dealt with in this class (or maybe the dirstatemap) directly.
360 dealt with in this class (or maybe the dirstatemap) directly.
343 """
361 """
344 return self.removed and self._merged
362 return self.removed and self._merged
345
363
346 @property
364 @property
347 def dm_nonnormal(self):
365 def dm_nonnormal(self):
348 """True is the entry is non-normal in the dirstatemap sense
366 """True is the entry is non-normal in the dirstatemap sense
349
367
350 There is no reason for any code, but the dirstatemap one to use this.
368 There is no reason for any code, but the dirstatemap one to use this.
351 """
369 """
352 return self.v1_state() != b'n' or self.v1_mtime() == AMBIGUOUS_TIME
370 return self.v1_state() != b'n' or self.v1_mtime() == AMBIGUOUS_TIME
353
371
354 @property
372 @property
355 def dm_otherparent(self):
373 def dm_otherparent(self):
356 """True is the entry is `otherparent` in the dirstatemap sense
374 """True is the entry is `otherparent` in the dirstatemap sense
357
375
358 There is no reason for any code, but the dirstatemap one to use this.
376 There is no reason for any code, but the dirstatemap one to use this.
359 """
377 """
360 return self.v1_size() == FROM_P2
378 return self.v1_size() == FROM_P2
361
379
362 def v1_state(self):
380 def v1_state(self):
363 """return a "state" suitable for v1 serialization"""
381 """return a "state" suitable for v1 serialization"""
364 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
382 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
365 # the object has no state to record, this is -currently-
383 # the object has no state to record, this is -currently-
366 # unsupported
384 # unsupported
367 raise RuntimeError('untracked item')
385 raise RuntimeError('untracked item')
368 elif self.removed:
386 elif self.removed:
369 return b'r'
387 return b'r'
370 elif self.merged:
388 elif self.merged:
371 return b'm'
389 return b'm'
372 elif self.added:
390 elif self.added:
373 return b'a'
391 return b'a'
374 else:
392 else:
375 return b'n'
393 return b'n'
376
394
377 def v1_mode(self):
395 def v1_mode(self):
378 """return a "mode" suitable for v1 serialization"""
396 """return a "mode" suitable for v1 serialization"""
379 return self._mode if self._mode is not None else 0
397 return self._mode if self._mode is not None else 0
380
398
381 def v1_size(self):
399 def v1_size(self):
382 """return a "size" suitable for v1 serialization"""
400 """return a "size" suitable for v1 serialization"""
383 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
401 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
384 # the object has no state to record, this is -currently-
402 # the object has no state to record, this is -currently-
385 # unsupported
403 # unsupported
386 raise RuntimeError('untracked item')
404 raise RuntimeError('untracked item')
387 elif self.merged_removed:
405 elif self.merged_removed:
388 return NONNORMAL
406 return NONNORMAL
389 elif self.from_p2_removed:
407 elif self.from_p2_removed:
390 return FROM_P2
408 return FROM_P2
391 elif self.removed:
409 elif self.removed:
392 return 0
410 return 0
393 elif self.merged:
411 elif self.merged:
394 return FROM_P2
412 return FROM_P2
395 elif self.added:
413 elif self.added:
396 return NONNORMAL
414 return NONNORMAL
397 elif self.from_p2:
415 elif self.from_p2:
398 return FROM_P2
416 return FROM_P2
399 elif self._possibly_dirty:
417 elif self._possibly_dirty:
400 return self._size if self._size is not None else NONNORMAL
418 return self._size if self._size is not None else NONNORMAL
401 else:
419 else:
402 return self._size
420 return self._size
403
421
404 def v1_mtime(self):
422 def v1_mtime(self):
405 """return a "mtime" suitable for v1 serialization"""
423 """return a "mtime" suitable for v1 serialization"""
406 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
424 if not (self._p1_tracked or self._p2_tracked or self._wc_tracked):
407 # the object has no state to record, this is -currently-
425 # the object has no state to record, this is -currently-
408 # unsupported
426 # unsupported
409 raise RuntimeError('untracked item')
427 raise RuntimeError('untracked item')
410 elif self.removed:
428 elif self.removed:
411 return 0
429 return 0
412 elif self._possibly_dirty:
430 elif self._possibly_dirty:
413 return AMBIGUOUS_TIME
431 return AMBIGUOUS_TIME
414 elif self.merged:
432 elif self.merged:
415 return AMBIGUOUS_TIME
433 return AMBIGUOUS_TIME
416 elif self.added:
434 elif self.added:
417 return AMBIGUOUS_TIME
435 return AMBIGUOUS_TIME
418 elif self.from_p2:
436 elif self.from_p2:
419 return AMBIGUOUS_TIME
437 return AMBIGUOUS_TIME
420 else:
438 else:
421 return self._mtime if self._mtime is not None else 0
439 return self._mtime if self._mtime is not None else 0
422
440
423 def need_delay(self, now):
441 def need_delay(self, now):
424 """True if the stored mtime would be ambiguous with the current time"""
442 """True if the stored mtime would be ambiguous with the current time"""
425 return self.v1_state() == b'n' and self.v1_mtime() == now
443 return self.v1_state() == b'n' and self.v1_mtime() == now
426
444
427
445
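# Editor's note: illustrative sketch, not part of this changeset. An entry
# whose recorded mtime equals the timestamp of the write cannot be trusted on
# the next status run (the file may be modified again within the same
# second), which is what `need_delay` reports:
#
#     now = 1630000000
#     racy = DirstateItem.new_normal(0o644, 12, now)
#     older = DirstateItem.new_normal(0o644, 12, now - 10)
#     assert racy.need_delay(now) and not older.need_delay(now)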
428 def gettype(q):
446 def gettype(q):
429 return int(q & 0xFFFF)
447 return int(q & 0xFFFF)
430
448
431
449
432 class BaseIndexObject(object):
450 class BaseIndexObject(object):
433 # Can I be passed to an algorithm implemented in Rust ?
451 # Can I be passed to an algorithm implemented in Rust ?
434 rust_ext_compat = 0
452 rust_ext_compat = 0
435 # Format of an index entry according to Python's `struct` language
453 # Format of an index entry according to Python's `struct` language
436 index_format = revlog_constants.INDEX_ENTRY_V1
454 index_format = revlog_constants.INDEX_ENTRY_V1
437 # Size of a C unsigned long long int, platform independent
455 # Size of a C unsigned long long int, platform independent
438 big_int_size = struct.calcsize(b'>Q')
456 big_int_size = struct.calcsize(b'>Q')
439 # Size of a C long int, platform independent
457 # Size of a C long int, platform independent
440 int_size = struct.calcsize(b'>i')
458 int_size = struct.calcsize(b'>i')
441 # An empty index entry, used as a default value to be overridden, or nullrev
459 # An empty index entry, used as a default value to be overridden, or nullrev
442 null_item = (
460 null_item = (
443 0,
461 0,
444 0,
462 0,
445 0,
463 0,
446 -1,
464 -1,
447 -1,
465 -1,
448 -1,
466 -1,
449 -1,
467 -1,
450 sha1nodeconstants.nullid,
468 sha1nodeconstants.nullid,
451 0,
469 0,
452 0,
470 0,
453 revlog_constants.COMP_MODE_INLINE,
471 revlog_constants.COMP_MODE_INLINE,
454 revlog_constants.COMP_MODE_INLINE,
472 revlog_constants.COMP_MODE_INLINE,
455 )
473 )
456
474
457 @util.propertycache
475 @util.propertycache
458 def entry_size(self):
476 def entry_size(self):
459 return self.index_format.size
477 return self.index_format.size
460
478
461 @property
479 @property
462 def nodemap(self):
480 def nodemap(self):
463 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
481 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
464 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
482 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
465 return self._nodemap
483 return self._nodemap
466
484
467 @util.propertycache
485 @util.propertycache
468 def _nodemap(self):
486 def _nodemap(self):
469 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
487 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
470 for r in range(0, len(self)):
488 for r in range(0, len(self)):
471 n = self[r][7]
489 n = self[r][7]
472 nodemap[n] = r
490 nodemap[n] = r
473 return nodemap
491 return nodemap
474
492
475 def has_node(self, node):
493 def has_node(self, node):
476 """return True if the node exist in the index"""
494 """return True if the node exist in the index"""
477 return node in self._nodemap
495 return node in self._nodemap
478
496
479 def rev(self, node):
497 def rev(self, node):
480 """return a revision for a node
498 """return a revision for a node
481
499
482 If the node is unknown, raise a RevlogError"""
500 If the node is unknown, raise a RevlogError"""
483 return self._nodemap[node]
501 return self._nodemap[node]
484
502
485 def get_rev(self, node):
503 def get_rev(self, node):
486 """return a revision for a node
504 """return a revision for a node
487
505
488 If the node is unknown, return None"""
506 If the node is unknown, return None"""
489 return self._nodemap.get(node)
507 return self._nodemap.get(node)
490
508
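# Editor's note: illustrative note, not part of this changeset. The pure
# Python nodemap is a plain dict rebuilt on demand by scanning every index
# entry (the node lives at position 7 of each tuple), so with a hypothetical
# `index` object:
#
#     rev = index.rev(node)        # raises error.RevlogError if unknown
#     rev = index.get_rev(node)    # returns None if unknown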
491 def _stripnodes(self, start):
509 def _stripnodes(self, start):
492 if '_nodemap' in vars(self):
510 if '_nodemap' in vars(self):
493 for r in range(start, len(self)):
511 for r in range(start, len(self)):
494 n = self[r][7]
512 n = self[r][7]
495 del self._nodemap[n]
513 del self._nodemap[n]
496
514
497 def clearcaches(self):
515 def clearcaches(self):
498 self.__dict__.pop('_nodemap', None)
516 self.__dict__.pop('_nodemap', None)
499
517
500 def __len__(self):
518 def __len__(self):
501 return self._lgt + len(self._extra)
519 return self._lgt + len(self._extra)
502
520
503 def append(self, tup):
521 def append(self, tup):
504 if '_nodemap' in vars(self):
522 if '_nodemap' in vars(self):
505 self._nodemap[tup[7]] = len(self)
523 self._nodemap[tup[7]] = len(self)
506 data = self._pack_entry(len(self), tup)
524 data = self._pack_entry(len(self), tup)
507 self._extra.append(data)
525 self._extra.append(data)
508
526
509 def _pack_entry(self, rev, entry):
527 def _pack_entry(self, rev, entry):
510 assert entry[8] == 0
528 assert entry[8] == 0
511 assert entry[9] == 0
529 assert entry[9] == 0
512 return self.index_format.pack(*entry[:8])
530 return self.index_format.pack(*entry[:8])
513
531
514 def _check_index(self, i):
532 def _check_index(self, i):
515 if not isinstance(i, int):
533 if not isinstance(i, int):
516 raise TypeError(b"expecting int indexes")
534 raise TypeError(b"expecting int indexes")
517 if i < 0 or i >= len(self):
535 if i < 0 or i >= len(self):
518 raise IndexError
536 raise IndexError
519
537
520 def __getitem__(self, i):
538 def __getitem__(self, i):
521 if i == -1:
539 if i == -1:
522 return self.null_item
540 return self.null_item
523 self._check_index(i)
541 self._check_index(i)
524 if i >= self._lgt:
542 if i >= self._lgt:
525 data = self._extra[i - self._lgt]
543 data = self._extra[i - self._lgt]
526 else:
544 else:
527 index = self._calculate_index(i)
545 index = self._calculate_index(i)
528 data = self._data[index : index + self.entry_size]
546 data = self._data[index : index + self.entry_size]
529 r = self._unpack_entry(i, data)
547 r = self._unpack_entry(i, data)
530 if self._lgt and i == 0:
548 if self._lgt and i == 0:
531 offset = revlogutils.offset_type(0, gettype(r[0]))
549 offset = revlogutils.offset_type(0, gettype(r[0]))
532 r = (offset,) + r[1:]
550 r = (offset,) + r[1:]
533 return r
551 return r
534
552
535 def _unpack_entry(self, rev, data):
553 def _unpack_entry(self, rev, data):
536 r = self.index_format.unpack(data)
554 r = self.index_format.unpack(data)
537 r = r + (
555 r = r + (
538 0,
556 0,
539 0,
557 0,
540 revlog_constants.COMP_MODE_INLINE,
558 revlog_constants.COMP_MODE_INLINE,
541 revlog_constants.COMP_MODE_INLINE,
559 revlog_constants.COMP_MODE_INLINE,
542 )
560 )
543 return r
561 return r
544
562
545 def pack_header(self, header):
563 def pack_header(self, header):
546 """pack header information as binary"""
564 """pack header information as binary"""
547 v_fmt = revlog_constants.INDEX_HEADER
565 v_fmt = revlog_constants.INDEX_HEADER
548 return v_fmt.pack(header)
566 return v_fmt.pack(header)
549
567
550 def entry_binary(self, rev):
568 def entry_binary(self, rev):
551 """return the raw binary string representing a revision"""
569 """return the raw binary string representing a revision"""
552 entry = self[rev]
570 entry = self[rev]
553 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
571 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
554 if rev == 0:
572 if rev == 0:
555 p = p[revlog_constants.INDEX_HEADER.size :]
573 p = p[revlog_constants.INDEX_HEADER.size :]
556 return p
574 return p
557
575
558
576
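# Editor's note: illustrative note, not part of this changeset. In revlog v1
# the 4-byte version header is stored where the first entry's offset bytes
# would be (rev 0 always starts at offset zero), which is why `entry_binary(0)`
# strips INDEX_HEADER.size bytes and why `__getitem__` rebuilds rev 0's offset
# with `offset_type(0, gettype(...))`.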
559 class IndexObject(BaseIndexObject):
577 class IndexObject(BaseIndexObject):
560 def __init__(self, data):
578 def __init__(self, data):
561 assert len(data) % self.entry_size == 0, (
579 assert len(data) % self.entry_size == 0, (
562 len(data),
580 len(data),
563 self.entry_size,
581 self.entry_size,
564 len(data) % self.entry_size,
582 len(data) % self.entry_size,
565 )
583 )
566 self._data = data
584 self._data = data
567 self._lgt = len(data) // self.entry_size
585 self._lgt = len(data) // self.entry_size
568 self._extra = []
586 self._extra = []
569
587
570 def _calculate_index(self, i):
588 def _calculate_index(self, i):
571 return i * self.entry_size
589 return i * self.entry_size
572
590
573 def __delitem__(self, i):
591 def __delitem__(self, i):
574 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
592 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
575 raise ValueError(b"deleting slices only supports a:-1 with step 1")
593 raise ValueError(b"deleting slices only supports a:-1 with step 1")
576 i = i.start
594 i = i.start
577 self._check_index(i)
595 self._check_index(i)
578 self._stripnodes(i)
596 self._stripnodes(i)
579 if i < self._lgt:
597 if i < self._lgt:
580 self._data = self._data[: i * self.entry_size]
598 self._data = self._data[: i * self.entry_size]
581 self._lgt = i
599 self._lgt = i
582 self._extra = []
600 self._extra = []
583 else:
601 else:
584 self._extra = self._extra[: i - self._lgt]
602 self._extra = self._extra[: i - self._lgt]
585
603
586
604
class PersistentNodeMapIndexObject(IndexObject):
    """a debug-oriented class to test the persistent nodemap

    We need a simple python object to test the API and higher level behavior.
    See the Rust implementation for more serious usage. This should be used
    only through the dedicated `devel.persistent-nodemap` config.
    """

    def nodemap_data_all(self):
        """Return bytes containing a full serialization of a nodemap

        The nodemap should be valid for the full set of revisions in the
        index."""
        return nodemaputil.persistent_data(self)

    def nodemap_data_incremental(self):
        """Return bytes containing an incremental update to the persistent nodemap

        This contains the data for an append-only update of the data provided
        in the last call to `update_nodemap_data`.
        """
        if self._nm_root is None:
            return None
        docket = self._nm_docket
        changed, data = nodemaputil.update_persistent_data(
            self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
        )

        self._nm_root = self._nm_max_idx = self._nm_docket = None
        return docket, changed, data

    def update_nodemap_data(self, docket, nm_data):
        """provide a full block of persisted binary data for a nodemap

        The data are expected to come from disk. See `nodemap_data_all` for a
        producer of such data."""
        if nm_data is not None:
            self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
            if self._nm_root:
                self._nm_docket = docket
            else:
                self._nm_root = self._nm_max_idx = self._nm_docket = None


class InlinedIndexObject(BaseIndexObject):
    def __init__(self, data, inline=0):
        self._data = data
        self._lgt = self._inline_scan(None)
        self._inline_scan(self._lgt)
        self._extra = []

    def _inline_scan(self, lgt):
        off = 0
        if lgt is not None:
            self._offsets = [0] * lgt
        count = 0
        while off <= len(self._data) - self.entry_size:
            start = off + self.big_int_size
            (s,) = struct.unpack(
                b'>i',
                self._data[start : start + self.int_size],
            )
            if lgt is not None:
                self._offsets[count] = off
            count += 1
            off += self.entry_size + s
        if off != len(self._data):
            raise ValueError(b"corrupted data")
        return count

    def __delitem__(self, i):
        if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
            raise ValueError(b"deleting slices only supports a:-1 with step 1")
        i = i.start
        self._check_index(i)
        self._stripnodes(i)
        if i < self._lgt:
            self._offsets = self._offsets[:i]
            self._lgt = i
            self._extra = []
        else:
            self._extra = self._extra[: i - self._lgt]

    def _calculate_index(self, i):
        return self._offsets[i]
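

# A standalone sketch (not part of parsers.py) of the scan performed by
# _inline_scan() above: an inline revlog interleaves each fixed-size index
# entry with its compressed chunk, whose length is the 4-byte field located
# big_int_size bytes into the entry. The sizes used here (entry_size=64,
# big_int_size=8, int_size=4) are assumptions matching the v1 layout.
def _sketch_inline_scan():
    import struct

    entry_size, big_int_size, int_size = 64, 8, 4

    # Two fake 64-byte entries, each followed by the chunk its length field
    # announces (3 and 5 bytes respectively).
    data = (
        b"\x00" * 8 + struct.pack(b">i", 3) + b"\x00" * 52 + b"abc"
        + b"\x00" * 8 + struct.pack(b">i", 5) + b"\x00" * 52 + b"hello"
    )

    offsets = []
    off = 0
    while off <= len(data) - entry_size:
        start = off + big_int_size
        (chunk_len,) = struct.unpack(b">i", data[start : start + int_size])
        offsets.append(off)
        off += entry_size + chunk_len
    if off != len(data):
        raise ValueError("corrupted data")
    assert offsets == [0, 67]
    return offsets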


def parse_index2(data, inline, revlogv2=False):
    if not inline:
        cls = IndexObject2 if revlogv2 else IndexObject
        return cls(data), None
    cls = InlinedIndexObject
    return cls(data, inline), (0, data)
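

# A small usage sketch, not part of parsers.py; it assumes the mercurial
# package is importable (e.g. a source checkout on sys.path). Non-inline
# data comes back with no cache tuple, while inline data is also returned
# so callers can slice revision chunks straight out of the index buffer.
def _sketch_parse_index2_dispatch():
    from mercurial.pure.parsers import parse_index2

    index, cache = parse_index2(b'', inline=False)
    assert cache is None  # plain IndexObject, nothing to cache

    inline_index, inline_cache = parse_index2(b'', inline=1)
    assert inline_cache == (0, b'')  # the raw buffer rides along
    return type(index).__name__, type(inline_index).__name__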


def parse_index_cl_v2(data):
    return IndexChangelogV2(data), None


class IndexObject2(IndexObject):
    index_format = revlog_constants.INDEX_ENTRY_V2

    def replace_sidedata_info(
        self,
        rev,
        sidedata_offset,
        sidedata_length,
        offset_flags,
        compression_mode,
    ):
        """
        Replace an existing index entry's sidedata offset and length with new
        ones.
        This cannot be used outside of the context of sidedata rewriting,
        inside the transaction that creates the revision `rev`.
        """
        if rev < 0:
            raise KeyError
        self._check_index(rev)
        if rev < self._lgt:
            msg = b"cannot rewrite entries outside of this transaction"
            raise KeyError(msg)
        else:
            entry = list(self[rev])
            entry[0] = offset_flags
            entry[8] = sidedata_offset
            entry[9] = sidedata_length
            entry[11] = compression_mode
            entry = tuple(entry)
            new = self._pack_entry(rev, entry)
            self._extra[rev - self._lgt] = new

    def _unpack_entry(self, rev, data):
        data = self.index_format.unpack(data)
        entry = data[:10]
        data_comp = data[10] & 3
        sidedata_comp = (data[10] & (3 << 2)) >> 2
        return entry + (data_comp, sidedata_comp)

    def _pack_entry(self, rev, entry):
        data = entry[:10]
        data_comp = entry[10] & 3
        sidedata_comp = (entry[11] & 3) << 2
        data += (data_comp | sidedata_comp,)

        return self.index_format.pack(*data)

    def entry_binary(self, rev):
        """return the raw binary string representing a revision"""
        entry = self[rev]
        return self._pack_entry(rev, entry)

    def pack_header(self, header):
        """pack header information as binary"""
        msg = 'version header should go in the docket, not the index: %d'
        msg %= header
        raise error.ProgrammingError(msg)


class IndexChangelogV2(IndexObject2):
    index_format = revlog_constants.INDEX_ENTRY_CL_V2

    def _unpack_entry(self, rev, data, r=True):
        items = self.index_format.unpack(data)
        entry = items[:3] + (rev, rev) + items[3:8]
        data_comp = items[8] & 3
        sidedata_comp = (items[8] >> 2) & 3
        return entry + (data_comp, sidedata_comp)

    def _pack_entry(self, rev, entry):
        assert entry[3] == rev, entry[3]
        assert entry[4] == rev, entry[4]
        data = entry[:3] + entry[5:10]
        data_comp = entry[10] & 3
        sidedata_comp = (entry[11] & 3) << 2
        data += (data_comp | sidedata_comp,)
        return self.index_format.pack(*data)


def parse_index_devel_nodemap(data, inline):
    """like parse_index2, but always return a PersistentNodeMapIndexObject"""
    return PersistentNodeMapIndexObject(data), None


def parse_dirstate(dmap, copymap, st):
    parents = [st[:20], st[20:40]]
    # dereference fields so they will be local in loop
    format = b">cllll"
    e_size = struct.calcsize(format)
    pos1 = 40
    l = len(st)

    # the inner loop
    while pos1 < l:
        pos2 = pos1 + e_size
        e = _unpack(b">cllll", st[pos1:pos2])  # a literal here is faster
        pos1 = pos2 + e[4]
        f = st[pos2:pos1]
        if b'\0' in f:
            f, c = f.split(b'\0')
            copymap[f] = c
        dmap[f] = DirstateItem.from_v1_data(*e[:4])
    return parents
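

# A standalone sketch (not part of parsers.py) of the v1 dirstate wire
# format parsed above: 2 x 20 bytes of parent nodes, then repeated
# ">cllll" entries (state, mode, size, mtime, filename length), each
# followed by the filename with an optional "\0copy-source" suffix.
def _sketch_parse_dirstate_v1():
    import struct

    entry_fmt = b">cllll"
    name = b"renamed.txt"
    source = b"original.txt"
    stored = name + b"\0" + source
    blob = (
        b"\x11" * 20  # first parent
        + b"\x22" * 20  # second parent
        + struct.pack(entry_fmt, b"n", 0o644, 11, 0, len(stored))
        + stored
    )

    parents = [blob[:20], blob[20:40]]
    e_size = struct.calcsize(entry_fmt)
    pos1 = 40
    entries, copies = {}, {}
    while pos1 < len(blob):
        pos2 = pos1 + e_size
        e = struct.unpack(entry_fmt, blob[pos1:pos2])
        pos1 = pos2 + e[4]
        f = blob[pos2:pos1]
        if b"\0" in f:
            f, c = f.split(b"\0")
            copies[f] = c
        entries[f] = e[:4]  # parse_dirstate wraps these in a DirstateItem

    assert copies == {b"renamed.txt": b"original.txt"}
    assert entries[b"renamed.txt"] == (b"n", 0o644, 11, 0)
    return parents, entries, copies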


def pack_dirstate(dmap, copymap, pl, now):
    now = int(now)
    cs = stringio()
    write = cs.write
    write(b"".join(pl))
    for f, e in pycompat.iteritems(dmap):
        if e.need_delay(now):
            # The file was last modified "simultaneously" with the current
            # write to dirstate (i.e. within the same second for file-
            # systems with a granularity of 1 sec). This commonly happens
            # for at least a couple of files on 'update'.
            # The user could change the file without changing its size
            # within the same second. Invalidate the file's mtime in
            # dirstate, forcing future 'status' calls to compare the
            # contents of the file if the size is the same. This prevents
            # mistakenly treating such files as clean.
            e.set_possibly_dirty()

        if f in copymap:
            f = b"%s\0%s" % (f, copymap[f])
        e = _pack(
            b">cllll",
            e.v1_state(),
            e.v1_mode(),
            e.v1_size(),
            e.v1_mtime(),
            len(f),
        )
        write(e)
        write(f)
    return cs.getvalue()
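

# A standalone sketch (not part of parsers.py) of why need_delay() and
# set_possibly_dirty() exist: two writes landing within the same second
# leave the same truncated mtime even though the contents differ, so
# (size, mtime) alone cannot prove the file is clean.
def _sketch_same_second_ambiguity(tmpdir):
    import os

    path = os.path.join(tmpdir, "f")
    with open(path, "wb") as fp:
        fp.write(b"first version ")
    mtime_a = int(os.stat(path).st_mtime)

    with open(path, "wb") as fp:
        fp.write(b"other contents")  # same size, written right after
    mtime_b = int(os.stat(path).st_mtime)

    # Usually true when both writes happen within one second on a
    # filesystem with 1s granularity: identical size and integer mtime.
    return mtime_a == mtime_b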