##// END OF EJS Templates
dirstate-item: introduce a `dm_nonnormal` property...
marmoute -
r48485:265cdfaa default
parent child Browse files
Show More
@@ -1,968 +1,979
1 /*
1 /*
2 parsers.c - efficient content parsing
2 parsers.c - efficient content parsing
3
3
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
5
5
6 This software may be used and distributed according to the terms of
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
7 the GNU General Public License, incorporated herein by reference.
8 */
8 */
9
9
10 #define PY_SSIZE_T_CLEAN
10 #define PY_SSIZE_T_CLEAN
11 #include <Python.h>
11 #include <Python.h>
12 #include <ctype.h>
12 #include <ctype.h>
13 #include <stddef.h>
13 #include <stddef.h>
14 #include <string.h>
14 #include <string.h>
15
15
16 #include "bitmanipulation.h"
16 #include "bitmanipulation.h"
17 #include "charencode.h"
17 #include "charencode.h"
18 #include "util.h"
18 #include "util.h"
19
19
20 #ifdef IS_PY3K
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 support is fully
22 * 3 to compile. We should remove this once Python 3 support is fully
23 * supported and proper types are used in the extensions themselves. */
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
27 #define PyInt_AsLong PyLong_AsLong
28 #endif
28 #endif
29
29
30 static const char *const versionerrortext = "Python minor version mismatch";
30 static const char *const versionerrortext = "Python minor version mismatch";
31
31
32 static const int dirstate_v1_from_p2 = -2;
32 static const int dirstate_v1_from_p2 = -2;
33 static const int dirstate_v1_nonnormal = -1;
33 static const int dirstate_v1_nonnormal = -1;
34 static const int ambiguous_time = -1;
34 static const int ambiguous_time = -1;
35
35
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
37 {
37 {
38 Py_ssize_t expected_size;
38 Py_ssize_t expected_size;
39
39
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
41 return NULL;
41 return NULL;
42 }
42 }
43
43
44 return _dict_new_presized(expected_size);
44 return _dict_new_presized(expected_size);
45 }
45 }
46
46
47 static inline dirstateItemObject *make_dirstate_item(char state, int mode,
47 static inline dirstateItemObject *make_dirstate_item(char state, int mode,
48 int size, int mtime)
48 int size, int mtime)
49 {
49 {
50 dirstateItemObject *t =
50 dirstateItemObject *t =
51 PyObject_New(dirstateItemObject, &dirstateItemType);
51 PyObject_New(dirstateItemObject, &dirstateItemType);
52 if (!t) {
52 if (!t) {
53 return NULL;
53 return NULL;
54 }
54 }
55 t->state = state;
55 t->state = state;
56 t->mode = mode;
56 t->mode = mode;
57 t->size = size;
57 t->size = size;
58 t->mtime = mtime;
58 t->mtime = mtime;
59 return t;
59 return t;
60 }
60 }
61
61
62 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
62 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
63 PyObject *kwds)
63 PyObject *kwds)
64 {
64 {
65 /* We do all the initialization here and not a tp_init function because
65 /* We do all the initialization here and not a tp_init function because
66 * dirstate_item is immutable. */
66 * dirstate_item is immutable. */
67 dirstateItemObject *t;
67 dirstateItemObject *t;
68 char state;
68 char state;
69 int size, mode, mtime;
69 int size, mode, mtime;
70 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
70 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
71 return NULL;
71 return NULL;
72 }
72 }
73
73
74 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
74 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
75 if (!t) {
75 if (!t) {
76 return NULL;
76 return NULL;
77 }
77 }
78 t->state = state;
78 t->state = state;
79 t->mode = mode;
79 t->mode = mode;
80 t->size = size;
80 t->size = size;
81 t->mtime = mtime;
81 t->mtime = mtime;
82
82
83 return (PyObject *)t;
83 return (PyObject *)t;
84 }
84 }
85
85
86 static void dirstate_item_dealloc(PyObject *o)
86 static void dirstate_item_dealloc(PyObject *o)
87 {
87 {
88 PyObject_Del(o);
88 PyObject_Del(o);
89 }
89 }
90
90
91 static Py_ssize_t dirstate_item_length(PyObject *o)
91 static Py_ssize_t dirstate_item_length(PyObject *o)
92 {
92 {
93 return 4;
93 return 4;
94 }
94 }
95
95
96 static PyObject *dirstate_item_item(PyObject *o, Py_ssize_t i)
96 static PyObject *dirstate_item_item(PyObject *o, Py_ssize_t i)
97 {
97 {
98 dirstateItemObject *t = (dirstateItemObject *)o;
98 dirstateItemObject *t = (dirstateItemObject *)o;
99 switch (i) {
99 switch (i) {
100 case 0:
100 case 0:
101 return PyBytes_FromStringAndSize(&t->state, 1);
101 return PyBytes_FromStringAndSize(&t->state, 1);
102 case 1:
102 case 1:
103 return PyInt_FromLong(t->mode);
103 return PyInt_FromLong(t->mode);
104 case 2:
104 case 2:
105 return PyInt_FromLong(t->size);
105 return PyInt_FromLong(t->size);
106 case 3:
106 case 3:
107 return PyInt_FromLong(t->mtime);
107 return PyInt_FromLong(t->mtime);
108 default:
108 default:
109 PyErr_SetString(PyExc_IndexError, "index out of range");
109 PyErr_SetString(PyExc_IndexError, "index out of range");
110 return NULL;
110 return NULL;
111 }
111 }
112 }
112 }
113
113
114 static PySequenceMethods dirstate_item_sq = {
114 static PySequenceMethods dirstate_item_sq = {
115 dirstate_item_length, /* sq_length */
115 dirstate_item_length, /* sq_length */
116 0, /* sq_concat */
116 0, /* sq_concat */
117 0, /* sq_repeat */
117 0, /* sq_repeat */
118 dirstate_item_item, /* sq_item */
118 dirstate_item_item, /* sq_item */
119 0, /* sq_ass_item */
119 0, /* sq_ass_item */
120 0, /* sq_contains */
120 0, /* sq_contains */
121 0, /* sq_inplace_concat */
121 0, /* sq_inplace_concat */
122 0 /* sq_inplace_repeat */
122 0 /* sq_inplace_repeat */
123 };
123 };
124
124
125 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
125 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
126 {
126 {
127 return PyBytes_FromStringAndSize(&self->state, 1);
127 return PyBytes_FromStringAndSize(&self->state, 1);
128 };
128 };
129
129
130 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
130 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
131 {
131 {
132 return PyInt_FromLong(self->mode);
132 return PyInt_FromLong(self->mode);
133 };
133 };
134
134
135 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
135 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
136 {
136 {
137 return PyInt_FromLong(self->size);
137 return PyInt_FromLong(self->size);
138 };
138 };
139
139
140 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
140 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
141 {
141 {
142 return PyInt_FromLong(self->mtime);
142 return PyInt_FromLong(self->mtime);
143 };
143 };
144
144
145 static PyObject *dm_nonnormal(dirstateItemObject *self)
146 {
147 if (self->state != 'n' || self->mtime == ambiguous_time) {
148 Py_RETURN_TRUE;
149 } else {
150 Py_RETURN_FALSE;
151 }
152 };
153
145 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
154 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
146 PyObject *value)
155 PyObject *value)
147 {
156 {
148 long now;
157 long now;
149 if (!pylong_to_long(value, &now)) {
158 if (!pylong_to_long(value, &now)) {
150 return NULL;
159 return NULL;
151 }
160 }
152 if (self->state == 'n' && self->mtime == now) {
161 if (self->state == 'n' && self->mtime == now) {
153 Py_RETURN_TRUE;
162 Py_RETURN_TRUE;
154 } else {
163 } else {
155 Py_RETURN_FALSE;
164 Py_RETURN_FALSE;
156 }
165 }
157 };
166 };
158
167
159 /* This will never change since it's bound to V1, unlike `make_dirstate_item`
168 /* This will never change since it's bound to V1, unlike `make_dirstate_item`
160 */
169 */
161 static inline dirstateItemObject *
170 static inline dirstateItemObject *
162 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
171 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
163 {
172 {
164 dirstateItemObject *t =
173 dirstateItemObject *t =
165 PyObject_New(dirstateItemObject, &dirstateItemType);
174 PyObject_New(dirstateItemObject, &dirstateItemType);
166 if (!t) {
175 if (!t) {
167 return NULL;
176 return NULL;
168 }
177 }
169 t->state = state;
178 t->state = state;
170 t->mode = mode;
179 t->mode = mode;
171 t->size = size;
180 t->size = size;
172 t->mtime = mtime;
181 t->mtime = mtime;
173 return t;
182 return t;
174 }
183 }
175
184
176 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
185 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
177 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
186 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
178 PyObject *args)
187 PyObject *args)
179 {
188 {
180 /* We do all the initialization here and not a tp_init function because
189 /* We do all the initialization here and not a tp_init function because
181 * dirstate_item is immutable. */
190 * dirstate_item is immutable. */
182 dirstateItemObject *t;
191 dirstateItemObject *t;
183 char state;
192 char state;
184 int size, mode, mtime;
193 int size, mode, mtime;
185 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
194 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
186 return NULL;
195 return NULL;
187 }
196 }
188
197
189 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
198 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
190 if (!t) {
199 if (!t) {
191 return NULL;
200 return NULL;
192 }
201 }
193 t->state = state;
202 t->state = state;
194 t->mode = mode;
203 t->mode = mode;
195 t->size = size;
204 t->size = size;
196 t->mtime = mtime;
205 t->mtime = mtime;
197
206
198 return (PyObject *)t;
207 return (PyObject *)t;
199 };
208 };
200
209
201 /* This means the next status call will have to actually check its content
210 /* This means the next status call will have to actually check its content
202 to make sure it is correct. */
211 to make sure it is correct. */
203 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
212 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
204 {
213 {
205 self->mtime = ambiguous_time;
214 self->mtime = ambiguous_time;
206 Py_RETURN_NONE;
215 Py_RETURN_NONE;
207 }
216 }
208
217
209 static PyMethodDef dirstate_item_methods[] = {
218 static PyMethodDef dirstate_item_methods[] = {
210 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
219 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
211 "return a \"state\" suitable for v1 serialization"},
220 "return a \"state\" suitable for v1 serialization"},
212 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
221 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
213 "return a \"mode\" suitable for v1 serialization"},
222 "return a \"mode\" suitable for v1 serialization"},
214 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
223 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
215 "return a \"size\" suitable for v1 serialization"},
224 "return a \"size\" suitable for v1 serialization"},
216 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
225 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
217 "return a \"mtime\" suitable for v1 serialization"},
226 "return a \"mtime\" suitable for v1 serialization"},
218 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
227 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
219 "True if the stored mtime would be ambiguous with the current time"},
228 "True if the stored mtime would be ambiguous with the current time"},
220 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth, METH_O,
229 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth, METH_O,
221 "build a new DirstateItem object from V1 data"},
230 "build a new DirstateItem object from V1 data"},
222 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
231 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
223 METH_NOARGS, "mark a file as \"possibly dirty\""},
232 METH_NOARGS, "mark a file as \"possibly dirty\""},
233 {"dm_nonnormal", (PyCFunction)dm_nonnormal, METH_NOARGS,
234 "True is the entry is non-normal in the dirstatemap sense"},
224 {NULL} /* Sentinel */
235 {NULL} /* Sentinel */
225 };
236 };
226
237
227 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
238 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
228 {
239 {
229 return PyInt_FromLong(self->mode);
240 return PyInt_FromLong(self->mode);
230 };
241 };
231
242
232 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
243 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
233 {
244 {
234 return PyInt_FromLong(self->size);
245 return PyInt_FromLong(self->size);
235 };
246 };
236
247
237 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
248 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
238 {
249 {
239 return PyInt_FromLong(self->mtime);
250 return PyInt_FromLong(self->mtime);
240 };
251 };
241
252
242 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
253 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
243 {
254 {
244 return PyBytes_FromStringAndSize(&self->state, 1);
255 return PyBytes_FromStringAndSize(&self->state, 1);
245 };
256 };
246
257
247 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
258 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
248 {
259 {
249 if (self->state == 'a' || self->state == 'm' || self->state == 'n') {
260 if (self->state == 'a' || self->state == 'm' || self->state == 'n') {
250 Py_RETURN_TRUE;
261 Py_RETURN_TRUE;
251 } else {
262 } else {
252 Py_RETURN_FALSE;
263 Py_RETURN_FALSE;
253 }
264 }
254 };
265 };
255
266
256 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
267 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
257 {
268 {
258 if (self->state == 'a') {
269 if (self->state == 'a') {
259 Py_RETURN_TRUE;
270 Py_RETURN_TRUE;
260 } else {
271 } else {
261 Py_RETURN_FALSE;
272 Py_RETURN_FALSE;
262 }
273 }
263 };
274 };
264
275
265 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
276 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
266 {
277 {
267 if (self->state == 'm') {
278 if (self->state == 'm') {
268 Py_RETURN_TRUE;
279 Py_RETURN_TRUE;
269 } else {
280 } else {
270 Py_RETURN_FALSE;
281 Py_RETURN_FALSE;
271 }
282 }
272 };
283 };
273
284
274 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
285 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
275 {
286 {
276 if (self->state == 'r' && self->size == dirstate_v1_nonnormal) {
287 if (self->state == 'r' && self->size == dirstate_v1_nonnormal) {
277 Py_RETURN_TRUE;
288 Py_RETURN_TRUE;
278 } else {
289 } else {
279 Py_RETURN_FALSE;
290 Py_RETURN_FALSE;
280 }
291 }
281 };
292 };
282
293
283 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
294 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
284 {
295 {
285 if (self->state == 'n' && self->size == dirstate_v1_from_p2) {
296 if (self->state == 'n' && self->size == dirstate_v1_from_p2) {
286 Py_RETURN_TRUE;
297 Py_RETURN_TRUE;
287 } else {
298 } else {
288 Py_RETURN_FALSE;
299 Py_RETURN_FALSE;
289 }
300 }
290 };
301 };
291
302
292 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
303 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
293 {
304 {
294 if (self->state == 'r' && self->size == dirstate_v1_from_p2) {
305 if (self->state == 'r' && self->size == dirstate_v1_from_p2) {
295 Py_RETURN_TRUE;
306 Py_RETURN_TRUE;
296 } else {
307 } else {
297 Py_RETURN_FALSE;
308 Py_RETURN_FALSE;
298 }
309 }
299 };
310 };
300
311
301 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
312 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
302 {
313 {
303 if (self->state == 'r') {
314 if (self->state == 'r') {
304 Py_RETURN_TRUE;
315 Py_RETURN_TRUE;
305 } else {
316 } else {
306 Py_RETURN_FALSE;
317 Py_RETURN_FALSE;
307 }
318 }
308 };
319 };
309
320
310 static PyGetSetDef dirstate_item_getset[] = {
321 static PyGetSetDef dirstate_item_getset[] = {
311 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
322 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
312 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
323 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
313 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
324 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
314 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
325 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
315 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
326 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
316 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
327 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
317 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
328 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
318 "merged_removed", NULL},
329 "merged_removed", NULL},
319 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
330 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
320 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
331 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
321 "from_p2_removed", NULL},
332 "from_p2_removed", NULL},
322 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
333 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
323 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
334 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
324 {NULL} /* Sentinel */
335 {NULL} /* Sentinel */
325 };
336 };
326
337
327 PyTypeObject dirstateItemType = {
338 PyTypeObject dirstateItemType = {
328 PyVarObject_HEAD_INIT(NULL, 0) /* header */
339 PyVarObject_HEAD_INIT(NULL, 0) /* header */
329 "dirstate_tuple", /* tp_name */
340 "dirstate_tuple", /* tp_name */
330 sizeof(dirstateItemObject), /* tp_basicsize */
341 sizeof(dirstateItemObject), /* tp_basicsize */
331 0, /* tp_itemsize */
342 0, /* tp_itemsize */
332 (destructor)dirstate_item_dealloc, /* tp_dealloc */
343 (destructor)dirstate_item_dealloc, /* tp_dealloc */
333 0, /* tp_print */
344 0, /* tp_print */
334 0, /* tp_getattr */
345 0, /* tp_getattr */
335 0, /* tp_setattr */
346 0, /* tp_setattr */
336 0, /* tp_compare */
347 0, /* tp_compare */
337 0, /* tp_repr */
348 0, /* tp_repr */
338 0, /* tp_as_number */
349 0, /* tp_as_number */
339 &dirstate_item_sq, /* tp_as_sequence */
350 &dirstate_item_sq, /* tp_as_sequence */
340 0, /* tp_as_mapping */
351 0, /* tp_as_mapping */
341 0, /* tp_hash */
352 0, /* tp_hash */
342 0, /* tp_call */
353 0, /* tp_call */
343 0, /* tp_str */
354 0, /* tp_str */
344 0, /* tp_getattro */
355 0, /* tp_getattro */
345 0, /* tp_setattro */
356 0, /* tp_setattro */
346 0, /* tp_as_buffer */
357 0, /* tp_as_buffer */
347 Py_TPFLAGS_DEFAULT, /* tp_flags */
358 Py_TPFLAGS_DEFAULT, /* tp_flags */
348 "dirstate tuple", /* tp_doc */
359 "dirstate tuple", /* tp_doc */
349 0, /* tp_traverse */
360 0, /* tp_traverse */
350 0, /* tp_clear */
361 0, /* tp_clear */
351 0, /* tp_richcompare */
362 0, /* tp_richcompare */
352 0, /* tp_weaklistoffset */
363 0, /* tp_weaklistoffset */
353 0, /* tp_iter */
364 0, /* tp_iter */
354 0, /* tp_iternext */
365 0, /* tp_iternext */
355 dirstate_item_methods, /* tp_methods */
366 dirstate_item_methods, /* tp_methods */
356 0, /* tp_members */
367 0, /* tp_members */
357 dirstate_item_getset, /* tp_getset */
368 dirstate_item_getset, /* tp_getset */
358 0, /* tp_base */
369 0, /* tp_base */
359 0, /* tp_dict */
370 0, /* tp_dict */
360 0, /* tp_descr_get */
371 0, /* tp_descr_get */
361 0, /* tp_descr_set */
372 0, /* tp_descr_set */
362 0, /* tp_dictoffset */
373 0, /* tp_dictoffset */
363 0, /* tp_init */
374 0, /* tp_init */
364 0, /* tp_alloc */
375 0, /* tp_alloc */
365 dirstate_item_new, /* tp_new */
376 dirstate_item_new, /* tp_new */
366 };
377 };
367
378
368 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
379 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
369 {
380 {
370 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
381 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
371 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
382 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
372 char state, *cur, *str, *cpos;
383 char state, *cur, *str, *cpos;
373 int mode, size, mtime;
384 int mode, size, mtime;
374 unsigned int flen, pos = 40;
385 unsigned int flen, pos = 40;
375 Py_ssize_t len = 40;
386 Py_ssize_t len = 40;
376 Py_ssize_t readlen;
387 Py_ssize_t readlen;
377
388
378 if (!PyArg_ParseTuple(
389 if (!PyArg_ParseTuple(
379 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
390 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
380 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
391 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
381 goto quit;
392 goto quit;
382 }
393 }
383
394
384 len = readlen;
395 len = readlen;
385
396
386 /* read parents */
397 /* read parents */
387 if (len < 40) {
398 if (len < 40) {
388 PyErr_SetString(PyExc_ValueError,
399 PyErr_SetString(PyExc_ValueError,
389 "too little data for parents");
400 "too little data for parents");
390 goto quit;
401 goto quit;
391 }
402 }
392
403
393 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
404 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
394 str + 20, (Py_ssize_t)20);
405 str + 20, (Py_ssize_t)20);
395 if (!parents) {
406 if (!parents) {
396 goto quit;
407 goto quit;
397 }
408 }
398
409
399 /* read filenames */
410 /* read filenames */
400 while (pos >= 40 && pos < len) {
411 while (pos >= 40 && pos < len) {
401 if (pos + 17 > len) {
412 if (pos + 17 > len) {
402 PyErr_SetString(PyExc_ValueError,
413 PyErr_SetString(PyExc_ValueError,
403 "overflow in dirstate");
414 "overflow in dirstate");
404 goto quit;
415 goto quit;
405 }
416 }
406 cur = str + pos;
417 cur = str + pos;
407 /* unpack header */
418 /* unpack header */
408 state = *cur;
419 state = *cur;
409 mode = getbe32(cur + 1);
420 mode = getbe32(cur + 1);
410 size = getbe32(cur + 5);
421 size = getbe32(cur + 5);
411 mtime = getbe32(cur + 9);
422 mtime = getbe32(cur + 9);
412 flen = getbe32(cur + 13);
423 flen = getbe32(cur + 13);
413 pos += 17;
424 pos += 17;
414 cur += 17;
425 cur += 17;
415 if (flen > len - pos) {
426 if (flen > len - pos) {
416 PyErr_SetString(PyExc_ValueError,
427 PyErr_SetString(PyExc_ValueError,
417 "overflow in dirstate");
428 "overflow in dirstate");
418 goto quit;
429 goto quit;
419 }
430 }
420
431
421 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
432 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
422 size, mtime);
433 size, mtime);
423 cpos = memchr(cur, 0, flen);
434 cpos = memchr(cur, 0, flen);
424 if (cpos) {
435 if (cpos) {
425 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
436 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
426 cname = PyBytes_FromStringAndSize(
437 cname = PyBytes_FromStringAndSize(
427 cpos + 1, flen - (cpos - cur) - 1);
438 cpos + 1, flen - (cpos - cur) - 1);
428 if (!fname || !cname ||
439 if (!fname || !cname ||
429 PyDict_SetItem(cmap, fname, cname) == -1 ||
440 PyDict_SetItem(cmap, fname, cname) == -1 ||
430 PyDict_SetItem(dmap, fname, entry) == -1) {
441 PyDict_SetItem(dmap, fname, entry) == -1) {
431 goto quit;
442 goto quit;
432 }
443 }
433 Py_DECREF(cname);
444 Py_DECREF(cname);
434 } else {
445 } else {
435 fname = PyBytes_FromStringAndSize(cur, flen);
446 fname = PyBytes_FromStringAndSize(cur, flen);
436 if (!fname ||
447 if (!fname ||
437 PyDict_SetItem(dmap, fname, entry) == -1) {
448 PyDict_SetItem(dmap, fname, entry) == -1) {
438 goto quit;
449 goto quit;
439 }
450 }
440 }
451 }
441 Py_DECREF(fname);
452 Py_DECREF(fname);
442 Py_DECREF(entry);
453 Py_DECREF(entry);
443 fname = cname = entry = NULL;
454 fname = cname = entry = NULL;
444 pos += flen;
455 pos += flen;
445 }
456 }
446
457
447 ret = parents;
458 ret = parents;
448 Py_INCREF(ret);
459 Py_INCREF(ret);
449 quit:
460 quit:
450 Py_XDECREF(fname);
461 Py_XDECREF(fname);
451 Py_XDECREF(cname);
462 Py_XDECREF(cname);
452 Py_XDECREF(entry);
463 Py_XDECREF(entry);
453 Py_XDECREF(parents);
464 Py_XDECREF(parents);
454 return ret;
465 return ret;
455 }
466 }
456
467
457 /*
468 /*
458 * Build a set of non-normal and other parent entries from the dirstate dmap
469 * Build a set of non-normal and other parent entries from the dirstate dmap
459 */
470 */
460 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
471 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
461 {
472 {
462 PyObject *dmap, *fname, *v;
473 PyObject *dmap, *fname, *v;
463 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
474 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
464 Py_ssize_t pos;
475 Py_ssize_t pos;
465
476
466 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
477 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
467 &dmap)) {
478 &dmap)) {
468 goto bail;
479 goto bail;
469 }
480 }
470
481
471 nonnset = PySet_New(NULL);
482 nonnset = PySet_New(NULL);
472 if (nonnset == NULL) {
483 if (nonnset == NULL) {
473 goto bail;
484 goto bail;
474 }
485 }
475
486
476 otherpset = PySet_New(NULL);
487 otherpset = PySet_New(NULL);
477 if (otherpset == NULL) {
488 if (otherpset == NULL) {
478 goto bail;
489 goto bail;
479 }
490 }
480
491
481 pos = 0;
492 pos = 0;
482 while (PyDict_Next(dmap, &pos, &fname, &v)) {
493 while (PyDict_Next(dmap, &pos, &fname, &v)) {
483 dirstateItemObject *t;
494 dirstateItemObject *t;
484 if (!dirstate_tuple_check(v)) {
495 if (!dirstate_tuple_check(v)) {
485 PyErr_SetString(PyExc_TypeError,
496 PyErr_SetString(PyExc_TypeError,
486 "expected a dirstate tuple");
497 "expected a dirstate tuple");
487 goto bail;
498 goto bail;
488 }
499 }
489 t = (dirstateItemObject *)v;
500 t = (dirstateItemObject *)v;
490
501
491 if (t->state == 'n' && t->size == -2) {
502 if (t->state == 'n' && t->size == -2) {
492 if (PySet_Add(otherpset, fname) == -1) {
503 if (PySet_Add(otherpset, fname) == -1) {
493 goto bail;
504 goto bail;
494 }
505 }
495 }
506 }
496
507
497 if (t->state == 'n' && t->mtime != -1) {
508 if (t->state == 'n' && t->mtime != -1) {
498 continue;
509 continue;
499 }
510 }
500 if (PySet_Add(nonnset, fname) == -1) {
511 if (PySet_Add(nonnset, fname) == -1) {
501 goto bail;
512 goto bail;
502 }
513 }
503 }
514 }
504
515
505 result = Py_BuildValue("(OO)", nonnset, otherpset);
516 result = Py_BuildValue("(OO)", nonnset, otherpset);
506 if (result == NULL) {
517 if (result == NULL) {
507 goto bail;
518 goto bail;
508 }
519 }
509 Py_DECREF(nonnset);
520 Py_DECREF(nonnset);
510 Py_DECREF(otherpset);
521 Py_DECREF(otherpset);
511 return result;
522 return result;
512 bail:
523 bail:
513 Py_XDECREF(nonnset);
524 Py_XDECREF(nonnset);
514 Py_XDECREF(otherpset);
525 Py_XDECREF(otherpset);
515 Py_XDECREF(result);
526 Py_XDECREF(result);
516 return NULL;
527 return NULL;
517 }
528 }
518
529
519 /*
530 /*
520 * Efficiently pack a dirstate object into its on-disk format.
531 * Efficiently pack a dirstate object into its on-disk format.
521 */
532 */
522 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
533 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
523 {
534 {
524 PyObject *packobj = NULL;
535 PyObject *packobj = NULL;
525 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
536 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
526 Py_ssize_t nbytes, pos, l;
537 Py_ssize_t nbytes, pos, l;
527 PyObject *k, *v = NULL, *pn;
538 PyObject *k, *v = NULL, *pn;
528 char *p, *s;
539 char *p, *s;
529 int now;
540 int now;
530
541
531 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
542 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
532 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
543 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
533 &now)) {
544 &now)) {
534 return NULL;
545 return NULL;
535 }
546 }
536
547
537 if (PyTuple_Size(pl) != 2) {
548 if (PyTuple_Size(pl) != 2) {
538 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
549 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
539 return NULL;
550 return NULL;
540 }
551 }
541
552
542 /* Figure out how much we need to allocate. */
553 /* Figure out how much we need to allocate. */
543 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
554 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
544 PyObject *c;
555 PyObject *c;
545 if (!PyBytes_Check(k)) {
556 if (!PyBytes_Check(k)) {
546 PyErr_SetString(PyExc_TypeError, "expected string key");
557 PyErr_SetString(PyExc_TypeError, "expected string key");
547 goto bail;
558 goto bail;
548 }
559 }
549 nbytes += PyBytes_GET_SIZE(k) + 17;
560 nbytes += PyBytes_GET_SIZE(k) + 17;
550 c = PyDict_GetItem(copymap, k);
561 c = PyDict_GetItem(copymap, k);
551 if (c) {
562 if (c) {
552 if (!PyBytes_Check(c)) {
563 if (!PyBytes_Check(c)) {
553 PyErr_SetString(PyExc_TypeError,
564 PyErr_SetString(PyExc_TypeError,
554 "expected string key");
565 "expected string key");
555 goto bail;
566 goto bail;
556 }
567 }
557 nbytes += PyBytes_GET_SIZE(c) + 1;
568 nbytes += PyBytes_GET_SIZE(c) + 1;
558 }
569 }
559 }
570 }
560
571
561 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
572 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
562 if (packobj == NULL) {
573 if (packobj == NULL) {
563 goto bail;
574 goto bail;
564 }
575 }
565
576
566 p = PyBytes_AS_STRING(packobj);
577 p = PyBytes_AS_STRING(packobj);
567
578
568 pn = PyTuple_GET_ITEM(pl, 0);
579 pn = PyTuple_GET_ITEM(pl, 0);
569 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
580 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
570 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
581 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
571 goto bail;
582 goto bail;
572 }
583 }
573 memcpy(p, s, l);
584 memcpy(p, s, l);
574 p += 20;
585 p += 20;
575 pn = PyTuple_GET_ITEM(pl, 1);
586 pn = PyTuple_GET_ITEM(pl, 1);
576 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
587 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
577 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
588 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
578 goto bail;
589 goto bail;
579 }
590 }
580 memcpy(p, s, l);
591 memcpy(p, s, l);
581 p += 20;
592 p += 20;
582
593
583 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
594 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
584 dirstateItemObject *tuple;
595 dirstateItemObject *tuple;
585 char state;
596 char state;
586 int mode, size, mtime;
597 int mode, size, mtime;
587 Py_ssize_t len, l;
598 Py_ssize_t len, l;
588 PyObject *o;
599 PyObject *o;
589 char *t;
600 char *t;
590
601
591 if (!dirstate_tuple_check(v)) {
602 if (!dirstate_tuple_check(v)) {
592 PyErr_SetString(PyExc_TypeError,
603 PyErr_SetString(PyExc_TypeError,
593 "expected a dirstate tuple");
604 "expected a dirstate tuple");
594 goto bail;
605 goto bail;
595 }
606 }
596 tuple = (dirstateItemObject *)v;
607 tuple = (dirstateItemObject *)v;
597
608
598 state = tuple->state;
609 state = tuple->state;
599 mode = tuple->mode;
610 mode = tuple->mode;
600 size = tuple->size;
611 size = tuple->size;
601 mtime = tuple->mtime;
612 mtime = tuple->mtime;
602 if (state == 'n' && mtime == now) {
613 if (state == 'n' && mtime == now) {
603 /* See pure/parsers.py:pack_dirstate for why we do
614 /* See pure/parsers.py:pack_dirstate for why we do
604 * this. */
615 * this. */
605 mtime = -1;
616 mtime = -1;
606 mtime_unset = (PyObject *)make_dirstate_item(
617 mtime_unset = (PyObject *)make_dirstate_item(
607 state, mode, size, mtime);
618 state, mode, size, mtime);
608 if (!mtime_unset) {
619 if (!mtime_unset) {
609 goto bail;
620 goto bail;
610 }
621 }
611 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
622 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
612 goto bail;
623 goto bail;
613 }
624 }
614 Py_DECREF(mtime_unset);
625 Py_DECREF(mtime_unset);
615 mtime_unset = NULL;
626 mtime_unset = NULL;
616 }
627 }
617 *p++ = state;
628 *p++ = state;
618 putbe32((uint32_t)mode, p);
629 putbe32((uint32_t)mode, p);
619 putbe32((uint32_t)size, p + 4);
630 putbe32((uint32_t)size, p + 4);
620 putbe32((uint32_t)mtime, p + 8);
631 putbe32((uint32_t)mtime, p + 8);
621 t = p + 12;
632 t = p + 12;
622 p += 16;
633 p += 16;
623 len = PyBytes_GET_SIZE(k);
634 len = PyBytes_GET_SIZE(k);
624 memcpy(p, PyBytes_AS_STRING(k), len);
635 memcpy(p, PyBytes_AS_STRING(k), len);
625 p += len;
636 p += len;
626 o = PyDict_GetItem(copymap, k);
637 o = PyDict_GetItem(copymap, k);
627 if (o) {
638 if (o) {
628 *p++ = '\0';
639 *p++ = '\0';
629 l = PyBytes_GET_SIZE(o);
640 l = PyBytes_GET_SIZE(o);
630 memcpy(p, PyBytes_AS_STRING(o), l);
641 memcpy(p, PyBytes_AS_STRING(o), l);
631 p += l;
642 p += l;
632 len += l + 1;
643 len += l + 1;
633 }
644 }
634 putbe32((uint32_t)len, t);
645 putbe32((uint32_t)len, t);
635 }
646 }
636
647
637 pos = p - PyBytes_AS_STRING(packobj);
648 pos = p - PyBytes_AS_STRING(packobj);
638 if (pos != nbytes) {
649 if (pos != nbytes) {
639 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
650 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
640 (long)pos, (long)nbytes);
651 (long)pos, (long)nbytes);
641 goto bail;
652 goto bail;
642 }
653 }
643
654
644 return packobj;
655 return packobj;
645 bail:
656 bail:
646 Py_XDECREF(mtime_unset);
657 Py_XDECREF(mtime_unset);
647 Py_XDECREF(packobj);
658 Py_XDECREF(packobj);
648 Py_XDECREF(v);
659 Py_XDECREF(v);
649 return NULL;
660 return NULL;
650 }
661 }
651
662
652 #define BUMPED_FIX 1
663 #define BUMPED_FIX 1
653 #define USING_SHA_256 2
664 #define USING_SHA_256 2
654 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
665 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
655
666
656 static PyObject *readshas(const char *source, unsigned char num,
667 static PyObject *readshas(const char *source, unsigned char num,
657 Py_ssize_t hashwidth)
668 Py_ssize_t hashwidth)
658 {
669 {
659 int i;
670 int i;
660 PyObject *list = PyTuple_New(num);
671 PyObject *list = PyTuple_New(num);
661 if (list == NULL) {
672 if (list == NULL) {
662 return NULL;
673 return NULL;
663 }
674 }
664 for (i = 0; i < num; i++) {
675 for (i = 0; i < num; i++) {
665 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
676 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
666 if (hash == NULL) {
677 if (hash == NULL) {
667 Py_DECREF(list);
678 Py_DECREF(list);
668 return NULL;
679 return NULL;
669 }
680 }
670 PyTuple_SET_ITEM(list, i, hash);
681 PyTuple_SET_ITEM(list, i, hash);
671 source += hashwidth;
682 source += hashwidth;
672 }
683 }
673 return list;
684 return list;
674 }
685 }
675
686
676 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
687 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
677 uint32_t *msize)
688 uint32_t *msize)
678 {
689 {
679 const char *data = databegin;
690 const char *data = databegin;
680 const char *meta;
691 const char *meta;
681
692
682 double mtime;
693 double mtime;
683 int16_t tz;
694 int16_t tz;
684 uint16_t flags;
695 uint16_t flags;
685 unsigned char nsuccs, nparents, nmetadata;
696 unsigned char nsuccs, nparents, nmetadata;
686 Py_ssize_t hashwidth = 20;
697 Py_ssize_t hashwidth = 20;
687
698
688 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
699 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
689 PyObject *metadata = NULL, *ret = NULL;
700 PyObject *metadata = NULL, *ret = NULL;
690 int i;
701 int i;
691
702
692 if (data + FM1_HEADER_SIZE > dataend) {
703 if (data + FM1_HEADER_SIZE > dataend) {
693 goto overflow;
704 goto overflow;
694 }
705 }
695
706
696 *msize = getbe32(data);
707 *msize = getbe32(data);
697 data += 4;
708 data += 4;
698 mtime = getbefloat64(data);
709 mtime = getbefloat64(data);
699 data += 8;
710 data += 8;
700 tz = getbeint16(data);
711 tz = getbeint16(data);
701 data += 2;
712 data += 2;
702 flags = getbeuint16(data);
713 flags = getbeuint16(data);
703 data += 2;
714 data += 2;
704
715
705 if (flags & USING_SHA_256) {
716 if (flags & USING_SHA_256) {
706 hashwidth = 32;
717 hashwidth = 32;
707 }
718 }
708
719
709 nsuccs = (unsigned char)(*data++);
720 nsuccs = (unsigned char)(*data++);
710 nparents = (unsigned char)(*data++);
721 nparents = (unsigned char)(*data++);
711 nmetadata = (unsigned char)(*data++);
722 nmetadata = (unsigned char)(*data++);
712
723
713 if (databegin + *msize > dataend) {
724 if (databegin + *msize > dataend) {
714 goto overflow;
725 goto overflow;
715 }
726 }
716 dataend = databegin + *msize; /* narrow down to marker size */
727 dataend = databegin + *msize; /* narrow down to marker size */
717
728
718 if (data + hashwidth > dataend) {
729 if (data + hashwidth > dataend) {
719 goto overflow;
730 goto overflow;
720 }
731 }
721 prec = PyBytes_FromStringAndSize(data, hashwidth);
732 prec = PyBytes_FromStringAndSize(data, hashwidth);
722 data += hashwidth;
733 data += hashwidth;
723 if (prec == NULL) {
734 if (prec == NULL) {
724 goto bail;
735 goto bail;
725 }
736 }
726
737
727 if (data + nsuccs * hashwidth > dataend) {
738 if (data + nsuccs * hashwidth > dataend) {
728 goto overflow;
739 goto overflow;
729 }
740 }
730 succs = readshas(data, nsuccs, hashwidth);
741 succs = readshas(data, nsuccs, hashwidth);
731 if (succs == NULL) {
742 if (succs == NULL) {
732 goto bail;
743 goto bail;
733 }
744 }
734 data += nsuccs * hashwidth;
745 data += nsuccs * hashwidth;
735
746
736 if (nparents == 1 || nparents == 2) {
747 if (nparents == 1 || nparents == 2) {
737 if (data + nparents * hashwidth > dataend) {
748 if (data + nparents * hashwidth > dataend) {
738 goto overflow;
749 goto overflow;
739 }
750 }
740 parents = readshas(data, nparents, hashwidth);
751 parents = readshas(data, nparents, hashwidth);
741 if (parents == NULL) {
752 if (parents == NULL) {
742 goto bail;
753 goto bail;
743 }
754 }
744 data += nparents * hashwidth;
755 data += nparents * hashwidth;
745 } else {
756 } else {
746 parents = Py_None;
757 parents = Py_None;
747 Py_INCREF(parents);
758 Py_INCREF(parents);
748 }
759 }
749
760
750 if (data + 2 * nmetadata > dataend) {
761 if (data + 2 * nmetadata > dataend) {
751 goto overflow;
762 goto overflow;
752 }
763 }
753 meta = data + (2 * nmetadata);
764 meta = data + (2 * nmetadata);
754 metadata = PyTuple_New(nmetadata);
765 metadata = PyTuple_New(nmetadata);
755 if (metadata == NULL) {
766 if (metadata == NULL) {
756 goto bail;
767 goto bail;
757 }
768 }
758 for (i = 0; i < nmetadata; i++) {
769 for (i = 0; i < nmetadata; i++) {
759 PyObject *tmp, *left = NULL, *right = NULL;
770 PyObject *tmp, *left = NULL, *right = NULL;
760 Py_ssize_t leftsize = (unsigned char)(*data++);
771 Py_ssize_t leftsize = (unsigned char)(*data++);
761 Py_ssize_t rightsize = (unsigned char)(*data++);
772 Py_ssize_t rightsize = (unsigned char)(*data++);
762 if (meta + leftsize + rightsize > dataend) {
773 if (meta + leftsize + rightsize > dataend) {
763 goto overflow;
774 goto overflow;
764 }
775 }
765 left = PyBytes_FromStringAndSize(meta, leftsize);
776 left = PyBytes_FromStringAndSize(meta, leftsize);
766 meta += leftsize;
777 meta += leftsize;
767 right = PyBytes_FromStringAndSize(meta, rightsize);
778 right = PyBytes_FromStringAndSize(meta, rightsize);
768 meta += rightsize;
779 meta += rightsize;
769 tmp = PyTuple_New(2);
780 tmp = PyTuple_New(2);
770 if (!left || !right || !tmp) {
781 if (!left || !right || !tmp) {
771 Py_XDECREF(left);
782 Py_XDECREF(left);
772 Py_XDECREF(right);
783 Py_XDECREF(right);
773 Py_XDECREF(tmp);
784 Py_XDECREF(tmp);
774 goto bail;
785 goto bail;
775 }
786 }
776 PyTuple_SET_ITEM(tmp, 0, left);
787 PyTuple_SET_ITEM(tmp, 0, left);
777 PyTuple_SET_ITEM(tmp, 1, right);
788 PyTuple_SET_ITEM(tmp, 1, right);
778 PyTuple_SET_ITEM(metadata, i, tmp);
789 PyTuple_SET_ITEM(metadata, i, tmp);
779 }
790 }
780 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
791 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
781 (int)tz * 60, parents);
792 (int)tz * 60, parents);
782 goto bail; /* return successfully */
793 goto bail; /* return successfully */
783
794
784 overflow:
795 overflow:
785 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
796 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
786 bail:
797 bail:
787 Py_XDECREF(prec);
798 Py_XDECREF(prec);
788 Py_XDECREF(succs);
799 Py_XDECREF(succs);
789 Py_XDECREF(metadata);
800 Py_XDECREF(metadata);
790 Py_XDECREF(parents);
801 Py_XDECREF(parents);
791 return ret;
802 return ret;
792 }
803 }
793
804
794 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
805 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
795 {
806 {
796 const char *data, *dataend;
807 const char *data, *dataend;
797 Py_ssize_t datalen, offset, stop;
808 Py_ssize_t datalen, offset, stop;
798 PyObject *markers = NULL;
809 PyObject *markers = NULL;
799
810
800 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
811 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
801 &offset, &stop)) {
812 &offset, &stop)) {
802 return NULL;
813 return NULL;
803 }
814 }
804 if (offset < 0) {
815 if (offset < 0) {
805 PyErr_SetString(PyExc_ValueError,
816 PyErr_SetString(PyExc_ValueError,
806 "invalid negative offset in fm1readmarkers");
817 "invalid negative offset in fm1readmarkers");
807 return NULL;
818 return NULL;
808 }
819 }
809 if (stop > datalen) {
820 if (stop > datalen) {
810 PyErr_SetString(
821 PyErr_SetString(
811 PyExc_ValueError,
822 PyExc_ValueError,
812 "stop longer than data length in fm1readmarkers");
823 "stop longer than data length in fm1readmarkers");
813 return NULL;
824 return NULL;
814 }
825 }
815 dataend = data + datalen;
826 dataend = data + datalen;
816 data += offset;
827 data += offset;
817 markers = PyList_New(0);
828 markers = PyList_New(0);
818 if (!markers) {
829 if (!markers) {
819 return NULL;
830 return NULL;
820 }
831 }
821 while (offset < stop) {
832 while (offset < stop) {
822 uint32_t msize;
833 uint32_t msize;
823 int error;
834 int error;
824 PyObject *record = fm1readmarker(data, dataend, &msize);
835 PyObject *record = fm1readmarker(data, dataend, &msize);
825 if (!record) {
836 if (!record) {
826 goto bail;
837 goto bail;
827 }
838 }
828 error = PyList_Append(markers, record);
839 error = PyList_Append(markers, record);
829 Py_DECREF(record);
840 Py_DECREF(record);
830 if (error) {
841 if (error) {
831 goto bail;
842 goto bail;
832 }
843 }
833 data += msize;
844 data += msize;
834 offset += msize;
845 offset += msize;
835 }
846 }
836 return markers;
847 return markers;
837 bail:
848 bail:
838 Py_DECREF(markers);
849 Py_DECREF(markers);
839 return NULL;
850 return NULL;
840 }
851 }
841
852
842 static char parsers_doc[] = "Efficient content parsing.";
853 static char parsers_doc[] = "Efficient content parsing.";
843
854
844 PyObject *encodedir(PyObject *self, PyObject *args);
855 PyObject *encodedir(PyObject *self, PyObject *args);
845 PyObject *pathencode(PyObject *self, PyObject *args);
856 PyObject *pathencode(PyObject *self, PyObject *args);
846 PyObject *lowerencode(PyObject *self, PyObject *args);
857 PyObject *lowerencode(PyObject *self, PyObject *args);
847 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
858 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
848
859
849 static PyMethodDef methods[] = {
860 static PyMethodDef methods[] = {
850 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
861 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
851 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
862 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
852 "create a set containing non-normal and other parent entries of given "
863 "create a set containing non-normal and other parent entries of given "
853 "dirstate\n"},
864 "dirstate\n"},
854 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
865 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
855 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
866 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
856 "parse a revlog index\n"},
867 "parse a revlog index\n"},
857 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
868 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
858 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
869 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
859 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
870 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
860 {"dict_new_presized", dict_new_presized, METH_VARARGS,
871 {"dict_new_presized", dict_new_presized, METH_VARARGS,
861 "construct a dict with an expected size\n"},
872 "construct a dict with an expected size\n"},
862 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
873 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
863 "make file foldmap\n"},
874 "make file foldmap\n"},
864 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
875 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
865 "escape a UTF-8 byte string to JSON (fast path)\n"},
876 "escape a UTF-8 byte string to JSON (fast path)\n"},
866 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
877 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
867 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
878 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
868 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
879 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
869 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
880 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
870 "parse v1 obsolete markers\n"},
881 "parse v1 obsolete markers\n"},
871 {NULL, NULL}};
882 {NULL, NULL}};
872
883
873 void dirs_module_init(PyObject *mod);
884 void dirs_module_init(PyObject *mod);
874 void manifest_module_init(PyObject *mod);
885 void manifest_module_init(PyObject *mod);
875 void revlog_module_init(PyObject *mod);
886 void revlog_module_init(PyObject *mod);
876
887
877 static const int version = 20;
888 static const int version = 20;
878
889
879 static void module_init(PyObject *mod)
890 static void module_init(PyObject *mod)
880 {
891 {
881 PyObject *capsule = NULL;
892 PyObject *capsule = NULL;
882 PyModule_AddIntConstant(mod, "version", version);
893 PyModule_AddIntConstant(mod, "version", version);
883
894
884 /* This module constant has two purposes. First, it lets us unit test
895 /* This module constant has two purposes. First, it lets us unit test
885 * the ImportError raised without hard-coding any error text. This
896 * the ImportError raised without hard-coding any error text. This
886 * means we can change the text in the future without breaking tests,
897 * means we can change the text in the future without breaking tests,
887 * even across changesets without a recompile. Second, its presence
898 * even across changesets without a recompile. Second, its presence
888 * can be used to determine whether the version-checking logic is
899 * can be used to determine whether the version-checking logic is
889 * present, which also helps in testing across changesets without a
900 * present, which also helps in testing across changesets without a
890 * recompile. Note that this means the pure-Python version of parsers
901 * recompile. Note that this means the pure-Python version of parsers
891 * should not have this module constant. */
902 * should not have this module constant. */
892 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
903 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
893
904
894 dirs_module_init(mod);
905 dirs_module_init(mod);
895 manifest_module_init(mod);
906 manifest_module_init(mod);
896 revlog_module_init(mod);
907 revlog_module_init(mod);
897
908
898 capsule = PyCapsule_New(
909 capsule = PyCapsule_New(
899 make_dirstate_item,
910 make_dirstate_item,
900 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
911 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
901 if (capsule != NULL)
912 if (capsule != NULL)
902 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
913 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
903
914
904 if (PyType_Ready(&dirstateItemType) < 0) {
915 if (PyType_Ready(&dirstateItemType) < 0) {
905 return;
916 return;
906 }
917 }
907 Py_INCREF(&dirstateItemType);
918 Py_INCREF(&dirstateItemType);
908 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
919 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
909 }
920 }
910
921
911 static int check_python_version(void)
922 static int check_python_version(void)
912 {
923 {
913 PyObject *sys = PyImport_ImportModule("sys"), *ver;
924 PyObject *sys = PyImport_ImportModule("sys"), *ver;
914 long hexversion;
925 long hexversion;
915 if (!sys) {
926 if (!sys) {
916 return -1;
927 return -1;
917 }
928 }
918 ver = PyObject_GetAttrString(sys, "hexversion");
929 ver = PyObject_GetAttrString(sys, "hexversion");
919 Py_DECREF(sys);
930 Py_DECREF(sys);
920 if (!ver) {
931 if (!ver) {
921 return -1;
932 return -1;
922 }
933 }
923 hexversion = PyInt_AsLong(ver);
934 hexversion = PyInt_AsLong(ver);
924 Py_DECREF(ver);
935 Py_DECREF(ver);
925 /* sys.hexversion is a 32-bit number by default, so the -1 case
936 /* sys.hexversion is a 32-bit number by default, so the -1 case
926 * should only occur in unusual circumstances (e.g. if sys.hexversion
937 * should only occur in unusual circumstances (e.g. if sys.hexversion
927 * is manually set to an invalid value). */
938 * is manually set to an invalid value). */
928 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
939 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
929 PyErr_Format(PyExc_ImportError,
940 PyErr_Format(PyExc_ImportError,
930 "%s: The Mercurial extension "
941 "%s: The Mercurial extension "
931 "modules were compiled with Python " PY_VERSION
942 "modules were compiled with Python " PY_VERSION
932 ", but "
943 ", but "
933 "Mercurial is currently using Python with "
944 "Mercurial is currently using Python with "
934 "sys.hexversion=%ld: "
945 "sys.hexversion=%ld: "
935 "Python %s\n at: %s",
946 "Python %s\n at: %s",
936 versionerrortext, hexversion, Py_GetVersion(),
947 versionerrortext, hexversion, Py_GetVersion(),
937 Py_GetProgramFullPath());
948 Py_GetProgramFullPath());
938 return -1;
949 return -1;
939 }
950 }
940 return 0;
951 return 0;
941 }
952 }
942
953
943 #ifdef IS_PY3K
954 #ifdef IS_PY3K
944 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
955 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
945 parsers_doc, -1, methods};
956 parsers_doc, -1, methods};
946
957
947 PyMODINIT_FUNC PyInit_parsers(void)
958 PyMODINIT_FUNC PyInit_parsers(void)
948 {
959 {
949 PyObject *mod;
960 PyObject *mod;
950
961
951 if (check_python_version() == -1)
962 if (check_python_version() == -1)
952 return NULL;
963 return NULL;
953 mod = PyModule_Create(&parsers_module);
964 mod = PyModule_Create(&parsers_module);
954 module_init(mod);
965 module_init(mod);
955 return mod;
966 return mod;
956 }
967 }
957 #else
968 #else
958 PyMODINIT_FUNC initparsers(void)
969 PyMODINIT_FUNC initparsers(void)
959 {
970 {
960 PyObject *mod;
971 PyObject *mod;
961
972
962 if (check_python_version() == -1) {
973 if (check_python_version() == -1) {
963 return;
974 return;
964 }
975 }
965 mod = Py_InitModule3("parsers", methods, parsers_doc);
976 mod = Py_InitModule3("parsers", methods, parsers_doc);
966 module_init(mod);
977 module_init(mod);
967 }
978 }
968 #endif
979 #endif
@@ -1,752 +1,752
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 DirstateItem = parsers.DirstateItem
30 DirstateItem = parsers.DirstateItem
31
31
32
32
33 # a special value used internally for `size` if the file come from the other parent
33 # a special value used internally for `size` if the file come from the other parent
34 FROM_P2 = -2
34 FROM_P2 = -2
35
35
36 # a special value used internally for `size` if the file is modified/merged/added
36 # a special value used internally for `size` if the file is modified/merged/added
37 NONNORMAL = -1
37 NONNORMAL = -1
38
38
39 # a special value used internally for `time` if the time is ambigeous
39 # a special value used internally for `time` if the time is ambigeous
40 AMBIGUOUS_TIME = -1
40 AMBIGUOUS_TIME = -1
41
41
42 rangemask = 0x7FFFFFFF
42 rangemask = 0x7FFFFFFF
43
43
44
44
45 class dirstatemap(object):
45 class dirstatemap(object):
46 """Map encapsulating the dirstate's contents.
46 """Map encapsulating the dirstate's contents.
47
47
48 The dirstate contains the following state:
48 The dirstate contains the following state:
49
49
50 - `identity` is the identity of the dirstate file, which can be used to
50 - `identity` is the identity of the dirstate file, which can be used to
51 detect when changes have occurred to the dirstate file.
51 detect when changes have occurred to the dirstate file.
52
52
53 - `parents` is a pair containing the parents of the working copy. The
53 - `parents` is a pair containing the parents of the working copy. The
54 parents are updated by calling `setparents`.
54 parents are updated by calling `setparents`.
55
55
56 - the state map maps filenames to tuples of (state, mode, size, mtime),
56 - the state map maps filenames to tuples of (state, mode, size, mtime),
57 where state is a single character representing 'normal', 'added',
57 where state is a single character representing 'normal', 'added',
58 'removed', or 'merged'. It is read by treating the dirstate as a
58 'removed', or 'merged'. It is read by treating the dirstate as a
59 dict. File state is updated by calling the `addfile`, `removefile` and
59 dict. File state is updated by calling the `addfile`, `removefile` and
60 `dropfile` methods.
60 `dropfile` methods.
61
61
62 - `copymap` maps destination filenames to their source filename.
62 - `copymap` maps destination filenames to their source filename.
63
63
64 The dirstate also provides the following views onto the state:
64 The dirstate also provides the following views onto the state:
65
65
66 - `nonnormalset` is a set of the filenames that have state other
66 - `nonnormalset` is a set of the filenames that have state other
67 than 'normal', or are normal but have an mtime of -1 ('normallookup').
67 than 'normal', or are normal but have an mtime of -1 ('normallookup').
68
68
69 - `otherparentset` is a set of the filenames that are marked as coming
69 - `otherparentset` is a set of the filenames that are marked as coming
70 from the second parent when the dirstate is currently being merged.
70 from the second parent when the dirstate is currently being merged.
71
71
72 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
72 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
73 form that they appear as in the dirstate.
73 form that they appear as in the dirstate.
74
74
75 - `dirfoldmap` is a dict mapping normalized directory names to the
75 - `dirfoldmap` is a dict mapping normalized directory names to the
76 denormalized form that they appear as in the dirstate.
76 denormalized form that they appear as in the dirstate.
77 """
77 """
78
78
79 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
79 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
80 self._ui = ui
80 self._ui = ui
81 self._opener = opener
81 self._opener = opener
82 self._root = root
82 self._root = root
83 self._filename = b'dirstate'
83 self._filename = b'dirstate'
84 self._nodelen = 20
84 self._nodelen = 20
85 self._nodeconstants = nodeconstants
85 self._nodeconstants = nodeconstants
86 assert (
86 assert (
87 not use_dirstate_v2
87 not use_dirstate_v2
88 ), "should have detected unsupported requirement"
88 ), "should have detected unsupported requirement"
89
89
90 self._parents = None
90 self._parents = None
91 self._dirtyparents = False
91 self._dirtyparents = False
92
92
93 # for consistent view between _pl() and _read() invocations
93 # for consistent view between _pl() and _read() invocations
94 self._pendingmode = None
94 self._pendingmode = None
95
95
96 @propertycache
96 @propertycache
97 def _map(self):
97 def _map(self):
98 self._map = {}
98 self._map = {}
99 self.read()
99 self.read()
100 return self._map
100 return self._map
101
101
102 @propertycache
102 @propertycache
103 def copymap(self):
103 def copymap(self):
104 self.copymap = {}
104 self.copymap = {}
105 self._map
105 self._map
106 return self.copymap
106 return self.copymap
107
107
108 def clear(self):
108 def clear(self):
109 self._map.clear()
109 self._map.clear()
110 self.copymap.clear()
110 self.copymap.clear()
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
112 util.clearcachedproperty(self, b"_dirs")
112 util.clearcachedproperty(self, b"_dirs")
113 util.clearcachedproperty(self, b"_alldirs")
113 util.clearcachedproperty(self, b"_alldirs")
114 util.clearcachedproperty(self, b"filefoldmap")
114 util.clearcachedproperty(self, b"filefoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
116 util.clearcachedproperty(self, b"nonnormalset")
116 util.clearcachedproperty(self, b"nonnormalset")
117 util.clearcachedproperty(self, b"otherparentset")
117 util.clearcachedproperty(self, b"otherparentset")
118
118
119 def items(self):
119 def items(self):
120 return pycompat.iteritems(self._map)
120 return pycompat.iteritems(self._map)
121
121
122 # forward for python2,3 compat
122 # forward for python2,3 compat
123 iteritems = items
123 iteritems = items
124
124
125 debug_iter = items
125 debug_iter = items
126
126
127 def __len__(self):
127 def __len__(self):
128 return len(self._map)
128 return len(self._map)
129
129
130 def __iter__(self):
130 def __iter__(self):
131 return iter(self._map)
131 return iter(self._map)
132
132
133 def get(self, key, default=None):
133 def get(self, key, default=None):
134 return self._map.get(key, default)
134 return self._map.get(key, default)
135
135
136 def __contains__(self, key):
136 def __contains__(self, key):
137 return key in self._map
137 return key in self._map
138
138
139 def __getitem__(self, key):
139 def __getitem__(self, key):
140 return self._map[key]
140 return self._map[key]
141
141
142 def keys(self):
142 def keys(self):
143 return self._map.keys()
143 return self._map.keys()
144
144
145 def preload(self):
145 def preload(self):
146 """Loads the underlying data, if it's not already loaded"""
146 """Loads the underlying data, if it's not already loaded"""
147 self._map
147 self._map
148
148
149 def addfile(
149 def addfile(
150 self,
150 self,
151 f,
151 f,
152 mode=0,
152 mode=0,
153 size=None,
153 size=None,
154 mtime=None,
154 mtime=None,
155 added=False,
155 added=False,
156 merged=False,
156 merged=False,
157 from_p2=False,
157 from_p2=False,
158 possibly_dirty=False,
158 possibly_dirty=False,
159 ):
159 ):
160 """Add a tracked file to the dirstate."""
160 """Add a tracked file to the dirstate."""
161 if added:
161 if added:
162 assert not merged
162 assert not merged
163 assert not possibly_dirty
163 assert not possibly_dirty
164 assert not from_p2
164 assert not from_p2
165 state = b'a'
165 state = b'a'
166 size = NONNORMAL
166 size = NONNORMAL
167 mtime = AMBIGUOUS_TIME
167 mtime = AMBIGUOUS_TIME
168 elif merged:
168 elif merged:
169 assert not possibly_dirty
169 assert not possibly_dirty
170 assert not from_p2
170 assert not from_p2
171 state = b'm'
171 state = b'm'
172 size = FROM_P2
172 size = FROM_P2
173 mtime = AMBIGUOUS_TIME
173 mtime = AMBIGUOUS_TIME
174 elif from_p2:
174 elif from_p2:
175 assert not possibly_dirty
175 assert not possibly_dirty
176 state = b'n'
176 state = b'n'
177 size = FROM_P2
177 size = FROM_P2
178 mtime = AMBIGUOUS_TIME
178 mtime = AMBIGUOUS_TIME
179 elif possibly_dirty:
179 elif possibly_dirty:
180 state = b'n'
180 state = b'n'
181 size = NONNORMAL
181 size = NONNORMAL
182 mtime = AMBIGUOUS_TIME
182 mtime = AMBIGUOUS_TIME
183 else:
183 else:
184 assert size != FROM_P2
184 assert size != FROM_P2
185 assert size != NONNORMAL
185 assert size != NONNORMAL
186 state = b'n'
186 state = b'n'
187 size = size & rangemask
187 size = size & rangemask
188 mtime = mtime & rangemask
188 mtime = mtime & rangemask
189 assert state is not None
189 assert state is not None
190 assert size is not None
190 assert size is not None
191 assert mtime is not None
191 assert mtime is not None
192 old_entry = self.get(f)
192 old_entry = self.get(f)
193 if (
193 if (
194 old_entry is None or old_entry.removed
194 old_entry is None or old_entry.removed
195 ) and "_dirs" in self.__dict__:
195 ) and "_dirs" in self.__dict__:
196 self._dirs.addpath(f)
196 self._dirs.addpath(f)
197 if old_entry is None and "_alldirs" in self.__dict__:
197 if old_entry is None and "_alldirs" in self.__dict__:
198 self._alldirs.addpath(f)
198 self._alldirs.addpath(f)
199 self._map[f] = DirstateItem(state, mode, size, mtime)
199 e = self._map[f] = DirstateItem(state, mode, size, mtime)
200 if state != b'n' or mtime == AMBIGUOUS_TIME:
200 if e.dm_nonnormal:
201 self.nonnormalset.add(f)
201 self.nonnormalset.add(f)
202 if size == FROM_P2:
202 if size == FROM_P2:
203 self.otherparentset.add(f)
203 self.otherparentset.add(f)
204
204
205 def removefile(self, f, in_merge=False):
205 def removefile(self, f, in_merge=False):
206 """
206 """
207 Mark a file as removed in the dirstate.
207 Mark a file as removed in the dirstate.
208
208
209 The `size` parameter is used to store sentinel values that indicate
209 The `size` parameter is used to store sentinel values that indicate
210 the file's previous state. In the future, we should refactor this
210 the file's previous state. In the future, we should refactor this
211 to be more explicit about what that state is.
211 to be more explicit about what that state is.
212 """
212 """
213 entry = self.get(f)
213 entry = self.get(f)
214 size = 0
214 size = 0
215 if in_merge:
215 if in_merge:
216 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
216 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
217 # during a merge. So I (marmoute) am not sure we need the
217 # during a merge. So I (marmoute) am not sure we need the
218 # conditionnal at all. Adding double checking this with assert
218 # conditionnal at all. Adding double checking this with assert
219 # would be nice.
219 # would be nice.
220 if entry is not None:
220 if entry is not None:
221 # backup the previous state
221 # backup the previous state
222 if entry.merged: # merge
222 if entry.merged: # merge
223 size = NONNORMAL
223 size = NONNORMAL
224 elif entry.from_p2:
224 elif entry.from_p2:
225 size = FROM_P2
225 size = FROM_P2
226 self.otherparentset.add(f)
226 self.otherparentset.add(f)
227 if entry is not None and not (entry.merged or entry.from_p2):
227 if entry is not None and not (entry.merged or entry.from_p2):
228 self.copymap.pop(f, None)
228 self.copymap.pop(f, None)
229
229
230 if entry is not None and not entry.removed and "_dirs" in self.__dict__:
230 if entry is not None and not entry.removed and "_dirs" in self.__dict__:
231 self._dirs.delpath(f)
231 self._dirs.delpath(f)
232 if entry is None and "_alldirs" in self.__dict__:
232 if entry is None and "_alldirs" in self.__dict__:
233 self._alldirs.addpath(f)
233 self._alldirs.addpath(f)
234 if "filefoldmap" in self.__dict__:
234 if "filefoldmap" in self.__dict__:
235 normed = util.normcase(f)
235 normed = util.normcase(f)
236 self.filefoldmap.pop(normed, None)
236 self.filefoldmap.pop(normed, None)
237 self._map[f] = DirstateItem(b'r', 0, size, 0)
237 self._map[f] = DirstateItem(b'r', 0, size, 0)
238 self.nonnormalset.add(f)
238 self.nonnormalset.add(f)
239
239
240 def dropfile(self, f):
240 def dropfile(self, f):
241 """
241 """
242 Remove a file from the dirstate. Returns True if the file was
242 Remove a file from the dirstate. Returns True if the file was
243 previously recorded.
243 previously recorded.
244 """
244 """
245 old_entry = self._map.pop(f, None)
245 old_entry = self._map.pop(f, None)
246 exists = False
246 exists = False
247 oldstate = b'?'
247 oldstate = b'?'
248 if old_entry is not None:
248 if old_entry is not None:
249 exists = True
249 exists = True
250 oldstate = old_entry.state
250 oldstate = old_entry.state
251 if exists:
251 if exists:
252 if oldstate != b"r" and "_dirs" in self.__dict__:
252 if oldstate != b"r" and "_dirs" in self.__dict__:
253 self._dirs.delpath(f)
253 self._dirs.delpath(f)
254 if "_alldirs" in self.__dict__:
254 if "_alldirs" in self.__dict__:
255 self._alldirs.delpath(f)
255 self._alldirs.delpath(f)
256 if "filefoldmap" in self.__dict__:
256 if "filefoldmap" in self.__dict__:
257 normed = util.normcase(f)
257 normed = util.normcase(f)
258 self.filefoldmap.pop(normed, None)
258 self.filefoldmap.pop(normed, None)
259 self.nonnormalset.discard(f)
259 self.nonnormalset.discard(f)
260 return exists
260 return exists
261
261
262 def clearambiguoustimes(self, files, now):
262 def clearambiguoustimes(self, files, now):
263 for f in files:
263 for f in files:
264 e = self.get(f)
264 e = self.get(f)
265 if e is not None and e.need_delay(now):
265 if e is not None and e.need_delay(now):
266 e.set_possibly_dirty()
266 e.set_possibly_dirty()
267 self.nonnormalset.add(f)
267 self.nonnormalset.add(f)
268
268
269 def nonnormalentries(self):
269 def nonnormalentries(self):
270 '''Compute the nonnormal dirstate entries from the dmap'''
270 '''Compute the nonnormal dirstate entries from the dmap'''
271 try:
271 try:
272 return parsers.nonnormalotherparententries(self._map)
272 return parsers.nonnormalotherparententries(self._map)
273 except AttributeError:
273 except AttributeError:
274 nonnorm = set()
274 nonnorm = set()
275 otherparent = set()
275 otherparent = set()
276 for fname, e in pycompat.iteritems(self._map):
276 for fname, e in pycompat.iteritems(self._map):
277 if e.state != b'n' or e.mtime == AMBIGUOUS_TIME:
277 if e.dm_nonnormal:
278 nonnorm.add(fname)
278 nonnorm.add(fname)
279 if e.from_p2:
279 if e.from_p2:
280 otherparent.add(fname)
280 otherparent.add(fname)
281 return nonnorm, otherparent
281 return nonnorm, otherparent
282
282
283 @propertycache
283 @propertycache
284 def filefoldmap(self):
284 def filefoldmap(self):
285 """Returns a dictionary mapping normalized case paths to their
285 """Returns a dictionary mapping normalized case paths to their
286 non-normalized versions.
286 non-normalized versions.
287 """
287 """
288 try:
288 try:
289 makefilefoldmap = parsers.make_file_foldmap
289 makefilefoldmap = parsers.make_file_foldmap
290 except AttributeError:
290 except AttributeError:
291 pass
291 pass
292 else:
292 else:
293 return makefilefoldmap(
293 return makefilefoldmap(
294 self._map, util.normcasespec, util.normcasefallback
294 self._map, util.normcasespec, util.normcasefallback
295 )
295 )
296
296
297 f = {}
297 f = {}
298 normcase = util.normcase
298 normcase = util.normcase
299 for name, s in pycompat.iteritems(self._map):
299 for name, s in pycompat.iteritems(self._map):
300 if not s.removed:
300 if not s.removed:
301 f[normcase(name)] = name
301 f[normcase(name)] = name
302 f[b'.'] = b'.' # prevents useless util.fspath() invocation
302 f[b'.'] = b'.' # prevents useless util.fspath() invocation
303 return f
303 return f
304
304
305 def hastrackeddir(self, d):
305 def hastrackeddir(self, d):
306 """
306 """
307 Returns True if the dirstate contains a tracked (not removed) file
307 Returns True if the dirstate contains a tracked (not removed) file
308 in this directory.
308 in this directory.
309 """
309 """
310 return d in self._dirs
310 return d in self._dirs
311
311
312 def hasdir(self, d):
312 def hasdir(self, d):
313 """
313 """
314 Returns True if the dirstate contains a file (tracked or removed)
314 Returns True if the dirstate contains a file (tracked or removed)
315 in this directory.
315 in this directory.
316 """
316 """
317 return d in self._alldirs
317 return d in self._alldirs
318
318
319 @propertycache
319 @propertycache
320 def _dirs(self):
320 def _dirs(self):
321 return pathutil.dirs(self._map, b'r')
321 return pathutil.dirs(self._map, b'r')
322
322
323 @propertycache
323 @propertycache
324 def _alldirs(self):
324 def _alldirs(self):
325 return pathutil.dirs(self._map)
325 return pathutil.dirs(self._map)
326
326
327 def _opendirstatefile(self):
327 def _opendirstatefile(self):
328 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
328 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
329 if self._pendingmode is not None and self._pendingmode != mode:
329 if self._pendingmode is not None and self._pendingmode != mode:
330 fp.close()
330 fp.close()
331 raise error.Abort(
331 raise error.Abort(
332 _(b'working directory state may be changed parallelly')
332 _(b'working directory state may be changed parallelly')
333 )
333 )
334 self._pendingmode = mode
334 self._pendingmode = mode
335 return fp
335 return fp
336
336
337 def parents(self):
337 def parents(self):
338 if not self._parents:
338 if not self._parents:
339 try:
339 try:
340 fp = self._opendirstatefile()
340 fp = self._opendirstatefile()
341 st = fp.read(2 * self._nodelen)
341 st = fp.read(2 * self._nodelen)
342 fp.close()
342 fp.close()
343 except IOError as err:
343 except IOError as err:
344 if err.errno != errno.ENOENT:
344 if err.errno != errno.ENOENT:
345 raise
345 raise
346 # File doesn't exist, so the current state is empty
346 # File doesn't exist, so the current state is empty
347 st = b''
347 st = b''
348
348
349 l = len(st)
349 l = len(st)
350 if l == self._nodelen * 2:
350 if l == self._nodelen * 2:
351 self._parents = (
351 self._parents = (
352 st[: self._nodelen],
352 st[: self._nodelen],
353 st[self._nodelen : 2 * self._nodelen],
353 st[self._nodelen : 2 * self._nodelen],
354 )
354 )
355 elif l == 0:
355 elif l == 0:
356 self._parents = (
356 self._parents = (
357 self._nodeconstants.nullid,
357 self._nodeconstants.nullid,
358 self._nodeconstants.nullid,
358 self._nodeconstants.nullid,
359 )
359 )
360 else:
360 else:
361 raise error.Abort(
361 raise error.Abort(
362 _(b'working directory state appears damaged!')
362 _(b'working directory state appears damaged!')
363 )
363 )
364
364
365 return self._parents
365 return self._parents
366
366
367 def setparents(self, p1, p2):
367 def setparents(self, p1, p2):
368 self._parents = (p1, p2)
368 self._parents = (p1, p2)
369 self._dirtyparents = True
369 self._dirtyparents = True
370
370
371 def read(self):
371 def read(self):
372 # ignore HG_PENDING because identity is used only for writing
372 # ignore HG_PENDING because identity is used only for writing
373 self.identity = util.filestat.frompath(
373 self.identity = util.filestat.frompath(
374 self._opener.join(self._filename)
374 self._opener.join(self._filename)
375 )
375 )
376
376
377 try:
377 try:
378 fp = self._opendirstatefile()
378 fp = self._opendirstatefile()
379 try:
379 try:
380 st = fp.read()
380 st = fp.read()
381 finally:
381 finally:
382 fp.close()
382 fp.close()
383 except IOError as err:
383 except IOError as err:
384 if err.errno != errno.ENOENT:
384 if err.errno != errno.ENOENT:
385 raise
385 raise
386 return
386 return
387 if not st:
387 if not st:
388 return
388 return
389
389
390 if util.safehasattr(parsers, b'dict_new_presized'):
390 if util.safehasattr(parsers, b'dict_new_presized'):
391 # Make an estimate of the number of files in the dirstate based on
391 # Make an estimate of the number of files in the dirstate based on
392 # its size. This trades wasting some memory for avoiding costly
392 # its size. This trades wasting some memory for avoiding costly
393 # resizes. Each entry have a prefix of 17 bytes followed by one or
393 # resizes. Each entry have a prefix of 17 bytes followed by one or
394 # two path names. Studies on various large-scale real-world repositories
394 # two path names. Studies on various large-scale real-world repositories
395 # found 54 bytes a reasonable upper limit for the average path names.
395 # found 54 bytes a reasonable upper limit for the average path names.
396 # Copy entries are ignored for the sake of this estimate.
396 # Copy entries are ignored for the sake of this estimate.
397 self._map = parsers.dict_new_presized(len(st) // 71)
397 self._map = parsers.dict_new_presized(len(st) // 71)
398
398
399 # Python's garbage collector triggers a GC each time a certain number
399 # Python's garbage collector triggers a GC each time a certain number
400 # of container objects (the number being defined by
400 # of container objects (the number being defined by
401 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
401 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
402 # for each file in the dirstate. The C version then immediately marks
402 # for each file in the dirstate. The C version then immediately marks
403 # them as not to be tracked by the collector. However, this has no
403 # them as not to be tracked by the collector. However, this has no
404 # effect on when GCs are triggered, only on what objects the GC looks
404 # effect on when GCs are triggered, only on what objects the GC looks
405 # into. This means that O(number of files) GCs are unavoidable.
405 # into. This means that O(number of files) GCs are unavoidable.
406 # Depending on when in the process's lifetime the dirstate is parsed,
406 # Depending on when in the process's lifetime the dirstate is parsed,
407 # this can get very expensive. As a workaround, disable GC while
407 # this can get very expensive. As a workaround, disable GC while
408 # parsing the dirstate.
408 # parsing the dirstate.
409 #
409 #
410 # (we cannot decorate the function directly since it is in a C module)
410 # (we cannot decorate the function directly since it is in a C module)
411 parse_dirstate = util.nogc(parsers.parse_dirstate)
411 parse_dirstate = util.nogc(parsers.parse_dirstate)
412 p = parse_dirstate(self._map, self.copymap, st)
412 p = parse_dirstate(self._map, self.copymap, st)
413 if not self._dirtyparents:
413 if not self._dirtyparents:
414 self.setparents(*p)
414 self.setparents(*p)
415
415
416 # Avoid excess attribute lookups by fast pathing certain checks
416 # Avoid excess attribute lookups by fast pathing certain checks
417 self.__contains__ = self._map.__contains__
417 self.__contains__ = self._map.__contains__
418 self.__getitem__ = self._map.__getitem__
418 self.__getitem__ = self._map.__getitem__
419 self.get = self._map.get
419 self.get = self._map.get
420
420
421 def write(self, _tr, st, now):
421 def write(self, _tr, st, now):
422 st.write(
422 st.write(
423 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
423 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
424 )
424 )
425 st.close()
425 st.close()
426 self._dirtyparents = False
426 self._dirtyparents = False
427 self.nonnormalset, self.otherparentset = self.nonnormalentries()
427 self.nonnormalset, self.otherparentset = self.nonnormalentries()
428
428
429 @propertycache
429 @propertycache
430 def nonnormalset(self):
430 def nonnormalset(self):
431 nonnorm, otherparents = self.nonnormalentries()
431 nonnorm, otherparents = self.nonnormalentries()
432 self.otherparentset = otherparents
432 self.otherparentset = otherparents
433 return nonnorm
433 return nonnorm
434
434
435 @propertycache
435 @propertycache
436 def otherparentset(self):
436 def otherparentset(self):
437 nonnorm, otherparents = self.nonnormalentries()
437 nonnorm, otherparents = self.nonnormalentries()
438 self.nonnormalset = nonnorm
438 self.nonnormalset = nonnorm
439 return otherparents
439 return otherparents
440
440
441 def non_normal_or_other_parent_paths(self):
441 def non_normal_or_other_parent_paths(self):
442 return self.nonnormalset.union(self.otherparentset)
442 return self.nonnormalset.union(self.otherparentset)
443
443
444 @propertycache
444 @propertycache
445 def identity(self):
445 def identity(self):
446 self._map
446 self._map
447 return self.identity
447 return self.identity
448
448
449 @propertycache
449 @propertycache
450 def dirfoldmap(self):
450 def dirfoldmap(self):
451 f = {}
451 f = {}
452 normcase = util.normcase
452 normcase = util.normcase
453 for name in self._dirs:
453 for name in self._dirs:
454 f[normcase(name)] = name
454 f[normcase(name)] = name
455 return f
455 return f
456
456
457
457
458 if rustmod is not None:
458 if rustmod is not None:
459
459
460 class dirstatemap(object):
460 class dirstatemap(object):
461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
462 self._use_dirstate_v2 = use_dirstate_v2
462 self._use_dirstate_v2 = use_dirstate_v2
463 self._nodeconstants = nodeconstants
463 self._nodeconstants = nodeconstants
464 self._ui = ui
464 self._ui = ui
465 self._opener = opener
465 self._opener = opener
466 self._root = root
466 self._root = root
467 self._filename = b'dirstate'
467 self._filename = b'dirstate'
468 self._nodelen = 20 # Also update Rust code when changing this!
468 self._nodelen = 20 # Also update Rust code when changing this!
469 self._parents = None
469 self._parents = None
470 self._dirtyparents = False
470 self._dirtyparents = False
471 self._docket = None
471 self._docket = None
472
472
473 # for consistent view between _pl() and _read() invocations
473 # for consistent view between _pl() and _read() invocations
474 self._pendingmode = None
474 self._pendingmode = None
475
475
476 self._use_dirstate_tree = self._ui.configbool(
476 self._use_dirstate_tree = self._ui.configbool(
477 b"experimental",
477 b"experimental",
478 b"dirstate-tree.in-memory",
478 b"dirstate-tree.in-memory",
479 False,
479 False,
480 )
480 )
481
481
482 def addfile(
482 def addfile(
483 self,
483 self,
484 f,
484 f,
485 mode=0,
485 mode=0,
486 size=None,
486 size=None,
487 mtime=None,
487 mtime=None,
488 added=False,
488 added=False,
489 merged=False,
489 merged=False,
490 from_p2=False,
490 from_p2=False,
491 possibly_dirty=False,
491 possibly_dirty=False,
492 ):
492 ):
493 return self._rustmap.addfile(
493 return self._rustmap.addfile(
494 f,
494 f,
495 mode,
495 mode,
496 size,
496 size,
497 mtime,
497 mtime,
498 added,
498 added,
499 merged,
499 merged,
500 from_p2,
500 from_p2,
501 possibly_dirty,
501 possibly_dirty,
502 )
502 )
503
503
504 def removefile(self, *args, **kwargs):
504 def removefile(self, *args, **kwargs):
505 return self._rustmap.removefile(*args, **kwargs)
505 return self._rustmap.removefile(*args, **kwargs)
506
506
507 def dropfile(self, *args, **kwargs):
507 def dropfile(self, *args, **kwargs):
508 return self._rustmap.dropfile(*args, **kwargs)
508 return self._rustmap.dropfile(*args, **kwargs)
509
509
510 def clearambiguoustimes(self, *args, **kwargs):
510 def clearambiguoustimes(self, *args, **kwargs):
511 return self._rustmap.clearambiguoustimes(*args, **kwargs)
511 return self._rustmap.clearambiguoustimes(*args, **kwargs)
512
512
513 def nonnormalentries(self):
513 def nonnormalentries(self):
514 return self._rustmap.nonnormalentries()
514 return self._rustmap.nonnormalentries()
515
515
516 def get(self, *args, **kwargs):
516 def get(self, *args, **kwargs):
517 return self._rustmap.get(*args, **kwargs)
517 return self._rustmap.get(*args, **kwargs)
518
518
519 @property
519 @property
520 def copymap(self):
520 def copymap(self):
521 return self._rustmap.copymap()
521 return self._rustmap.copymap()
522
522
523 def directories(self):
523 def directories(self):
524 return self._rustmap.directories()
524 return self._rustmap.directories()
525
525
526 def debug_iter(self):
526 def debug_iter(self):
527 return self._rustmap.debug_iter()
527 return self._rustmap.debug_iter()
528
528
529 def preload(self):
529 def preload(self):
530 self._rustmap
530 self._rustmap
531
531
532 def clear(self):
532 def clear(self):
533 self._rustmap.clear()
533 self._rustmap.clear()
534 self.setparents(
534 self.setparents(
535 self._nodeconstants.nullid, self._nodeconstants.nullid
535 self._nodeconstants.nullid, self._nodeconstants.nullid
536 )
536 )
537 util.clearcachedproperty(self, b"_dirs")
537 util.clearcachedproperty(self, b"_dirs")
538 util.clearcachedproperty(self, b"_alldirs")
538 util.clearcachedproperty(self, b"_alldirs")
539 util.clearcachedproperty(self, b"dirfoldmap")
539 util.clearcachedproperty(self, b"dirfoldmap")
540
540
541 def items(self):
541 def items(self):
542 return self._rustmap.items()
542 return self._rustmap.items()
543
543
544 def keys(self):
544 def keys(self):
545 return iter(self._rustmap)
545 return iter(self._rustmap)
546
546
547 def __contains__(self, key):
547 def __contains__(self, key):
548 return key in self._rustmap
548 return key in self._rustmap
549
549
550 def __getitem__(self, item):
550 def __getitem__(self, item):
551 return self._rustmap[item]
551 return self._rustmap[item]
552
552
553 def __len__(self):
553 def __len__(self):
554 return len(self._rustmap)
554 return len(self._rustmap)
555
555
556 def __iter__(self):
556 def __iter__(self):
557 return iter(self._rustmap)
557 return iter(self._rustmap)
558
558
559 # forward for python2,3 compat
559 # forward for python2,3 compat
560 iteritems = items
560 iteritems = items
561
561
562 def _opendirstatefile(self):
562 def _opendirstatefile(self):
563 fp, mode = txnutil.trypending(
563 fp, mode = txnutil.trypending(
564 self._root, self._opener, self._filename
564 self._root, self._opener, self._filename
565 )
565 )
566 if self._pendingmode is not None and self._pendingmode != mode:
566 if self._pendingmode is not None and self._pendingmode != mode:
567 fp.close()
567 fp.close()
568 raise error.Abort(
568 raise error.Abort(
569 _(b'working directory state may be changed parallelly')
569 _(b'working directory state may be changed parallelly')
570 )
570 )
571 self._pendingmode = mode
571 self._pendingmode = mode
572 return fp
572 return fp
573
573
574 def _readdirstatefile(self, size=-1):
574 def _readdirstatefile(self, size=-1):
575 try:
575 try:
576 with self._opendirstatefile() as fp:
576 with self._opendirstatefile() as fp:
577 return fp.read(size)
577 return fp.read(size)
578 except IOError as err:
578 except IOError as err:
579 if err.errno != errno.ENOENT:
579 if err.errno != errno.ENOENT:
580 raise
580 raise
581 # File doesn't exist, so the current state is empty
581 # File doesn't exist, so the current state is empty
582 return b''
582 return b''
583
583
584 def setparents(self, p1, p2):
584 def setparents(self, p1, p2):
585 self._parents = (p1, p2)
585 self._parents = (p1, p2)
586 self._dirtyparents = True
586 self._dirtyparents = True
587
587
588 def parents(self):
588 def parents(self):
589 if not self._parents:
589 if not self._parents:
590 if self._use_dirstate_v2:
590 if self._use_dirstate_v2:
591 self._parents = self.docket.parents
591 self._parents = self.docket.parents
592 else:
592 else:
593 read_len = self._nodelen * 2
593 read_len = self._nodelen * 2
594 st = self._readdirstatefile(read_len)
594 st = self._readdirstatefile(read_len)
595 l = len(st)
595 l = len(st)
596 if l == read_len:
596 if l == read_len:
597 self._parents = (
597 self._parents = (
598 st[: self._nodelen],
598 st[: self._nodelen],
599 st[self._nodelen : 2 * self._nodelen],
599 st[self._nodelen : 2 * self._nodelen],
600 )
600 )
601 elif l == 0:
601 elif l == 0:
602 self._parents = (
602 self._parents = (
603 self._nodeconstants.nullid,
603 self._nodeconstants.nullid,
604 self._nodeconstants.nullid,
604 self._nodeconstants.nullid,
605 )
605 )
606 else:
606 else:
607 raise error.Abort(
607 raise error.Abort(
608 _(b'working directory state appears damaged!')
608 _(b'working directory state appears damaged!')
609 )
609 )
610
610
611 return self._parents
611 return self._parents
612
612
613 @property
613 @property
614 def docket(self):
614 def docket(self):
615 if not self._docket:
615 if not self._docket:
616 if not self._use_dirstate_v2:
616 if not self._use_dirstate_v2:
617 raise error.ProgrammingError(
617 raise error.ProgrammingError(
618 b'dirstate only has a docket in v2 format'
618 b'dirstate only has a docket in v2 format'
619 )
619 )
620 self._docket = docketmod.DirstateDocket.parse(
620 self._docket = docketmod.DirstateDocket.parse(
621 self._readdirstatefile(), self._nodeconstants
621 self._readdirstatefile(), self._nodeconstants
622 )
622 )
623 return self._docket
623 return self._docket
624
624
625 @propertycache
625 @propertycache
626 def _rustmap(self):
626 def _rustmap(self):
627 """
627 """
628 Fills the Dirstatemap when called.
628 Fills the Dirstatemap when called.
629 """
629 """
630 # ignore HG_PENDING because identity is used only for writing
630 # ignore HG_PENDING because identity is used only for writing
631 self.identity = util.filestat.frompath(
631 self.identity = util.filestat.frompath(
632 self._opener.join(self._filename)
632 self._opener.join(self._filename)
633 )
633 )
634
634
635 if self._use_dirstate_v2:
635 if self._use_dirstate_v2:
636 if self.docket.uuid:
636 if self.docket.uuid:
637 # TODO: use mmap when possible
637 # TODO: use mmap when possible
638 data = self._opener.read(self.docket.data_filename())
638 data = self._opener.read(self.docket.data_filename())
639 else:
639 else:
640 data = b''
640 data = b''
641 self._rustmap = rustmod.DirstateMap.new_v2(
641 self._rustmap = rustmod.DirstateMap.new_v2(
642 data, self.docket.data_size, self.docket.tree_metadata
642 data, self.docket.data_size, self.docket.tree_metadata
643 )
643 )
644 parents = self.docket.parents
644 parents = self.docket.parents
645 else:
645 else:
646 self._rustmap, parents = rustmod.DirstateMap.new_v1(
646 self._rustmap, parents = rustmod.DirstateMap.new_v1(
647 self._use_dirstate_tree, self._readdirstatefile()
647 self._use_dirstate_tree, self._readdirstatefile()
648 )
648 )
649
649
650 if parents and not self._dirtyparents:
650 if parents and not self._dirtyparents:
651 self.setparents(*parents)
651 self.setparents(*parents)
652
652
653 self.__contains__ = self._rustmap.__contains__
653 self.__contains__ = self._rustmap.__contains__
654 self.__getitem__ = self._rustmap.__getitem__
654 self.__getitem__ = self._rustmap.__getitem__
655 self.get = self._rustmap.get
655 self.get = self._rustmap.get
656 return self._rustmap
656 return self._rustmap
657
657
658 def write(self, tr, st, now):
658 def write(self, tr, st, now):
659 if not self._use_dirstate_v2:
659 if not self._use_dirstate_v2:
660 p1, p2 = self.parents()
660 p1, p2 = self.parents()
661 packed = self._rustmap.write_v1(p1, p2, now)
661 packed = self._rustmap.write_v1(p1, p2, now)
662 st.write(packed)
662 st.write(packed)
663 st.close()
663 st.close()
664 self._dirtyparents = False
664 self._dirtyparents = False
665 return
665 return
666
666
667 # We can only append to an existing data file if there is one
667 # We can only append to an existing data file if there is one
668 can_append = self.docket.uuid is not None
668 can_append = self.docket.uuid is not None
669 packed, meta, append = self._rustmap.write_v2(now, can_append)
669 packed, meta, append = self._rustmap.write_v2(now, can_append)
670 if append:
670 if append:
671 docket = self.docket
671 docket = self.docket
672 data_filename = docket.data_filename()
672 data_filename = docket.data_filename()
673 if tr:
673 if tr:
674 tr.add(data_filename, docket.data_size)
674 tr.add(data_filename, docket.data_size)
675 with self._opener(data_filename, b'r+b') as fp:
675 with self._opener(data_filename, b'r+b') as fp:
676 fp.seek(docket.data_size)
676 fp.seek(docket.data_size)
677 assert fp.tell() == docket.data_size
677 assert fp.tell() == docket.data_size
678 written = fp.write(packed)
678 written = fp.write(packed)
679 if written is not None: # py2 may return None
679 if written is not None: # py2 may return None
680 assert written == len(packed), (written, len(packed))
680 assert written == len(packed), (written, len(packed))
681 docket.data_size += len(packed)
681 docket.data_size += len(packed)
682 docket.parents = self.parents()
682 docket.parents = self.parents()
683 docket.tree_metadata = meta
683 docket.tree_metadata = meta
684 st.write(docket.serialize())
684 st.write(docket.serialize())
685 st.close()
685 st.close()
686 else:
686 else:
687 old_docket = self.docket
687 old_docket = self.docket
688 new_docket = docketmod.DirstateDocket.with_new_uuid(
688 new_docket = docketmod.DirstateDocket.with_new_uuid(
689 self.parents(), len(packed), meta
689 self.parents(), len(packed), meta
690 )
690 )
691 data_filename = new_docket.data_filename()
691 data_filename = new_docket.data_filename()
692 if tr:
692 if tr:
693 tr.add(data_filename, 0)
693 tr.add(data_filename, 0)
694 self._opener.write(data_filename, packed)
694 self._opener.write(data_filename, packed)
695 # Write the new docket after the new data file has been
695 # Write the new docket after the new data file has been
696 # written. Because `st` was opened with `atomictemp=True`,
696 # written. Because `st` was opened with `atomictemp=True`,
697 # the actual `.hg/dirstate` file is only affected on close.
697 # the actual `.hg/dirstate` file is only affected on close.
698 st.write(new_docket.serialize())
698 st.write(new_docket.serialize())
699 st.close()
699 st.close()
700 # Remove the old data file after the new docket pointing to
700 # Remove the old data file after the new docket pointing to
701 # the new data file was written.
701 # the new data file was written.
702 if old_docket.uuid:
702 if old_docket.uuid:
703 data_filename = old_docket.data_filename()
703 data_filename = old_docket.data_filename()
704 unlink = lambda _tr=None: self._opener.unlink(data_filename)
704 unlink = lambda _tr=None: self._opener.unlink(data_filename)
705 if tr:
705 if tr:
706 category = b"dirstate-v2-clean-" + old_docket.uuid
706 category = b"dirstate-v2-clean-" + old_docket.uuid
707 tr.addpostclose(category, unlink)
707 tr.addpostclose(category, unlink)
708 else:
708 else:
709 unlink()
709 unlink()
710 self._docket = new_docket
710 self._docket = new_docket
711 # Reload from the newly-written file
711 # Reload from the newly-written file
712 util.clearcachedproperty(self, b"_rustmap")
712 util.clearcachedproperty(self, b"_rustmap")
713 self._dirtyparents = False
713 self._dirtyparents = False
714
714
715 @propertycache
715 @propertycache
716 def filefoldmap(self):
716 def filefoldmap(self):
717 """Returns a dictionary mapping normalized case paths to their
717 """Returns a dictionary mapping normalized case paths to their
718 non-normalized versions.
718 non-normalized versions.
719 """
719 """
720 return self._rustmap.filefoldmapasdict()
720 return self._rustmap.filefoldmapasdict()
721
721
722 def hastrackeddir(self, d):
722 def hastrackeddir(self, d):
723 return self._rustmap.hastrackeddir(d)
723 return self._rustmap.hastrackeddir(d)
724
724
725 def hasdir(self, d):
725 def hasdir(self, d):
726 return self._rustmap.hasdir(d)
726 return self._rustmap.hasdir(d)
727
727
728 @propertycache
728 @propertycache
729 def identity(self):
729 def identity(self):
730 self._rustmap
730 self._rustmap
731 return self.identity
731 return self.identity
732
732
733 @property
733 @property
734 def nonnormalset(self):
734 def nonnormalset(self):
735 nonnorm = self._rustmap.non_normal_entries()
735 nonnorm = self._rustmap.non_normal_entries()
736 return nonnorm
736 return nonnorm
737
737
738 @propertycache
738 @propertycache
739 def otherparentset(self):
739 def otherparentset(self):
740 otherparents = self._rustmap.other_parent_entries()
740 otherparents = self._rustmap.other_parent_entries()
741 return otherparents
741 return otherparents
742
742
743 def non_normal_or_other_parent_paths(self):
743 def non_normal_or_other_parent_paths(self):
744 return self._rustmap.non_normal_or_other_parent_paths()
744 return self._rustmap.non_normal_or_other_parent_paths()
745
745
746 @propertycache
746 @propertycache
747 def dirfoldmap(self):
747 def dirfoldmap(self):
748 f = {}
748 f = {}
749 normcase = util.normcase
749 normcase = util.normcase
750 for name in self._rustmap.tracked_dirs():
750 for name in self._rustmap.tracked_dirs():
751 f[normcase(name)] = name
751 f[normcase(name)] = name
752 return f
752 return f
@@ -1,605 +1,613
1 # parsers.py - Python implementation of parsers.c
1 # parsers.py - Python implementation of parsers.c
2 #
2 #
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import struct
10 import struct
11 import zlib
11 import zlib
12
12
13 from ..node import (
13 from ..node import (
14 nullrev,
14 nullrev,
15 sha1nodeconstants,
15 sha1nodeconstants,
16 )
16 )
17 from ..thirdparty import attr
17 from ..thirdparty import attr
18 from .. import (
18 from .. import (
19 error,
19 error,
20 pycompat,
20 pycompat,
21 revlogutils,
21 revlogutils,
22 util,
22 util,
23 )
23 )
24
24
25 from ..revlogutils import nodemap as nodemaputil
25 from ..revlogutils import nodemap as nodemaputil
26 from ..revlogutils import constants as revlog_constants
26 from ..revlogutils import constants as revlog_constants
27
27
28 stringio = pycompat.bytesio
28 stringio = pycompat.bytesio
29
29
30
30
31 _pack = struct.pack
31 _pack = struct.pack
32 _unpack = struct.unpack
32 _unpack = struct.unpack
33 _compress = zlib.compress
33 _compress = zlib.compress
34 _decompress = zlib.decompress
34 _decompress = zlib.decompress
35
35
36
36
37 # a special value used internally for `size` if the file comes from the other parent
37 # a special value used internally for `size` if the file comes from the other parent
38 FROM_P2 = -2
38 FROM_P2 = -2
39
39
40 # a special value used internally for `size` if the file is modified/merged/added
40 # a special value used internally for `size` if the file is modified/merged/added
41 NONNORMAL = -1
41 NONNORMAL = -1
42
42
43 # a special value used internally for `time` if the time is ambiguous
44 # a special value used internally for `time` if the time is ambiguous
44 AMBIGUOUS_TIME = -1
44 AMBIGUOUS_TIME = -1
45
45
46
46
@attr.s(slots=True, init=False)
class DirstateItem(object):
    """A single dirstate entry.

    Holds the dirstate-v1 fields for one file:

    - state (one of 'n', 'a', 'r', 'm')
    - mode
    - size
    - mtime
    """

    _state = attr.ib()
    _mode = attr.ib()
    _size = attr.ib()
    _mtime = attr.ib()

    def __init__(self, state, mode, size, mtime):
        self._state = state
        self._mode = mode
        self._size = size
        self._mtime = mtime

    @classmethod
    def from_v1_data(cls, state, mode, size, mtime):
        """Build a new DirstateItem object from V1 data.

        Since the dirstate-v1 format is frozen, the signature of this function
        is not expected to change, unlike the __init__ one.
        """
        return cls(
            state=state,
            mode=mode,
            size=size,
            mtime=mtime,
        )

    def set_possibly_dirty(self):
        """Mark a file as "possibly dirty".

        The next status call will then have to actually inspect the file
        content to decide whether it is clean.
        """
        self._mtime = AMBIGUOUS_TIME

    def __getitem__(self, idx):
        # Tuple-style access is deprecated; steer callers towards the named
        # attributes instead.
        if idx in (0, -4):
            msg = b"do not use item[x], use item.state"
            util.nouideprecwarn(msg, b'6.0', stacklevel=2)
            return self._state
        elif idx in (1, -3):
            msg = b"do not use item[x], use item.mode"
            util.nouideprecwarn(msg, b'6.0', stacklevel=2)
            return self._mode
        elif idx in (2, -2):
            msg = b"do not use item[x], use item.size"
            util.nouideprecwarn(msg, b'6.0', stacklevel=2)
            return self._size
        elif idx in (3, -1):
            msg = b"do not use item[x], use item.mtime"
            util.nouideprecwarn(msg, b'6.0', stacklevel=2)
            return self._mtime
        raise IndexError(idx)

    @property
    def mode(self):
        return self._mode

    @property
    def size(self):
        return self._size

    @property
    def mtime(self):
        return self._mtime

    @property
    def state(self):
        """
        States are:
          n  normal
          m  needs merging
          r  marked for removal
          a  marked for addition

        XXX This "state" is a bit obscure and mostly a direct expression of
        the dirstate-v1 format. It would make sense to ultimately deprecate it
        in favor of the more "semantic" attributes.
        """
        return self._state

    @property
    def tracked(self):
        """True if the file is tracked in the working copy."""
        return self._state in b"nma"

    @property
    def added(self):
        """True if the file has been added."""
        return self._state == b'a'

    @property
    def merged(self):
        """True if the file has been merged.

        Should only be set if a merge is in progress in the dirstate.
        """
        return self._state == b'm'

    @property
    def from_p2(self):
        """True if the file was fetched from p2 during the current merge.

        This is only True while the file is currently tracked.

        Should only be set if a merge is in progress in the dirstate.
        """
        return self._state == b'n' and self._size == FROM_P2

    @property
    def from_p2_removed(self):
        """True if the file has been removed, but was "from_p2" initially.

        This property seems like an abstraction leakage and should probably be
        dealt with in this class (or maybe the dirstatemap) directly.
        """
        return self._state == b'r' and self._size == FROM_P2

    @property
    def removed(self):
        """True if the file has been removed."""
        return self._state == b'r'

    @property
    def merged_removed(self):
        """True if the file has been removed, but was "merged" initially.

        This property seems like an abstraction leakage and should probably be
        dealt with in this class (or maybe the dirstatemap) directly.
        """
        return self._state == b'r' and self._size == NONNORMAL

    @property
    def dm_nonnormal(self):
        """True if the entry is non-normal in the dirstatemap sense.

        Only the dirstatemap code has any reason to use this.
        """
        return self.state != b'n' or self.mtime == AMBIGUOUS_TIME

    def v1_state(self):
        """return a "state" suitable for v1 serialization"""
        return self._state

    def v1_mode(self):
        """return a "mode" suitable for v1 serialization"""
        return self._mode

    def v1_size(self):
        """return a "size" suitable for v1 serialization"""
        return self._size

    def v1_mtime(self):
        """return a "mtime" suitable for v1 serialization"""
        return self._mtime

    def need_delay(self, now):
        """True if the stored mtime would be ambiguous with the current time"""
        return self._state == b'n' and self._mtime == now
209
217
210
218
def gettype(q):
    """Extract the entry type stored in the low 16 bits of `q`."""
    low_bits = q & 0xFFFF
    return int(low_bits)
213
221
214
222
class BaseIndexObject(object):
    # Can this index be handed to an algorithm implemented in Rust?
    rust_ext_compat = 0
    # Format of an index entry according to Python's `struct` language
    index_format = revlog_constants.INDEX_ENTRY_V1
    # Size of a C unsigned long long int, platform independent
    big_int_size = struct.calcsize(b'>Q')
    # Size of a C long int, platform independent
    int_size = struct.calcsize(b'>i')
    # An empty index entry, used as a default value to be overridden, or nullrev
    null_item = (
        0,
        0,
        0,
        -1,
        -1,
        -1,
        -1,
        sha1nodeconstants.nullid,
        0,
        0,
        revlog_constants.COMP_MODE_INLINE,
        revlog_constants.COMP_MODE_INLINE,
    )

    @util.propertycache
    def entry_size(self):
        return self.index_format.size

    @property
    def nodemap(self):
        msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
        util.nouideprecwarn(msg, b'5.3', stacklevel=2)
        return self._nodemap

    @util.propertycache
    def _nodemap(self):
        # Build the node -> rev mapping lazily, walking revisions in order so
        # that a later duplicate node wins (matching the previous behavior).
        nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
        for rev in range(len(self)):
            nodemap[self[rev][7]] = rev
        return nodemap

    def has_node(self, node):
        """return True if the node exist in the index"""
        return node in self._nodemap

    def rev(self, node):
        """return a revision for a node

        If the node is unknown, raise a RevlogError"""
        return self._nodemap[node]

    def get_rev(self, node):
        """return a revision for a node

        If the node is unknown, return None"""
        return self._nodemap.get(node)

    def _stripnodes(self, start):
        # Only bother when the cached nodemap has actually been built.
        if '_nodemap' in vars(self):
            for rev in range(start, len(self)):
                del self._nodemap[self[rev][7]]

    def clearcaches(self):
        self.__dict__.pop('_nodemap', None)

    def __len__(self):
        return self._lgt + len(self._extra)

    def append(self, tup):
        if '_nodemap' in vars(self):
            self._nodemap[tup[7]] = len(self)
        packed = self._pack_entry(len(self), tup)
        self._extra.append(packed)

    def _pack_entry(self, rev, entry):
        assert entry[8] == 0
        assert entry[9] == 0
        return self.index_format.pack(*entry[:8])

    def _check_index(self, i):
        if not isinstance(i, int):
            raise TypeError(b"expecting int indexes")
        if i < 0 or i >= len(self):
            raise IndexError

    def __getitem__(self, i):
        if i == -1:
            return self.null_item
        self._check_index(i)
        if i >= self._lgt:
            data = self._extra[i - self._lgt]
        else:
            index = self._calculate_index(i)
            data = self._data[index : index + self.entry_size]
        r = self._unpack_entry(i, data)
        if self._lgt and i == 0:
            offset = revlogutils.offset_type(0, gettype(r[0]))
            r = (offset,) + r[1:]
        return r

    def _unpack_entry(self, rev, data):
        fields = self.index_format.unpack(data)
        return fields + (
            0,
            0,
            revlog_constants.COMP_MODE_INLINE,
            revlog_constants.COMP_MODE_INLINE,
        )

    def pack_header(self, header):
        """pack header information as binary"""
        v_fmt = revlog_constants.INDEX_HEADER
        return v_fmt.pack(header)

    def entry_binary(self, rev):
        """return the raw binary string representing a revision"""
        entry = self[rev]
        p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
        if rev == 0:
            p = p[revlog_constants.INDEX_HEADER.size :]
        return p
340
348
341
349
class IndexObject(BaseIndexObject):
    def __init__(self, data):
        # The buffer must hold a whole number of fixed-size entries.
        assert len(data) % self.entry_size == 0, (
            len(data),
            self.entry_size,
            len(data) % self.entry_size,
        )
        self._data = data
        self._lgt = len(data) // self.entry_size
        self._extra = []

    def _calculate_index(self, i):
        return i * self.entry_size

    def __delitem__(self, i):
        supported = (
            isinstance(i, slice) and i.stop == -1 and i.step is None
        )
        if not supported:
            raise ValueError(b"deleting slices only supports a:-1 with step 1")
        start = i.start
        self._check_index(start)
        self._stripnodes(start)
        if start < self._lgt:
            self._data = self._data[: start * self.entry_size]
            self._lgt = start
            self._extra = []
        else:
            self._extra = self._extra[: start - self._lgt]
368
376
369
377
class PersistentNodeMapIndexObject(IndexObject):
    """a Debug oriented class to test persistent nodemap

    We need a simple python object to test API and higher level behavior. See
    the Rust implementation for more serious usage. This should be used only
    through the dedicated `devel.persistent-nodemap` config.
    """

    def nodemap_data_all(self):
        """Return bytes containing a full serialization of a nodemap

        The nodemap should be valid for the full set of revisions in the
        index."""
        return nodemaputil.persistent_data(self)

    def nodemap_data_incremental(self):
        """Return bytes containing an incremental update to persistent nodemap

        This contains the data for an append-only update of the data provided
        in the last call to `update_nodemap_data`.
        """
        if self._nm_root is None:
            return None
        docket = self._nm_docket
        changed, data = nodemaputil.update_persistent_data(
            self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
        )
        # The incremental state is single-shot: clear it once consumed.
        self._nm_root = self._nm_max_idx = self._nm_docket = None
        return docket, changed, data

    def update_nodemap_data(self, docket, nm_data):
        """provide full block of persisted binary data for a nodemap

        The data are expected to come from disk. See `nodemap_data_all` for a
        producer of such data."""
        # A None payload intentionally leaves the current state untouched.
        if nm_data is not None:
            self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
            if self._nm_root:
                self._nm_docket = docket
            else:
                self._nm_root = self._nm_max_idx = self._nm_docket = None
412
420
413
421
class InlinedIndexObject(BaseIndexObject):
    def __init__(self, data, inline=0):
        self._data = data
        # First pass counts the entries, second pass records their offsets.
        self._lgt = self._inline_scan(None)
        self._inline_scan(self._lgt)
        self._extra = []

    def _inline_scan(self, lgt):
        offset = 0
        if lgt is not None:
            self._offsets = [0] * lgt
        seen = 0
        while offset <= len(self._data) - self.entry_size:
            start = offset + self.big_int_size
            # Compressed hunk length lives right after the 64-bit offset field.
            (hunk_len,) = struct.unpack(
                b'>i',
                self._data[start : start + self.int_size],
            )
            if lgt is not None:
                self._offsets[seen] = offset
            seen += 1
            offset += self.entry_size + hunk_len
        if offset != len(self._data):
            raise ValueError(b"corrupted data")
        return seen

    def __delitem__(self, i):
        supported = (
            isinstance(i, slice) and i.stop == -1 and i.step is None
        )
        if not supported:
            raise ValueError(b"deleting slices only supports a:-1 with step 1")
        start = i.start
        self._check_index(start)
        self._stripnodes(start)
        if start < self._lgt:
            self._offsets = self._offsets[:start]
            self._lgt = start
            self._extra = []
        else:
            self._extra = self._extra[: start - self._lgt]

    def _calculate_index(self, i):
        return self._offsets[i]
455
463
456
464
def parse_index2(data, inline, revlogv2=False):
    """Build an index object from raw revlog index `data`.

    Returns an (index, cache) pair; the cache is only populated for inline
    revlogs, where data and index share a single file.
    """
    if inline:
        return InlinedIndexObject(data, inline), (0, data)
    cls = IndexObject2 if revlogv2 else IndexObject
    return cls(data), None
463
471
464
472
def parse_index_cl_v2(data):
    """Build a changelog-v2 index object from raw index `data`."""
    return IndexChangelogV2(data), None
467
475
468
476
class IndexObject2(IndexObject):
    index_format = revlog_constants.INDEX_ENTRY_V2

    def replace_sidedata_info(
        self,
        rev,
        sidedata_offset,
        sidedata_length,
        offset_flags,
        compression_mode,
    ):
        """
        Replace an existing index entry's sidedata offset and length with new
        ones.
        This cannot be used outside of the context of sidedata rewriting,
        inside the transaction that creates the revision `rev`.
        """
        if rev < 0:
            raise KeyError
        self._check_index(rev)
        if rev < self._lgt:
            msg = b"cannot rewrite entries outside of this transaction"
            raise KeyError(msg)
        entry = list(self[rev])
        entry[0] = offset_flags
        entry[8] = sidedata_offset
        entry[9] = sidedata_length
        entry[11] = compression_mode
        self._extra[rev - self._lgt] = self._pack_entry(rev, tuple(entry))

    def _unpack_entry(self, rev, data):
        fields = self.index_format.unpack(data)
        entry = fields[:10]
        # The two 2-bit compression modes share one packed byte.
        data_comp = fields[10] & 3
        sidedata_comp = (fields[10] & (3 << 2)) >> 2
        return entry + (data_comp, sidedata_comp)

    def _pack_entry(self, rev, entry):
        data = entry[:10]
        data_comp = entry[10] & 3
        sidedata_comp = (entry[11] & 3) << 2
        data += (data_comp | sidedata_comp,)
        return self.index_format.pack(*data)

    def entry_binary(self, rev):
        """return the raw binary string representing a revision"""
        return self._pack_entry(rev, self[rev])

    def pack_header(self, header):
        """pack header information as binary"""
        msg = 'version header should go in the docket, not the index: %d'
        msg %= header
        raise error.ProgrammingError(msg)
527
535
528
536
class IndexChangelogV2(IndexObject2):
    index_format = revlog_constants.INDEX_ENTRY_CL_V2

    def _unpack_entry(self, rev, data, r=True):
        items = self.index_format.unpack(data)
        # The changelog stores no parent-link fields; both are the rev itself.
        entry = items[:3] + (rev, rev) + items[3:8]
        data_comp = items[8] & 3
        sidedata_comp = (items[8] >> 2) & 3
        return entry + (data_comp, sidedata_comp)

    def _pack_entry(self, rev, entry):
        assert entry[3] == rev, entry[3]
        assert entry[4] == rev, entry[4]
        data = entry[:3] + entry[5:10]
        data_comp = entry[10] & 3
        sidedata_comp = (entry[11] & 3) << 2
        data += (data_comp | sidedata_comp,)
        return self.index_format.pack(*data)
547
555
548
556
def parse_index_devel_nodemap(data, inline):
    """like parse_index2, but always return a PersistentNodeMapIndexObject"""
    return PersistentNodeMapIndexObject(data), None
552
560
553
561
def parse_dirstate(dmap, copymap, st):
    """Parse a dirstate-v1 blob `st`, filling `dmap` and `copymap` in place.

    Returns the two parent nodeids stored at the front of the data.
    """
    parents = [st[:20], st[20:40]]
    # dereference fields so they will be local in loop
    header_fmt = b">cllll"
    header_size = struct.calcsize(header_fmt)
    cursor = 40
    total = len(st)

    # the inner loop
    while cursor < total:
        body_start = cursor + header_size
        # a literal format here is faster than a variable
        entry = _unpack(b">cllll", st[cursor:body_start])
        cursor = body_start + entry[4]
        fname = st[body_start:cursor]
        if b'\0' in fname:
            fname, copy_source = fname.split(b'\0')
            copymap[fname] = copy_source
        dmap[fname] = DirstateItem.from_v1_data(*entry[:4])
    return parents
573
581
574
582
def pack_dirstate(dmap, copymap, pl, now):
    """Serialize ``dmap``/``copymap`` into a v1 dirstate blob.

    ``pl`` is the pair of parent nodes written first; ``now`` is the
    current wall-clock time in seconds, used to invalidate mtimes recorded
    within the same second as this write.
    """
    now = int(now)
    buf = stringio()
    buf.write(b"".join(pl))
    for fname, item in pycompat.iteritems(dmap):
        if item.need_delay(now):
            # The file was last modified "simultaneously" with this
            # dirstate write (same second, on filesystems with 1s mtime
            # granularity) — common for a few files on 'update'.  The user
            # could still change the file within that second without
            # changing its size, so drop the recorded mtime and force
            # future 'status' calls to compare contents when sizes match,
            # preventing such files from being mistaken for clean.
            item.set_possibly_dirty()

        if fname in copymap:
            # Copy source rides along after a NUL separator.
            fname = b"%s\0%s" % (fname, copymap[fname])
        record = _pack(
            b">cllll",
            item.v1_state(),
            item.v1_mode(),
            item.v1_size(),
            item.v1_mtime(),
            len(fname),
        )
        buf.write(record)
        buf.write(fname)
    return buf.getvalue()
General Comments 0
You need to be logged in to leave comments. Login now