##// END OF EJS Templates
dirstate-item: introduce a `dm_otherparent` property...
marmoute -
r48486:e43128ee default
parent child Browse files
Show More
@@ -1,979 +1,989 b''
1 /*
1 /*
2 parsers.c - efficient content parsing
2 parsers.c - efficient content parsing
3
3
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
4 Copyright 2008 Olivia Mackall <olivia@selenic.com> and others
5
5
6 This software may be used and distributed according to the terms of
6 This software may be used and distributed according to the terms of
7 the GNU General Public License, incorporated herein by reference.
7 the GNU General Public License, incorporated herein by reference.
8 */
8 */
9
9
10 #define PY_SSIZE_T_CLEAN
10 #define PY_SSIZE_T_CLEAN
11 #include <Python.h>
11 #include <Python.h>
12 #include <ctype.h>
12 #include <ctype.h>
13 #include <stddef.h>
13 #include <stddef.h>
14 #include <string.h>
14 #include <string.h>
15
15
16 #include "bitmanipulation.h"
16 #include "bitmanipulation.h"
17 #include "charencode.h"
17 #include "charencode.h"
18 #include "util.h"
18 #include "util.h"
19
19
20 #ifdef IS_PY3K
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 support is fully
22 * 3 to compile. We should remove this once Python 3 support is fully
23 * supported and proper types are used in the extensions themselves. */
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
27 #define PyInt_AsLong PyLong_AsLong
28 #endif
28 #endif
29
29
static const char *const versionerrortext = "Python minor version mismatch";

/* Sentinel values used by the v1 dirstate on-disk format. */
static const int dirstate_v1_from_p2 = -2;   /* in "size": entry comes from p2 */
static const int dirstate_v1_nonnormal = -1; /* in "size": entry is non-normal */
static const int ambiguous_time = -1;        /* in "mtime": must be re-checked */
35
35
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
36 static PyObject *dict_new_presized(PyObject *self, PyObject *args)
37 {
37 {
38 Py_ssize_t expected_size;
38 Py_ssize_t expected_size;
39
39
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
40 if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) {
41 return NULL;
41 return NULL;
42 }
42 }
43
43
44 return _dict_new_presized(expected_size);
44 return _dict_new_presized(expected_size);
45 }
45 }
46
46
47 static inline dirstateItemObject *make_dirstate_item(char state, int mode,
47 static inline dirstateItemObject *make_dirstate_item(char state, int mode,
48 int size, int mtime)
48 int size, int mtime)
49 {
49 {
50 dirstateItemObject *t =
50 dirstateItemObject *t =
51 PyObject_New(dirstateItemObject, &dirstateItemType);
51 PyObject_New(dirstateItemObject, &dirstateItemType);
52 if (!t) {
52 if (!t) {
53 return NULL;
53 return NULL;
54 }
54 }
55 t->state = state;
55 t->state = state;
56 t->mode = mode;
56 t->mode = mode;
57 t->size = size;
57 t->size = size;
58 t->mtime = mtime;
58 t->mtime = mtime;
59 return t;
59 return t;
60 }
60 }
61
61
62 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
62 static PyObject *dirstate_item_new(PyTypeObject *subtype, PyObject *args,
63 PyObject *kwds)
63 PyObject *kwds)
64 {
64 {
65 /* We do all the initialization here and not a tp_init function because
65 /* We do all the initialization here and not a tp_init function because
66 * dirstate_item is immutable. */
66 * dirstate_item is immutable. */
67 dirstateItemObject *t;
67 dirstateItemObject *t;
68 char state;
68 char state;
69 int size, mode, mtime;
69 int size, mode, mtime;
70 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
70 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
71 return NULL;
71 return NULL;
72 }
72 }
73
73
74 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
74 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
75 if (!t) {
75 if (!t) {
76 return NULL;
76 return NULL;
77 }
77 }
78 t->state = state;
78 t->state = state;
79 t->mode = mode;
79 t->mode = mode;
80 t->size = size;
80 t->size = size;
81 t->mtime = mtime;
81 t->mtime = mtime;
82
82
83 return (PyObject *)t;
83 return (PyObject *)t;
84 }
84 }
85
85
86 static void dirstate_item_dealloc(PyObject *o)
86 static void dirstate_item_dealloc(PyObject *o)
87 {
87 {
88 PyObject_Del(o);
88 PyObject_Del(o);
89 }
89 }
90
90
91 static Py_ssize_t dirstate_item_length(PyObject *o)
91 static Py_ssize_t dirstate_item_length(PyObject *o)
92 {
92 {
93 return 4;
93 return 4;
94 }
94 }
95
95
96 static PyObject *dirstate_item_item(PyObject *o, Py_ssize_t i)
96 static PyObject *dirstate_item_item(PyObject *o, Py_ssize_t i)
97 {
97 {
98 dirstateItemObject *t = (dirstateItemObject *)o;
98 dirstateItemObject *t = (dirstateItemObject *)o;
99 switch (i) {
99 switch (i) {
100 case 0:
100 case 0:
101 return PyBytes_FromStringAndSize(&t->state, 1);
101 return PyBytes_FromStringAndSize(&t->state, 1);
102 case 1:
102 case 1:
103 return PyInt_FromLong(t->mode);
103 return PyInt_FromLong(t->mode);
104 case 2:
104 case 2:
105 return PyInt_FromLong(t->size);
105 return PyInt_FromLong(t->size);
106 case 3:
106 case 3:
107 return PyInt_FromLong(t->mtime);
107 return PyInt_FromLong(t->mtime);
108 default:
108 default:
109 PyErr_SetString(PyExc_IndexError, "index out of range");
109 PyErr_SetString(PyExc_IndexError, "index out of range");
110 return NULL;
110 return NULL;
111 }
111 }
112 }
112 }
113
113
114 static PySequenceMethods dirstate_item_sq = {
114 static PySequenceMethods dirstate_item_sq = {
115 dirstate_item_length, /* sq_length */
115 dirstate_item_length, /* sq_length */
116 0, /* sq_concat */
116 0, /* sq_concat */
117 0, /* sq_repeat */
117 0, /* sq_repeat */
118 dirstate_item_item, /* sq_item */
118 dirstate_item_item, /* sq_item */
119 0, /* sq_ass_item */
119 0, /* sq_ass_item */
120 0, /* sq_contains */
120 0, /* sq_contains */
121 0, /* sq_inplace_concat */
121 0, /* sq_inplace_concat */
122 0 /* sq_inplace_repeat */
122 0 /* sq_inplace_repeat */
123 };
123 };
124
124
125 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
125 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
126 {
126 {
127 return PyBytes_FromStringAndSize(&self->state, 1);
127 return PyBytes_FromStringAndSize(&self->state, 1);
128 };
128 };
129
129
130 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
130 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
131 {
131 {
132 return PyInt_FromLong(self->mode);
132 return PyInt_FromLong(self->mode);
133 };
133 };
134
134
135 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
135 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
136 {
136 {
137 return PyInt_FromLong(self->size);
137 return PyInt_FromLong(self->size);
138 };
138 };
139
139
140 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
140 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
141 {
141 {
142 return PyInt_FromLong(self->mtime);
142 return PyInt_FromLong(self->mtime);
143 };
143 };
144
144
145 static PyObject *dm_nonnormal(dirstateItemObject *self)
145 static PyObject *dm_nonnormal(dirstateItemObject *self)
146 {
146 {
147 if (self->state != 'n' || self->mtime == ambiguous_time) {
147 if (self->state != 'n' || self->mtime == ambiguous_time) {
148 Py_RETURN_TRUE;
148 Py_RETURN_TRUE;
149 } else {
149 } else {
150 Py_RETURN_FALSE;
150 Py_RETURN_FALSE;
151 }
151 }
152 };
152 };
153 static PyObject *dm_otherparent(dirstateItemObject *self)
154 {
155 if (self->size == dirstate_v1_from_p2) {
156 Py_RETURN_TRUE;
157 } else {
158 Py_RETURN_FALSE;
159 }
160 };
153
161
154 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
162 static PyObject *dirstate_item_need_delay(dirstateItemObject *self,
155 PyObject *value)
163 PyObject *value)
156 {
164 {
157 long now;
165 long now;
158 if (!pylong_to_long(value, &now)) {
166 if (!pylong_to_long(value, &now)) {
159 return NULL;
167 return NULL;
160 }
168 }
161 if (self->state == 'n' && self->mtime == now) {
169 if (self->state == 'n' && self->mtime == now) {
162 Py_RETURN_TRUE;
170 Py_RETURN_TRUE;
163 } else {
171 } else {
164 Py_RETURN_FALSE;
172 Py_RETURN_FALSE;
165 }
173 }
166 };
174 };
167
175
168 /* This will never change since it's bound to V1, unlike `make_dirstate_item`
176 /* This will never change since it's bound to V1, unlike `make_dirstate_item`
169 */
177 */
170 static inline dirstateItemObject *
178 static inline dirstateItemObject *
171 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
179 dirstate_item_from_v1_data(char state, int mode, int size, int mtime)
172 {
180 {
173 dirstateItemObject *t =
181 dirstateItemObject *t =
174 PyObject_New(dirstateItemObject, &dirstateItemType);
182 PyObject_New(dirstateItemObject, &dirstateItemType);
175 if (!t) {
183 if (!t) {
176 return NULL;
184 return NULL;
177 }
185 }
178 t->state = state;
186 t->state = state;
179 t->mode = mode;
187 t->mode = mode;
180 t->size = size;
188 t->size = size;
181 t->mtime = mtime;
189 t->mtime = mtime;
182 return t;
190 return t;
183 }
191 }
184
192
185 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
193 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
186 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
194 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
187 PyObject *args)
195 PyObject *args)
188 {
196 {
189 /* We do all the initialization here and not a tp_init function because
197 /* We do all the initialization here and not a tp_init function because
190 * dirstate_item is immutable. */
198 * dirstate_item is immutable. */
191 dirstateItemObject *t;
199 dirstateItemObject *t;
192 char state;
200 char state;
193 int size, mode, mtime;
201 int size, mode, mtime;
194 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
202 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
195 return NULL;
203 return NULL;
196 }
204 }
197
205
198 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
206 t = (dirstateItemObject *)subtype->tp_alloc(subtype, 1);
199 if (!t) {
207 if (!t) {
200 return NULL;
208 return NULL;
201 }
209 }
202 t->state = state;
210 t->state = state;
203 t->mode = mode;
211 t->mode = mode;
204 t->size = size;
212 t->size = size;
205 t->mtime = mtime;
213 t->mtime = mtime;
206
214
207 return (PyObject *)t;
215 return (PyObject *)t;
208 };
216 };
209
217
210 /* This means the next status call will have to actually check its content
218 /* This means the next status call will have to actually check its content
211 to make sure it is correct. */
219 to make sure it is correct. */
212 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
220 static PyObject *dirstate_item_set_possibly_dirty(dirstateItemObject *self)
213 {
221 {
214 self->mtime = ambiguous_time;
222 self->mtime = ambiguous_time;
215 Py_RETURN_NONE;
223 Py_RETURN_NONE;
216 }
224 }
217
225
218 static PyMethodDef dirstate_item_methods[] = {
226 static PyMethodDef dirstate_item_methods[] = {
219 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
227 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
220 "return a \"state\" suitable for v1 serialization"},
228 "return a \"state\" suitable for v1 serialization"},
221 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
229 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
222 "return a \"mode\" suitable for v1 serialization"},
230 "return a \"mode\" suitable for v1 serialization"},
223 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
231 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
224 "return a \"size\" suitable for v1 serialization"},
232 "return a \"size\" suitable for v1 serialization"},
225 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
233 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
226 "return a \"mtime\" suitable for v1 serialization"},
234 "return a \"mtime\" suitable for v1 serialization"},
227 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
235 {"need_delay", (PyCFunction)dirstate_item_need_delay, METH_O,
228 "True if the stored mtime would be ambiguous with the current time"},
236 "True if the stored mtime would be ambiguous with the current time"},
229 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth, METH_O,
237 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth, METH_O,
230 "build a new DirstateItem object from V1 data"},
238 "build a new DirstateItem object from V1 data"},
231 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
239 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
232 METH_NOARGS, "mark a file as \"possibly dirty\""},
240 METH_NOARGS, "mark a file as \"possibly dirty\""},
233 {"dm_nonnormal", (PyCFunction)dm_nonnormal, METH_NOARGS,
241 {"dm_nonnormal", (PyCFunction)dm_nonnormal, METH_NOARGS,
234 "True is the entry is non-normal in the dirstatemap sense"},
242 "True is the entry is non-normal in the dirstatemap sense"},
243 {"dm_otherparent", (PyCFunction)dm_otherparent, METH_NOARGS,
244 "True is the entry is `otherparent` in the dirstatemap sense"},
235 {NULL} /* Sentinel */
245 {NULL} /* Sentinel */
236 };
246 };
237
247
238 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
248 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
239 {
249 {
240 return PyInt_FromLong(self->mode);
250 return PyInt_FromLong(self->mode);
241 };
251 };
242
252
243 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
253 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
244 {
254 {
245 return PyInt_FromLong(self->size);
255 return PyInt_FromLong(self->size);
246 };
256 };
247
257
248 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
258 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
249 {
259 {
250 return PyInt_FromLong(self->mtime);
260 return PyInt_FromLong(self->mtime);
251 };
261 };
252
262
253 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
263 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
254 {
264 {
255 return PyBytes_FromStringAndSize(&self->state, 1);
265 return PyBytes_FromStringAndSize(&self->state, 1);
256 };
266 };
257
267
258 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
268 static PyObject *dirstate_item_get_tracked(dirstateItemObject *self)
259 {
269 {
260 if (self->state == 'a' || self->state == 'm' || self->state == 'n') {
270 if (self->state == 'a' || self->state == 'm' || self->state == 'n') {
261 Py_RETURN_TRUE;
271 Py_RETURN_TRUE;
262 } else {
272 } else {
263 Py_RETURN_FALSE;
273 Py_RETURN_FALSE;
264 }
274 }
265 };
275 };
266
276
267 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
277 static PyObject *dirstate_item_get_added(dirstateItemObject *self)
268 {
278 {
269 if (self->state == 'a') {
279 if (self->state == 'a') {
270 Py_RETURN_TRUE;
280 Py_RETURN_TRUE;
271 } else {
281 } else {
272 Py_RETURN_FALSE;
282 Py_RETURN_FALSE;
273 }
283 }
274 };
284 };
275
285
276 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
286 static PyObject *dirstate_item_get_merged(dirstateItemObject *self)
277 {
287 {
278 if (self->state == 'm') {
288 if (self->state == 'm') {
279 Py_RETURN_TRUE;
289 Py_RETURN_TRUE;
280 } else {
290 } else {
281 Py_RETURN_FALSE;
291 Py_RETURN_FALSE;
282 }
292 }
283 };
293 };
284
294
285 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
295 static PyObject *dirstate_item_get_merged_removed(dirstateItemObject *self)
286 {
296 {
287 if (self->state == 'r' && self->size == dirstate_v1_nonnormal) {
297 if (self->state == 'r' && self->size == dirstate_v1_nonnormal) {
288 Py_RETURN_TRUE;
298 Py_RETURN_TRUE;
289 } else {
299 } else {
290 Py_RETURN_FALSE;
300 Py_RETURN_FALSE;
291 }
301 }
292 };
302 };
293
303
294 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
304 static PyObject *dirstate_item_get_from_p2(dirstateItemObject *self)
295 {
305 {
296 if (self->state == 'n' && self->size == dirstate_v1_from_p2) {
306 if (self->state == 'n' && self->size == dirstate_v1_from_p2) {
297 Py_RETURN_TRUE;
307 Py_RETURN_TRUE;
298 } else {
308 } else {
299 Py_RETURN_FALSE;
309 Py_RETURN_FALSE;
300 }
310 }
301 };
311 };
302
312
303 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
313 static PyObject *dirstate_item_get_from_p2_removed(dirstateItemObject *self)
304 {
314 {
305 if (self->state == 'r' && self->size == dirstate_v1_from_p2) {
315 if (self->state == 'r' && self->size == dirstate_v1_from_p2) {
306 Py_RETURN_TRUE;
316 Py_RETURN_TRUE;
307 } else {
317 } else {
308 Py_RETURN_FALSE;
318 Py_RETURN_FALSE;
309 }
319 }
310 };
320 };
311
321
312 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
322 static PyObject *dirstate_item_get_removed(dirstateItemObject *self)
313 {
323 {
314 if (self->state == 'r') {
324 if (self->state == 'r') {
315 Py_RETURN_TRUE;
325 Py_RETURN_TRUE;
316 } else {
326 } else {
317 Py_RETURN_FALSE;
327 Py_RETURN_FALSE;
318 }
328 }
319 };
329 };
320
330
321 static PyGetSetDef dirstate_item_getset[] = {
331 static PyGetSetDef dirstate_item_getset[] = {
322 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
332 {"mode", (getter)dirstate_item_get_mode, NULL, "mode", NULL},
323 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
333 {"size", (getter)dirstate_item_get_size, NULL, "size", NULL},
324 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
334 {"mtime", (getter)dirstate_item_get_mtime, NULL, "mtime", NULL},
325 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
335 {"state", (getter)dirstate_item_get_state, NULL, "state", NULL},
326 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
336 {"tracked", (getter)dirstate_item_get_tracked, NULL, "tracked", NULL},
327 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
337 {"added", (getter)dirstate_item_get_added, NULL, "added", NULL},
328 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
338 {"merged_removed", (getter)dirstate_item_get_merged_removed, NULL,
329 "merged_removed", NULL},
339 "merged_removed", NULL},
330 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
340 {"merged", (getter)dirstate_item_get_merged, NULL, "merged", NULL},
331 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
341 {"from_p2_removed", (getter)dirstate_item_get_from_p2_removed, NULL,
332 "from_p2_removed", NULL},
342 "from_p2_removed", NULL},
333 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
343 {"from_p2", (getter)dirstate_item_get_from_p2, NULL, "from_p2", NULL},
334 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
344 {"removed", (getter)dirstate_item_get_removed, NULL, "removed", NULL},
335 {NULL} /* Sentinel */
345 {NULL} /* Sentinel */
336 };
346 };
337
347
338 PyTypeObject dirstateItemType = {
348 PyTypeObject dirstateItemType = {
339 PyVarObject_HEAD_INIT(NULL, 0) /* header */
349 PyVarObject_HEAD_INIT(NULL, 0) /* header */
340 "dirstate_tuple", /* tp_name */
350 "dirstate_tuple", /* tp_name */
341 sizeof(dirstateItemObject), /* tp_basicsize */
351 sizeof(dirstateItemObject), /* tp_basicsize */
342 0, /* tp_itemsize */
352 0, /* tp_itemsize */
343 (destructor)dirstate_item_dealloc, /* tp_dealloc */
353 (destructor)dirstate_item_dealloc, /* tp_dealloc */
344 0, /* tp_print */
354 0, /* tp_print */
345 0, /* tp_getattr */
355 0, /* tp_getattr */
346 0, /* tp_setattr */
356 0, /* tp_setattr */
347 0, /* tp_compare */
357 0, /* tp_compare */
348 0, /* tp_repr */
358 0, /* tp_repr */
349 0, /* tp_as_number */
359 0, /* tp_as_number */
350 &dirstate_item_sq, /* tp_as_sequence */
360 &dirstate_item_sq, /* tp_as_sequence */
351 0, /* tp_as_mapping */
361 0, /* tp_as_mapping */
352 0, /* tp_hash */
362 0, /* tp_hash */
353 0, /* tp_call */
363 0, /* tp_call */
354 0, /* tp_str */
364 0, /* tp_str */
355 0, /* tp_getattro */
365 0, /* tp_getattro */
356 0, /* tp_setattro */
366 0, /* tp_setattro */
357 0, /* tp_as_buffer */
367 0, /* tp_as_buffer */
358 Py_TPFLAGS_DEFAULT, /* tp_flags */
368 Py_TPFLAGS_DEFAULT, /* tp_flags */
359 "dirstate tuple", /* tp_doc */
369 "dirstate tuple", /* tp_doc */
360 0, /* tp_traverse */
370 0, /* tp_traverse */
361 0, /* tp_clear */
371 0, /* tp_clear */
362 0, /* tp_richcompare */
372 0, /* tp_richcompare */
363 0, /* tp_weaklistoffset */
373 0, /* tp_weaklistoffset */
364 0, /* tp_iter */
374 0, /* tp_iter */
365 0, /* tp_iternext */
375 0, /* tp_iternext */
366 dirstate_item_methods, /* tp_methods */
376 dirstate_item_methods, /* tp_methods */
367 0, /* tp_members */
377 0, /* tp_members */
368 dirstate_item_getset, /* tp_getset */
378 dirstate_item_getset, /* tp_getset */
369 0, /* tp_base */
379 0, /* tp_base */
370 0, /* tp_dict */
380 0, /* tp_dict */
371 0, /* tp_descr_get */
381 0, /* tp_descr_get */
372 0, /* tp_descr_set */
382 0, /* tp_descr_set */
373 0, /* tp_dictoffset */
383 0, /* tp_dictoffset */
374 0, /* tp_init */
384 0, /* tp_init */
375 0, /* tp_alloc */
385 0, /* tp_alloc */
376 dirstate_item_new, /* tp_new */
386 dirstate_item_new, /* tp_new */
377 };
387 };
378
388
379 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
389 static PyObject *parse_dirstate(PyObject *self, PyObject *args)
380 {
390 {
381 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
391 PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
382 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
392 PyObject *fname = NULL, *cname = NULL, *entry = NULL;
383 char state, *cur, *str, *cpos;
393 char state, *cur, *str, *cpos;
384 int mode, size, mtime;
394 int mode, size, mtime;
385 unsigned int flen, pos = 40;
395 unsigned int flen, pos = 40;
386 Py_ssize_t len = 40;
396 Py_ssize_t len = 40;
387 Py_ssize_t readlen;
397 Py_ssize_t readlen;
388
398
389 if (!PyArg_ParseTuple(
399 if (!PyArg_ParseTuple(
390 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
400 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
391 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
401 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
392 goto quit;
402 goto quit;
393 }
403 }
394
404
395 len = readlen;
405 len = readlen;
396
406
397 /* read parents */
407 /* read parents */
398 if (len < 40) {
408 if (len < 40) {
399 PyErr_SetString(PyExc_ValueError,
409 PyErr_SetString(PyExc_ValueError,
400 "too little data for parents");
410 "too little data for parents");
401 goto quit;
411 goto quit;
402 }
412 }
403
413
404 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
414 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
405 str + 20, (Py_ssize_t)20);
415 str + 20, (Py_ssize_t)20);
406 if (!parents) {
416 if (!parents) {
407 goto quit;
417 goto quit;
408 }
418 }
409
419
410 /* read filenames */
420 /* read filenames */
411 while (pos >= 40 && pos < len) {
421 while (pos >= 40 && pos < len) {
412 if (pos + 17 > len) {
422 if (pos + 17 > len) {
413 PyErr_SetString(PyExc_ValueError,
423 PyErr_SetString(PyExc_ValueError,
414 "overflow in dirstate");
424 "overflow in dirstate");
415 goto quit;
425 goto quit;
416 }
426 }
417 cur = str + pos;
427 cur = str + pos;
418 /* unpack header */
428 /* unpack header */
419 state = *cur;
429 state = *cur;
420 mode = getbe32(cur + 1);
430 mode = getbe32(cur + 1);
421 size = getbe32(cur + 5);
431 size = getbe32(cur + 5);
422 mtime = getbe32(cur + 9);
432 mtime = getbe32(cur + 9);
423 flen = getbe32(cur + 13);
433 flen = getbe32(cur + 13);
424 pos += 17;
434 pos += 17;
425 cur += 17;
435 cur += 17;
426 if (flen > len - pos) {
436 if (flen > len - pos) {
427 PyErr_SetString(PyExc_ValueError,
437 PyErr_SetString(PyExc_ValueError,
428 "overflow in dirstate");
438 "overflow in dirstate");
429 goto quit;
439 goto quit;
430 }
440 }
431
441
432 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
442 entry = (PyObject *)dirstate_item_from_v1_data(state, mode,
433 size, mtime);
443 size, mtime);
434 cpos = memchr(cur, 0, flen);
444 cpos = memchr(cur, 0, flen);
435 if (cpos) {
445 if (cpos) {
436 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
446 fname = PyBytes_FromStringAndSize(cur, cpos - cur);
437 cname = PyBytes_FromStringAndSize(
447 cname = PyBytes_FromStringAndSize(
438 cpos + 1, flen - (cpos - cur) - 1);
448 cpos + 1, flen - (cpos - cur) - 1);
439 if (!fname || !cname ||
449 if (!fname || !cname ||
440 PyDict_SetItem(cmap, fname, cname) == -1 ||
450 PyDict_SetItem(cmap, fname, cname) == -1 ||
441 PyDict_SetItem(dmap, fname, entry) == -1) {
451 PyDict_SetItem(dmap, fname, entry) == -1) {
442 goto quit;
452 goto quit;
443 }
453 }
444 Py_DECREF(cname);
454 Py_DECREF(cname);
445 } else {
455 } else {
446 fname = PyBytes_FromStringAndSize(cur, flen);
456 fname = PyBytes_FromStringAndSize(cur, flen);
447 if (!fname ||
457 if (!fname ||
448 PyDict_SetItem(dmap, fname, entry) == -1) {
458 PyDict_SetItem(dmap, fname, entry) == -1) {
449 goto quit;
459 goto quit;
450 }
460 }
451 }
461 }
452 Py_DECREF(fname);
462 Py_DECREF(fname);
453 Py_DECREF(entry);
463 Py_DECREF(entry);
454 fname = cname = entry = NULL;
464 fname = cname = entry = NULL;
455 pos += flen;
465 pos += flen;
456 }
466 }
457
467
458 ret = parents;
468 ret = parents;
459 Py_INCREF(ret);
469 Py_INCREF(ret);
460 quit:
470 quit:
461 Py_XDECREF(fname);
471 Py_XDECREF(fname);
462 Py_XDECREF(cname);
472 Py_XDECREF(cname);
463 Py_XDECREF(entry);
473 Py_XDECREF(entry);
464 Py_XDECREF(parents);
474 Py_XDECREF(parents);
465 return ret;
475 return ret;
466 }
476 }
467
477
468 /*
478 /*
469 * Build a set of non-normal and other parent entries from the dirstate dmap
479 * Build a set of non-normal and other parent entries from the dirstate dmap
470 */
480 */
471 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
481 static PyObject *nonnormalotherparententries(PyObject *self, PyObject *args)
472 {
482 {
473 PyObject *dmap, *fname, *v;
483 PyObject *dmap, *fname, *v;
474 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
484 PyObject *nonnset = NULL, *otherpset = NULL, *result = NULL;
475 Py_ssize_t pos;
485 Py_ssize_t pos;
476
486
477 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
487 if (!PyArg_ParseTuple(args, "O!:nonnormalentries", &PyDict_Type,
478 &dmap)) {
488 &dmap)) {
479 goto bail;
489 goto bail;
480 }
490 }
481
491
482 nonnset = PySet_New(NULL);
492 nonnset = PySet_New(NULL);
483 if (nonnset == NULL) {
493 if (nonnset == NULL) {
484 goto bail;
494 goto bail;
485 }
495 }
486
496
487 otherpset = PySet_New(NULL);
497 otherpset = PySet_New(NULL);
488 if (otherpset == NULL) {
498 if (otherpset == NULL) {
489 goto bail;
499 goto bail;
490 }
500 }
491
501
492 pos = 0;
502 pos = 0;
493 while (PyDict_Next(dmap, &pos, &fname, &v)) {
503 while (PyDict_Next(dmap, &pos, &fname, &v)) {
494 dirstateItemObject *t;
504 dirstateItemObject *t;
495 if (!dirstate_tuple_check(v)) {
505 if (!dirstate_tuple_check(v)) {
496 PyErr_SetString(PyExc_TypeError,
506 PyErr_SetString(PyExc_TypeError,
497 "expected a dirstate tuple");
507 "expected a dirstate tuple");
498 goto bail;
508 goto bail;
499 }
509 }
500 t = (dirstateItemObject *)v;
510 t = (dirstateItemObject *)v;
501
511
502 if (t->state == 'n' && t->size == -2) {
512 if (t->state == 'n' && t->size == -2) {
503 if (PySet_Add(otherpset, fname) == -1) {
513 if (PySet_Add(otherpset, fname) == -1) {
504 goto bail;
514 goto bail;
505 }
515 }
506 }
516 }
507
517
508 if (t->state == 'n' && t->mtime != -1) {
518 if (t->state == 'n' && t->mtime != -1) {
509 continue;
519 continue;
510 }
520 }
511 if (PySet_Add(nonnset, fname) == -1) {
521 if (PySet_Add(nonnset, fname) == -1) {
512 goto bail;
522 goto bail;
513 }
523 }
514 }
524 }
515
525
516 result = Py_BuildValue("(OO)", nonnset, otherpset);
526 result = Py_BuildValue("(OO)", nonnset, otherpset);
517 if (result == NULL) {
527 if (result == NULL) {
518 goto bail;
528 goto bail;
519 }
529 }
520 Py_DECREF(nonnset);
530 Py_DECREF(nonnset);
521 Py_DECREF(otherpset);
531 Py_DECREF(otherpset);
522 return result;
532 return result;
523 bail:
533 bail:
524 Py_XDECREF(nonnset);
534 Py_XDECREF(nonnset);
525 Py_XDECREF(otherpset);
535 Py_XDECREF(otherpset);
526 Py_XDECREF(result);
536 Py_XDECREF(result);
527 return NULL;
537 return NULL;
528 }
538 }
529
539
530 /*
540 /*
531 * Efficiently pack a dirstate object into its on-disk format.
541 * Efficiently pack a dirstate object into its on-disk format.
532 */
542 */
533 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
543 static PyObject *pack_dirstate(PyObject *self, PyObject *args)
534 {
544 {
535 PyObject *packobj = NULL;
545 PyObject *packobj = NULL;
536 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
546 PyObject *map, *copymap, *pl, *mtime_unset = NULL;
537 Py_ssize_t nbytes, pos, l;
547 Py_ssize_t nbytes, pos, l;
538 PyObject *k, *v = NULL, *pn;
548 PyObject *k, *v = NULL, *pn;
539 char *p, *s;
549 char *p, *s;
540 int now;
550 int now;
541
551
542 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
552 if (!PyArg_ParseTuple(args, "O!O!O!i:pack_dirstate", &PyDict_Type, &map,
543 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
553 &PyDict_Type, &copymap, &PyTuple_Type, &pl,
544 &now)) {
554 &now)) {
545 return NULL;
555 return NULL;
546 }
556 }
547
557
548 if (PyTuple_Size(pl) != 2) {
558 if (PyTuple_Size(pl) != 2) {
549 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
559 PyErr_SetString(PyExc_TypeError, "expected 2-element tuple");
550 return NULL;
560 return NULL;
551 }
561 }
552
562
553 /* Figure out how much we need to allocate. */
563 /* Figure out how much we need to allocate. */
554 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
564 for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
555 PyObject *c;
565 PyObject *c;
556 if (!PyBytes_Check(k)) {
566 if (!PyBytes_Check(k)) {
557 PyErr_SetString(PyExc_TypeError, "expected string key");
567 PyErr_SetString(PyExc_TypeError, "expected string key");
558 goto bail;
568 goto bail;
559 }
569 }
560 nbytes += PyBytes_GET_SIZE(k) + 17;
570 nbytes += PyBytes_GET_SIZE(k) + 17;
561 c = PyDict_GetItem(copymap, k);
571 c = PyDict_GetItem(copymap, k);
562 if (c) {
572 if (c) {
563 if (!PyBytes_Check(c)) {
573 if (!PyBytes_Check(c)) {
564 PyErr_SetString(PyExc_TypeError,
574 PyErr_SetString(PyExc_TypeError,
565 "expected string key");
575 "expected string key");
566 goto bail;
576 goto bail;
567 }
577 }
568 nbytes += PyBytes_GET_SIZE(c) + 1;
578 nbytes += PyBytes_GET_SIZE(c) + 1;
569 }
579 }
570 }
580 }
571
581
572 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
582 packobj = PyBytes_FromStringAndSize(NULL, nbytes);
573 if (packobj == NULL) {
583 if (packobj == NULL) {
574 goto bail;
584 goto bail;
575 }
585 }
576
586
577 p = PyBytes_AS_STRING(packobj);
587 p = PyBytes_AS_STRING(packobj);
578
588
579 pn = PyTuple_GET_ITEM(pl, 0);
589 pn = PyTuple_GET_ITEM(pl, 0);
580 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
590 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
581 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
591 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
582 goto bail;
592 goto bail;
583 }
593 }
584 memcpy(p, s, l);
594 memcpy(p, s, l);
585 p += 20;
595 p += 20;
586 pn = PyTuple_GET_ITEM(pl, 1);
596 pn = PyTuple_GET_ITEM(pl, 1);
587 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
597 if (PyBytes_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
588 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
598 PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
589 goto bail;
599 goto bail;
590 }
600 }
591 memcpy(p, s, l);
601 memcpy(p, s, l);
592 p += 20;
602 p += 20;
593
603
594 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
604 for (pos = 0; PyDict_Next(map, &pos, &k, &v);) {
595 dirstateItemObject *tuple;
605 dirstateItemObject *tuple;
596 char state;
606 char state;
597 int mode, size, mtime;
607 int mode, size, mtime;
598 Py_ssize_t len, l;
608 Py_ssize_t len, l;
599 PyObject *o;
609 PyObject *o;
600 char *t;
610 char *t;
601
611
602 if (!dirstate_tuple_check(v)) {
612 if (!dirstate_tuple_check(v)) {
603 PyErr_SetString(PyExc_TypeError,
613 PyErr_SetString(PyExc_TypeError,
604 "expected a dirstate tuple");
614 "expected a dirstate tuple");
605 goto bail;
615 goto bail;
606 }
616 }
607 tuple = (dirstateItemObject *)v;
617 tuple = (dirstateItemObject *)v;
608
618
609 state = tuple->state;
619 state = tuple->state;
610 mode = tuple->mode;
620 mode = tuple->mode;
611 size = tuple->size;
621 size = tuple->size;
612 mtime = tuple->mtime;
622 mtime = tuple->mtime;
613 if (state == 'n' && mtime == now) {
623 if (state == 'n' && mtime == now) {
614 /* See pure/parsers.py:pack_dirstate for why we do
624 /* See pure/parsers.py:pack_dirstate for why we do
615 * this. */
625 * this. */
616 mtime = -1;
626 mtime = -1;
617 mtime_unset = (PyObject *)make_dirstate_item(
627 mtime_unset = (PyObject *)make_dirstate_item(
618 state, mode, size, mtime);
628 state, mode, size, mtime);
619 if (!mtime_unset) {
629 if (!mtime_unset) {
620 goto bail;
630 goto bail;
621 }
631 }
622 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
632 if (PyDict_SetItem(map, k, mtime_unset) == -1) {
623 goto bail;
633 goto bail;
624 }
634 }
625 Py_DECREF(mtime_unset);
635 Py_DECREF(mtime_unset);
626 mtime_unset = NULL;
636 mtime_unset = NULL;
627 }
637 }
628 *p++ = state;
638 *p++ = state;
629 putbe32((uint32_t)mode, p);
639 putbe32((uint32_t)mode, p);
630 putbe32((uint32_t)size, p + 4);
640 putbe32((uint32_t)size, p + 4);
631 putbe32((uint32_t)mtime, p + 8);
641 putbe32((uint32_t)mtime, p + 8);
632 t = p + 12;
642 t = p + 12;
633 p += 16;
643 p += 16;
634 len = PyBytes_GET_SIZE(k);
644 len = PyBytes_GET_SIZE(k);
635 memcpy(p, PyBytes_AS_STRING(k), len);
645 memcpy(p, PyBytes_AS_STRING(k), len);
636 p += len;
646 p += len;
637 o = PyDict_GetItem(copymap, k);
647 o = PyDict_GetItem(copymap, k);
638 if (o) {
648 if (o) {
639 *p++ = '\0';
649 *p++ = '\0';
640 l = PyBytes_GET_SIZE(o);
650 l = PyBytes_GET_SIZE(o);
641 memcpy(p, PyBytes_AS_STRING(o), l);
651 memcpy(p, PyBytes_AS_STRING(o), l);
642 p += l;
652 p += l;
643 len += l + 1;
653 len += l + 1;
644 }
654 }
645 putbe32((uint32_t)len, t);
655 putbe32((uint32_t)len, t);
646 }
656 }
647
657
648 pos = p - PyBytes_AS_STRING(packobj);
658 pos = p - PyBytes_AS_STRING(packobj);
649 if (pos != nbytes) {
659 if (pos != nbytes) {
650 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
660 PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
651 (long)pos, (long)nbytes);
661 (long)pos, (long)nbytes);
652 goto bail;
662 goto bail;
653 }
663 }
654
664
655 return packobj;
665 return packobj;
656 bail:
666 bail:
657 Py_XDECREF(mtime_unset);
667 Py_XDECREF(mtime_unset);
658 Py_XDECREF(packobj);
668 Py_XDECREF(packobj);
659 Py_XDECREF(v);
669 Py_XDECREF(v);
660 return NULL;
670 return NULL;
661 }
671 }
662
672
663 #define BUMPED_FIX 1
673 #define BUMPED_FIX 1
664 #define USING_SHA_256 2
674 #define USING_SHA_256 2
665 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
675 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1)
666
676
667 static PyObject *readshas(const char *source, unsigned char num,
677 static PyObject *readshas(const char *source, unsigned char num,
668 Py_ssize_t hashwidth)
678 Py_ssize_t hashwidth)
669 {
679 {
670 int i;
680 int i;
671 PyObject *list = PyTuple_New(num);
681 PyObject *list = PyTuple_New(num);
672 if (list == NULL) {
682 if (list == NULL) {
673 return NULL;
683 return NULL;
674 }
684 }
675 for (i = 0; i < num; i++) {
685 for (i = 0; i < num; i++) {
676 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
686 PyObject *hash = PyBytes_FromStringAndSize(source, hashwidth);
677 if (hash == NULL) {
687 if (hash == NULL) {
678 Py_DECREF(list);
688 Py_DECREF(list);
679 return NULL;
689 return NULL;
680 }
690 }
681 PyTuple_SET_ITEM(list, i, hash);
691 PyTuple_SET_ITEM(list, i, hash);
682 source += hashwidth;
692 source += hashwidth;
683 }
693 }
684 return list;
694 return list;
685 }
695 }
686
696
687 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
697 static PyObject *fm1readmarker(const char *databegin, const char *dataend,
688 uint32_t *msize)
698 uint32_t *msize)
689 {
699 {
690 const char *data = databegin;
700 const char *data = databegin;
691 const char *meta;
701 const char *meta;
692
702
693 double mtime;
703 double mtime;
694 int16_t tz;
704 int16_t tz;
695 uint16_t flags;
705 uint16_t flags;
696 unsigned char nsuccs, nparents, nmetadata;
706 unsigned char nsuccs, nparents, nmetadata;
697 Py_ssize_t hashwidth = 20;
707 Py_ssize_t hashwidth = 20;
698
708
699 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
709 PyObject *prec = NULL, *parents = NULL, *succs = NULL;
700 PyObject *metadata = NULL, *ret = NULL;
710 PyObject *metadata = NULL, *ret = NULL;
701 int i;
711 int i;
702
712
703 if (data + FM1_HEADER_SIZE > dataend) {
713 if (data + FM1_HEADER_SIZE > dataend) {
704 goto overflow;
714 goto overflow;
705 }
715 }
706
716
707 *msize = getbe32(data);
717 *msize = getbe32(data);
708 data += 4;
718 data += 4;
709 mtime = getbefloat64(data);
719 mtime = getbefloat64(data);
710 data += 8;
720 data += 8;
711 tz = getbeint16(data);
721 tz = getbeint16(data);
712 data += 2;
722 data += 2;
713 flags = getbeuint16(data);
723 flags = getbeuint16(data);
714 data += 2;
724 data += 2;
715
725
716 if (flags & USING_SHA_256) {
726 if (flags & USING_SHA_256) {
717 hashwidth = 32;
727 hashwidth = 32;
718 }
728 }
719
729
720 nsuccs = (unsigned char)(*data++);
730 nsuccs = (unsigned char)(*data++);
721 nparents = (unsigned char)(*data++);
731 nparents = (unsigned char)(*data++);
722 nmetadata = (unsigned char)(*data++);
732 nmetadata = (unsigned char)(*data++);
723
733
724 if (databegin + *msize > dataend) {
734 if (databegin + *msize > dataend) {
725 goto overflow;
735 goto overflow;
726 }
736 }
727 dataend = databegin + *msize; /* narrow down to marker size */
737 dataend = databegin + *msize; /* narrow down to marker size */
728
738
729 if (data + hashwidth > dataend) {
739 if (data + hashwidth > dataend) {
730 goto overflow;
740 goto overflow;
731 }
741 }
732 prec = PyBytes_FromStringAndSize(data, hashwidth);
742 prec = PyBytes_FromStringAndSize(data, hashwidth);
733 data += hashwidth;
743 data += hashwidth;
734 if (prec == NULL) {
744 if (prec == NULL) {
735 goto bail;
745 goto bail;
736 }
746 }
737
747
738 if (data + nsuccs * hashwidth > dataend) {
748 if (data + nsuccs * hashwidth > dataend) {
739 goto overflow;
749 goto overflow;
740 }
750 }
741 succs = readshas(data, nsuccs, hashwidth);
751 succs = readshas(data, nsuccs, hashwidth);
742 if (succs == NULL) {
752 if (succs == NULL) {
743 goto bail;
753 goto bail;
744 }
754 }
745 data += nsuccs * hashwidth;
755 data += nsuccs * hashwidth;
746
756
747 if (nparents == 1 || nparents == 2) {
757 if (nparents == 1 || nparents == 2) {
748 if (data + nparents * hashwidth > dataend) {
758 if (data + nparents * hashwidth > dataend) {
749 goto overflow;
759 goto overflow;
750 }
760 }
751 parents = readshas(data, nparents, hashwidth);
761 parents = readshas(data, nparents, hashwidth);
752 if (parents == NULL) {
762 if (parents == NULL) {
753 goto bail;
763 goto bail;
754 }
764 }
755 data += nparents * hashwidth;
765 data += nparents * hashwidth;
756 } else {
766 } else {
757 parents = Py_None;
767 parents = Py_None;
758 Py_INCREF(parents);
768 Py_INCREF(parents);
759 }
769 }
760
770
761 if (data + 2 * nmetadata > dataend) {
771 if (data + 2 * nmetadata > dataend) {
762 goto overflow;
772 goto overflow;
763 }
773 }
764 meta = data + (2 * nmetadata);
774 meta = data + (2 * nmetadata);
765 metadata = PyTuple_New(nmetadata);
775 metadata = PyTuple_New(nmetadata);
766 if (metadata == NULL) {
776 if (metadata == NULL) {
767 goto bail;
777 goto bail;
768 }
778 }
769 for (i = 0; i < nmetadata; i++) {
779 for (i = 0; i < nmetadata; i++) {
770 PyObject *tmp, *left = NULL, *right = NULL;
780 PyObject *tmp, *left = NULL, *right = NULL;
771 Py_ssize_t leftsize = (unsigned char)(*data++);
781 Py_ssize_t leftsize = (unsigned char)(*data++);
772 Py_ssize_t rightsize = (unsigned char)(*data++);
782 Py_ssize_t rightsize = (unsigned char)(*data++);
773 if (meta + leftsize + rightsize > dataend) {
783 if (meta + leftsize + rightsize > dataend) {
774 goto overflow;
784 goto overflow;
775 }
785 }
776 left = PyBytes_FromStringAndSize(meta, leftsize);
786 left = PyBytes_FromStringAndSize(meta, leftsize);
777 meta += leftsize;
787 meta += leftsize;
778 right = PyBytes_FromStringAndSize(meta, rightsize);
788 right = PyBytes_FromStringAndSize(meta, rightsize);
779 meta += rightsize;
789 meta += rightsize;
780 tmp = PyTuple_New(2);
790 tmp = PyTuple_New(2);
781 if (!left || !right || !tmp) {
791 if (!left || !right || !tmp) {
782 Py_XDECREF(left);
792 Py_XDECREF(left);
783 Py_XDECREF(right);
793 Py_XDECREF(right);
784 Py_XDECREF(tmp);
794 Py_XDECREF(tmp);
785 goto bail;
795 goto bail;
786 }
796 }
787 PyTuple_SET_ITEM(tmp, 0, left);
797 PyTuple_SET_ITEM(tmp, 0, left);
788 PyTuple_SET_ITEM(tmp, 1, right);
798 PyTuple_SET_ITEM(tmp, 1, right);
789 PyTuple_SET_ITEM(metadata, i, tmp);
799 PyTuple_SET_ITEM(metadata, i, tmp);
790 }
800 }
791 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
801 ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime,
792 (int)tz * 60, parents);
802 (int)tz * 60, parents);
793 goto bail; /* return successfully */
803 goto bail; /* return successfully */
794
804
795 overflow:
805 overflow:
796 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
806 PyErr_SetString(PyExc_ValueError, "overflow in obsstore");
797 bail:
807 bail:
798 Py_XDECREF(prec);
808 Py_XDECREF(prec);
799 Py_XDECREF(succs);
809 Py_XDECREF(succs);
800 Py_XDECREF(metadata);
810 Py_XDECREF(metadata);
801 Py_XDECREF(parents);
811 Py_XDECREF(parents);
802 return ret;
812 return ret;
803 }
813 }
804
814
805 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
815 static PyObject *fm1readmarkers(PyObject *self, PyObject *args)
806 {
816 {
807 const char *data, *dataend;
817 const char *data, *dataend;
808 Py_ssize_t datalen, offset, stop;
818 Py_ssize_t datalen, offset, stop;
809 PyObject *markers = NULL;
819 PyObject *markers = NULL;
810
820
811 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
821 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
812 &offset, &stop)) {
822 &offset, &stop)) {
813 return NULL;
823 return NULL;
814 }
824 }
815 if (offset < 0) {
825 if (offset < 0) {
816 PyErr_SetString(PyExc_ValueError,
826 PyErr_SetString(PyExc_ValueError,
817 "invalid negative offset in fm1readmarkers");
827 "invalid negative offset in fm1readmarkers");
818 return NULL;
828 return NULL;
819 }
829 }
820 if (stop > datalen) {
830 if (stop > datalen) {
821 PyErr_SetString(
831 PyErr_SetString(
822 PyExc_ValueError,
832 PyExc_ValueError,
823 "stop longer than data length in fm1readmarkers");
833 "stop longer than data length in fm1readmarkers");
824 return NULL;
834 return NULL;
825 }
835 }
826 dataend = data + datalen;
836 dataend = data + datalen;
827 data += offset;
837 data += offset;
828 markers = PyList_New(0);
838 markers = PyList_New(0);
829 if (!markers) {
839 if (!markers) {
830 return NULL;
840 return NULL;
831 }
841 }
832 while (offset < stop) {
842 while (offset < stop) {
833 uint32_t msize;
843 uint32_t msize;
834 int error;
844 int error;
835 PyObject *record = fm1readmarker(data, dataend, &msize);
845 PyObject *record = fm1readmarker(data, dataend, &msize);
836 if (!record) {
846 if (!record) {
837 goto bail;
847 goto bail;
838 }
848 }
839 error = PyList_Append(markers, record);
849 error = PyList_Append(markers, record);
840 Py_DECREF(record);
850 Py_DECREF(record);
841 if (error) {
851 if (error) {
842 goto bail;
852 goto bail;
843 }
853 }
844 data += msize;
854 data += msize;
845 offset += msize;
855 offset += msize;
846 }
856 }
847 return markers;
857 return markers;
848 bail:
858 bail:
849 Py_DECREF(markers);
859 Py_DECREF(markers);
850 return NULL;
860 return NULL;
851 }
861 }
852
862
853 static char parsers_doc[] = "Efficient content parsing.";
863 static char parsers_doc[] = "Efficient content parsing.";
854
864
855 PyObject *encodedir(PyObject *self, PyObject *args);
865 PyObject *encodedir(PyObject *self, PyObject *args);
856 PyObject *pathencode(PyObject *self, PyObject *args);
866 PyObject *pathencode(PyObject *self, PyObject *args);
857 PyObject *lowerencode(PyObject *self, PyObject *args);
867 PyObject *lowerencode(PyObject *self, PyObject *args);
858 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
868 PyObject *parse_index2(PyObject *self, PyObject *args, PyObject *kwargs);
859
869
860 static PyMethodDef methods[] = {
870 static PyMethodDef methods[] = {
861 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
871 {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
862 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
872 {"nonnormalotherparententries", nonnormalotherparententries, METH_VARARGS,
863 "create a set containing non-normal and other parent entries of given "
873 "create a set containing non-normal and other parent entries of given "
864 "dirstate\n"},
874 "dirstate\n"},
865 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
875 {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
866 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
876 {"parse_index2", (PyCFunction)parse_index2, METH_VARARGS | METH_KEYWORDS,
867 "parse a revlog index\n"},
877 "parse a revlog index\n"},
868 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
878 {"isasciistr", isasciistr, METH_VARARGS, "check if an ASCII string\n"},
869 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
879 {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"},
870 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
880 {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"},
871 {"dict_new_presized", dict_new_presized, METH_VARARGS,
881 {"dict_new_presized", dict_new_presized, METH_VARARGS,
872 "construct a dict with an expected size\n"},
882 "construct a dict with an expected size\n"},
873 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
883 {"make_file_foldmap", make_file_foldmap, METH_VARARGS,
874 "make file foldmap\n"},
884 "make file foldmap\n"},
875 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
885 {"jsonescapeu8fast", jsonescapeu8fast, METH_VARARGS,
876 "escape a UTF-8 byte string to JSON (fast path)\n"},
886 "escape a UTF-8 byte string to JSON (fast path)\n"},
877 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
887 {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
878 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
888 {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
879 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
889 {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"},
880 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
890 {"fm1readmarkers", fm1readmarkers, METH_VARARGS,
881 "parse v1 obsolete markers\n"},
891 "parse v1 obsolete markers\n"},
882 {NULL, NULL}};
892 {NULL, NULL}};
883
893
884 void dirs_module_init(PyObject *mod);
894 void dirs_module_init(PyObject *mod);
885 void manifest_module_init(PyObject *mod);
895 void manifest_module_init(PyObject *mod);
886 void revlog_module_init(PyObject *mod);
896 void revlog_module_init(PyObject *mod);
887
897
888 static const int version = 20;
898 static const int version = 20;
889
899
890 static void module_init(PyObject *mod)
900 static void module_init(PyObject *mod)
891 {
901 {
892 PyObject *capsule = NULL;
902 PyObject *capsule = NULL;
893 PyModule_AddIntConstant(mod, "version", version);
903 PyModule_AddIntConstant(mod, "version", version);
894
904
895 /* This module constant has two purposes. First, it lets us unit test
905 /* This module constant has two purposes. First, it lets us unit test
896 * the ImportError raised without hard-coding any error text. This
906 * the ImportError raised without hard-coding any error text. This
897 * means we can change the text in the future without breaking tests,
907 * means we can change the text in the future without breaking tests,
898 * even across changesets without a recompile. Second, its presence
908 * even across changesets without a recompile. Second, its presence
899 * can be used to determine whether the version-checking logic is
909 * can be used to determine whether the version-checking logic is
900 * present, which also helps in testing across changesets without a
910 * present, which also helps in testing across changesets without a
901 * recompile. Note that this means the pure-Python version of parsers
911 * recompile. Note that this means the pure-Python version of parsers
902 * should not have this module constant. */
912 * should not have this module constant. */
903 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
913 PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext);
904
914
905 dirs_module_init(mod);
915 dirs_module_init(mod);
906 manifest_module_init(mod);
916 manifest_module_init(mod);
907 revlog_module_init(mod);
917 revlog_module_init(mod);
908
918
909 capsule = PyCapsule_New(
919 capsule = PyCapsule_New(
910 make_dirstate_item,
920 make_dirstate_item,
911 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
921 "mercurial.cext.parsers.make_dirstate_item_CAPI", NULL);
912 if (capsule != NULL)
922 if (capsule != NULL)
913 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
923 PyModule_AddObject(mod, "make_dirstate_item_CAPI", capsule);
914
924
915 if (PyType_Ready(&dirstateItemType) < 0) {
925 if (PyType_Ready(&dirstateItemType) < 0) {
916 return;
926 return;
917 }
927 }
918 Py_INCREF(&dirstateItemType);
928 Py_INCREF(&dirstateItemType);
919 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
929 PyModule_AddObject(mod, "DirstateItem", (PyObject *)&dirstateItemType);
920 }
930 }
921
931
922 static int check_python_version(void)
932 static int check_python_version(void)
923 {
933 {
924 PyObject *sys = PyImport_ImportModule("sys"), *ver;
934 PyObject *sys = PyImport_ImportModule("sys"), *ver;
925 long hexversion;
935 long hexversion;
926 if (!sys) {
936 if (!sys) {
927 return -1;
937 return -1;
928 }
938 }
929 ver = PyObject_GetAttrString(sys, "hexversion");
939 ver = PyObject_GetAttrString(sys, "hexversion");
930 Py_DECREF(sys);
940 Py_DECREF(sys);
931 if (!ver) {
941 if (!ver) {
932 return -1;
942 return -1;
933 }
943 }
934 hexversion = PyInt_AsLong(ver);
944 hexversion = PyInt_AsLong(ver);
935 Py_DECREF(ver);
945 Py_DECREF(ver);
936 /* sys.hexversion is a 32-bit number by default, so the -1 case
946 /* sys.hexversion is a 32-bit number by default, so the -1 case
937 * should only occur in unusual circumstances (e.g. if sys.hexversion
947 * should only occur in unusual circumstances (e.g. if sys.hexversion
938 * is manually set to an invalid value). */
948 * is manually set to an invalid value). */
939 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
949 if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) {
940 PyErr_Format(PyExc_ImportError,
950 PyErr_Format(PyExc_ImportError,
941 "%s: The Mercurial extension "
951 "%s: The Mercurial extension "
942 "modules were compiled with Python " PY_VERSION
952 "modules were compiled with Python " PY_VERSION
943 ", but "
953 ", but "
944 "Mercurial is currently using Python with "
954 "Mercurial is currently using Python with "
945 "sys.hexversion=%ld: "
955 "sys.hexversion=%ld: "
946 "Python %s\n at: %s",
956 "Python %s\n at: %s",
947 versionerrortext, hexversion, Py_GetVersion(),
957 versionerrortext, hexversion, Py_GetVersion(),
948 Py_GetProgramFullPath());
958 Py_GetProgramFullPath());
949 return -1;
959 return -1;
950 }
960 }
951 return 0;
961 return 0;
952 }
962 }
953
963
954 #ifdef IS_PY3K
964 #ifdef IS_PY3K
955 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
965 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
956 parsers_doc, -1, methods};
966 parsers_doc, -1, methods};
957
967
958 PyMODINIT_FUNC PyInit_parsers(void)
968 PyMODINIT_FUNC PyInit_parsers(void)
959 {
969 {
960 PyObject *mod;
970 PyObject *mod;
961
971
962 if (check_python_version() == -1)
972 if (check_python_version() == -1)
963 return NULL;
973 return NULL;
964 mod = PyModule_Create(&parsers_module);
974 mod = PyModule_Create(&parsers_module);
965 module_init(mod);
975 module_init(mod);
966 return mod;
976 return mod;
967 }
977 }
968 #else
978 #else
969 PyMODINIT_FUNC initparsers(void)
979 PyMODINIT_FUNC initparsers(void)
970 {
980 {
971 PyObject *mod;
981 PyObject *mod;
972
982
973 if (check_python_version() == -1) {
983 if (check_python_version() == -1) {
974 return;
984 return;
975 }
985 }
976 mod = Py_InitModule3("parsers", methods, parsers_doc);
986 mod = Py_InitModule3("parsers", methods, parsers_doc);
977 module_init(mod);
987 module_init(mod);
978 }
988 }
979 #endif
989 #endif
@@ -1,752 +1,752 b''
1 # dirstatemap.py
1 # dirstatemap.py
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import errno
8 import errno
9
9
10 from .i18n import _
10 from .i18n import _
11
11
12 from . import (
12 from . import (
13 error,
13 error,
14 pathutil,
14 pathutil,
15 policy,
15 policy,
16 pycompat,
16 pycompat,
17 txnutil,
17 txnutil,
18 util,
18 util,
19 )
19 )
20
20
21 from .dirstateutils import (
21 from .dirstateutils import (
22 docket as docketmod,
22 docket as docketmod,
23 )
23 )
24
24
25 parsers = policy.importmod('parsers')
25 parsers = policy.importmod('parsers')
26 rustmod = policy.importrust('dirstate')
26 rustmod = policy.importrust('dirstate')
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29
29
30 DirstateItem = parsers.DirstateItem
30 DirstateItem = parsers.DirstateItem
31
31
32
32
33 # a special value used internally for `size` if the file come from the other parent
33 # a special value used internally for `size` if the file come from the other parent
34 FROM_P2 = -2
34 FROM_P2 = -2
35
35
36 # a special value used internally for `size` if the file is modified/merged/added
36 # a special value used internally for `size` if the file is modified/merged/added
37 NONNORMAL = -1
37 NONNORMAL = -1
38
38
39 # a special value used internally for `time` if the time is ambigeous
39 # a special value used internally for `time` if the time is ambigeous
40 AMBIGUOUS_TIME = -1
40 AMBIGUOUS_TIME = -1
41
41
42 rangemask = 0x7FFFFFFF
42 rangemask = 0x7FFFFFFF
43
43
44
44
45 class dirstatemap(object):
45 class dirstatemap(object):
46 """Map encapsulating the dirstate's contents.
46 """Map encapsulating the dirstate's contents.
47
47
48 The dirstate contains the following state:
48 The dirstate contains the following state:
49
49
50 - `identity` is the identity of the dirstate file, which can be used to
50 - `identity` is the identity of the dirstate file, which can be used to
51 detect when changes have occurred to the dirstate file.
51 detect when changes have occurred to the dirstate file.
52
52
53 - `parents` is a pair containing the parents of the working copy. The
53 - `parents` is a pair containing the parents of the working copy. The
54 parents are updated by calling `setparents`.
54 parents are updated by calling `setparents`.
55
55
56 - the state map maps filenames to tuples of (state, mode, size, mtime),
56 - the state map maps filenames to tuples of (state, mode, size, mtime),
57 where state is a single character representing 'normal', 'added',
57 where state is a single character representing 'normal', 'added',
58 'removed', or 'merged'. It is read by treating the dirstate as a
58 'removed', or 'merged'. It is read by treating the dirstate as a
59 dict. File state is updated by calling the `addfile`, `removefile` and
59 dict. File state is updated by calling the `addfile`, `removefile` and
60 `dropfile` methods.
60 `dropfile` methods.
61
61
62 - `copymap` maps destination filenames to their source filename.
62 - `copymap` maps destination filenames to their source filename.
63
63
64 The dirstate also provides the following views onto the state:
64 The dirstate also provides the following views onto the state:
65
65
66 - `nonnormalset` is a set of the filenames that have state other
66 - `nonnormalset` is a set of the filenames that have state other
67 than 'normal', or are normal but have an mtime of -1 ('normallookup').
67 than 'normal', or are normal but have an mtime of -1 ('normallookup').
68
68
69 - `otherparentset` is a set of the filenames that are marked as coming
69 - `otherparentset` is a set of the filenames that are marked as coming
70 from the second parent when the dirstate is currently being merged.
70 from the second parent when the dirstate is currently being merged.
71
71
72 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
72 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
73 form that they appear as in the dirstate.
73 form that they appear as in the dirstate.
74
74
75 - `dirfoldmap` is a dict mapping normalized directory names to the
75 - `dirfoldmap` is a dict mapping normalized directory names to the
76 denormalized form that they appear as in the dirstate.
76 denormalized form that they appear as in the dirstate.
77 """
77 """
78
78
79 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
79 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
80 self._ui = ui
80 self._ui = ui
81 self._opener = opener
81 self._opener = opener
82 self._root = root
82 self._root = root
83 self._filename = b'dirstate'
83 self._filename = b'dirstate'
84 self._nodelen = 20
84 self._nodelen = 20
85 self._nodeconstants = nodeconstants
85 self._nodeconstants = nodeconstants
86 assert (
86 assert (
87 not use_dirstate_v2
87 not use_dirstate_v2
88 ), "should have detected unsupported requirement"
88 ), "should have detected unsupported requirement"
89
89
90 self._parents = None
90 self._parents = None
91 self._dirtyparents = False
91 self._dirtyparents = False
92
92
93 # for consistent view between _pl() and _read() invocations
93 # for consistent view between _pl() and _read() invocations
94 self._pendingmode = None
94 self._pendingmode = None
95
95
96 @propertycache
96 @propertycache
97 def _map(self):
97 def _map(self):
98 self._map = {}
98 self._map = {}
99 self.read()
99 self.read()
100 return self._map
100 return self._map
101
101
102 @propertycache
102 @propertycache
103 def copymap(self):
103 def copymap(self):
104 self.copymap = {}
104 self.copymap = {}
105 self._map
105 self._map
106 return self.copymap
106 return self.copymap
107
107
108 def clear(self):
108 def clear(self):
109 self._map.clear()
109 self._map.clear()
110 self.copymap.clear()
110 self.copymap.clear()
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
111 self.setparents(self._nodeconstants.nullid, self._nodeconstants.nullid)
112 util.clearcachedproperty(self, b"_dirs")
112 util.clearcachedproperty(self, b"_dirs")
113 util.clearcachedproperty(self, b"_alldirs")
113 util.clearcachedproperty(self, b"_alldirs")
114 util.clearcachedproperty(self, b"filefoldmap")
114 util.clearcachedproperty(self, b"filefoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
115 util.clearcachedproperty(self, b"dirfoldmap")
116 util.clearcachedproperty(self, b"nonnormalset")
116 util.clearcachedproperty(self, b"nonnormalset")
117 util.clearcachedproperty(self, b"otherparentset")
117 util.clearcachedproperty(self, b"otherparentset")
118
118
119 def items(self):
119 def items(self):
120 return pycompat.iteritems(self._map)
120 return pycompat.iteritems(self._map)
121
121
122 # forward for python2,3 compat
122 # forward for python2,3 compat
123 iteritems = items
123 iteritems = items
124
124
125 debug_iter = items
125 debug_iter = items
126
126
127 def __len__(self):
127 def __len__(self):
128 return len(self._map)
128 return len(self._map)
129
129
130 def __iter__(self):
130 def __iter__(self):
131 return iter(self._map)
131 return iter(self._map)
132
132
133 def get(self, key, default=None):
133 def get(self, key, default=None):
134 return self._map.get(key, default)
134 return self._map.get(key, default)
135
135
136 def __contains__(self, key):
136 def __contains__(self, key):
137 return key in self._map
137 return key in self._map
138
138
139 def __getitem__(self, key):
139 def __getitem__(self, key):
140 return self._map[key]
140 return self._map[key]
141
141
142 def keys(self):
142 def keys(self):
143 return self._map.keys()
143 return self._map.keys()
144
144
145 def preload(self):
145 def preload(self):
146 """Loads the underlying data, if it's not already loaded"""
146 """Loads the underlying data, if it's not already loaded"""
147 self._map
147 self._map
148
148
149 def addfile(
149 def addfile(
150 self,
150 self,
151 f,
151 f,
152 mode=0,
152 mode=0,
153 size=None,
153 size=None,
154 mtime=None,
154 mtime=None,
155 added=False,
155 added=False,
156 merged=False,
156 merged=False,
157 from_p2=False,
157 from_p2=False,
158 possibly_dirty=False,
158 possibly_dirty=False,
159 ):
159 ):
160 """Add a tracked file to the dirstate."""
160 """Add a tracked file to the dirstate."""
161 if added:
161 if added:
162 assert not merged
162 assert not merged
163 assert not possibly_dirty
163 assert not possibly_dirty
164 assert not from_p2
164 assert not from_p2
165 state = b'a'
165 state = b'a'
166 size = NONNORMAL
166 size = NONNORMAL
167 mtime = AMBIGUOUS_TIME
167 mtime = AMBIGUOUS_TIME
168 elif merged:
168 elif merged:
169 assert not possibly_dirty
169 assert not possibly_dirty
170 assert not from_p2
170 assert not from_p2
171 state = b'm'
171 state = b'm'
172 size = FROM_P2
172 size = FROM_P2
173 mtime = AMBIGUOUS_TIME
173 mtime = AMBIGUOUS_TIME
174 elif from_p2:
174 elif from_p2:
175 assert not possibly_dirty
175 assert not possibly_dirty
176 state = b'n'
176 state = b'n'
177 size = FROM_P2
177 size = FROM_P2
178 mtime = AMBIGUOUS_TIME
178 mtime = AMBIGUOUS_TIME
179 elif possibly_dirty:
179 elif possibly_dirty:
180 state = b'n'
180 state = b'n'
181 size = NONNORMAL
181 size = NONNORMAL
182 mtime = AMBIGUOUS_TIME
182 mtime = AMBIGUOUS_TIME
183 else:
183 else:
184 assert size != FROM_P2
184 assert size != FROM_P2
185 assert size != NONNORMAL
185 assert size != NONNORMAL
186 state = b'n'
186 state = b'n'
187 size = size & rangemask
187 size = size & rangemask
188 mtime = mtime & rangemask
188 mtime = mtime & rangemask
189 assert state is not None
189 assert state is not None
190 assert size is not None
190 assert size is not None
191 assert mtime is not None
191 assert mtime is not None
192 old_entry = self.get(f)
192 old_entry = self.get(f)
193 if (
193 if (
194 old_entry is None or old_entry.removed
194 old_entry is None or old_entry.removed
195 ) and "_dirs" in self.__dict__:
195 ) and "_dirs" in self.__dict__:
196 self._dirs.addpath(f)
196 self._dirs.addpath(f)
197 if old_entry is None and "_alldirs" in self.__dict__:
197 if old_entry is None and "_alldirs" in self.__dict__:
198 self._alldirs.addpath(f)
198 self._alldirs.addpath(f)
199 e = self._map[f] = DirstateItem(state, mode, size, mtime)
199 e = self._map[f] = DirstateItem(state, mode, size, mtime)
200 if e.dm_nonnormal:
200 if e.dm_nonnormal:
201 self.nonnormalset.add(f)
201 self.nonnormalset.add(f)
202 if size == FROM_P2:
202 if e.dm_otherparent:
203 self.otherparentset.add(f)
203 self.otherparentset.add(f)
204
204
205 def removefile(self, f, in_merge=False):
205 def removefile(self, f, in_merge=False):
206 """
206 """
207 Mark a file as removed in the dirstate.
207 Mark a file as removed in the dirstate.
208
208
209 The `size` parameter is used to store sentinel values that indicate
209 The `size` parameter is used to store sentinel values that indicate
210 the file's previous state. In the future, we should refactor this
210 the file's previous state. In the future, we should refactor this
211 to be more explicit about what that state is.
211 to be more explicit about what that state is.
212 """
212 """
213 entry = self.get(f)
213 entry = self.get(f)
214 size = 0
214 size = 0
215 if in_merge:
215 if in_merge:
216 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
216 # XXX we should not be able to have 'm' state and 'FROM_P2' if not
217 # during a merge. So I (marmoute) am not sure we need the
217 # during a merge. So I (marmoute) am not sure we need the
218 # conditionnal at all. Adding double checking this with assert
218 # conditionnal at all. Adding double checking this with assert
219 # would be nice.
219 # would be nice.
220 if entry is not None:
220 if entry is not None:
221 # backup the previous state
221 # backup the previous state
222 if entry.merged: # merge
222 if entry.merged: # merge
223 size = NONNORMAL
223 size = NONNORMAL
224 elif entry.from_p2:
224 elif entry.from_p2:
225 size = FROM_P2
225 size = FROM_P2
226 self.otherparentset.add(f)
226 self.otherparentset.add(f)
227 if entry is not None and not (entry.merged or entry.from_p2):
227 if entry is not None and not (entry.merged or entry.from_p2):
228 self.copymap.pop(f, None)
228 self.copymap.pop(f, None)
229
229
230 if entry is not None and not entry.removed and "_dirs" in self.__dict__:
230 if entry is not None and not entry.removed and "_dirs" in self.__dict__:
231 self._dirs.delpath(f)
231 self._dirs.delpath(f)
232 if entry is None and "_alldirs" in self.__dict__:
232 if entry is None and "_alldirs" in self.__dict__:
233 self._alldirs.addpath(f)
233 self._alldirs.addpath(f)
234 if "filefoldmap" in self.__dict__:
234 if "filefoldmap" in self.__dict__:
235 normed = util.normcase(f)
235 normed = util.normcase(f)
236 self.filefoldmap.pop(normed, None)
236 self.filefoldmap.pop(normed, None)
237 self._map[f] = DirstateItem(b'r', 0, size, 0)
237 self._map[f] = DirstateItem(b'r', 0, size, 0)
238 self.nonnormalset.add(f)
238 self.nonnormalset.add(f)
239
239
240 def dropfile(self, f):
240 def dropfile(self, f):
241 """
241 """
242 Remove a file from the dirstate. Returns True if the file was
242 Remove a file from the dirstate. Returns True if the file was
243 previously recorded.
243 previously recorded.
244 """
244 """
245 old_entry = self._map.pop(f, None)
245 old_entry = self._map.pop(f, None)
246 exists = False
246 exists = False
247 oldstate = b'?'
247 oldstate = b'?'
248 if old_entry is not None:
248 if old_entry is not None:
249 exists = True
249 exists = True
250 oldstate = old_entry.state
250 oldstate = old_entry.state
251 if exists:
251 if exists:
252 if oldstate != b"r" and "_dirs" in self.__dict__:
252 if oldstate != b"r" and "_dirs" in self.__dict__:
253 self._dirs.delpath(f)
253 self._dirs.delpath(f)
254 if "_alldirs" in self.__dict__:
254 if "_alldirs" in self.__dict__:
255 self._alldirs.delpath(f)
255 self._alldirs.delpath(f)
256 if "filefoldmap" in self.__dict__:
256 if "filefoldmap" in self.__dict__:
257 normed = util.normcase(f)
257 normed = util.normcase(f)
258 self.filefoldmap.pop(normed, None)
258 self.filefoldmap.pop(normed, None)
259 self.nonnormalset.discard(f)
259 self.nonnormalset.discard(f)
260 return exists
260 return exists
261
261
262 def clearambiguoustimes(self, files, now):
262 def clearambiguoustimes(self, files, now):
263 for f in files:
263 for f in files:
264 e = self.get(f)
264 e = self.get(f)
265 if e is not None and e.need_delay(now):
265 if e is not None and e.need_delay(now):
266 e.set_possibly_dirty()
266 e.set_possibly_dirty()
267 self.nonnormalset.add(f)
267 self.nonnormalset.add(f)
268
268
269 def nonnormalentries(self):
269 def nonnormalentries(self):
270 '''Compute the nonnormal dirstate entries from the dmap'''
270 '''Compute the nonnormal dirstate entries from the dmap'''
271 try:
271 try:
272 return parsers.nonnormalotherparententries(self._map)
272 return parsers.nonnormalotherparententries(self._map)
273 except AttributeError:
273 except AttributeError:
274 nonnorm = set()
274 nonnorm = set()
275 otherparent = set()
275 otherparent = set()
276 for fname, e in pycompat.iteritems(self._map):
276 for fname, e in pycompat.iteritems(self._map):
277 if e.dm_nonnormal:
277 if e.dm_nonnormal:
278 nonnorm.add(fname)
278 nonnorm.add(fname)
279 if e.from_p2:
279 if e.from_p2:
280 otherparent.add(fname)
280 otherparent.add(fname)
281 return nonnorm, otherparent
281 return nonnorm, otherparent
282
282
283 @propertycache
283 @propertycache
284 def filefoldmap(self):
284 def filefoldmap(self):
285 """Returns a dictionary mapping normalized case paths to their
285 """Returns a dictionary mapping normalized case paths to their
286 non-normalized versions.
286 non-normalized versions.
287 """
287 """
288 try:
288 try:
289 makefilefoldmap = parsers.make_file_foldmap
289 makefilefoldmap = parsers.make_file_foldmap
290 except AttributeError:
290 except AttributeError:
291 pass
291 pass
292 else:
292 else:
293 return makefilefoldmap(
293 return makefilefoldmap(
294 self._map, util.normcasespec, util.normcasefallback
294 self._map, util.normcasespec, util.normcasefallback
295 )
295 )
296
296
297 f = {}
297 f = {}
298 normcase = util.normcase
298 normcase = util.normcase
299 for name, s in pycompat.iteritems(self._map):
299 for name, s in pycompat.iteritems(self._map):
300 if not s.removed:
300 if not s.removed:
301 f[normcase(name)] = name
301 f[normcase(name)] = name
302 f[b'.'] = b'.' # prevents useless util.fspath() invocation
302 f[b'.'] = b'.' # prevents useless util.fspath() invocation
303 return f
303 return f
304
304
305 def hastrackeddir(self, d):
305 def hastrackeddir(self, d):
306 """
306 """
307 Returns True if the dirstate contains a tracked (not removed) file
307 Returns True if the dirstate contains a tracked (not removed) file
308 in this directory.
308 in this directory.
309 """
309 """
310 return d in self._dirs
310 return d in self._dirs
311
311
312 def hasdir(self, d):
312 def hasdir(self, d):
313 """
313 """
314 Returns True if the dirstate contains a file (tracked or removed)
314 Returns True if the dirstate contains a file (tracked or removed)
315 in this directory.
315 in this directory.
316 """
316 """
317 return d in self._alldirs
317 return d in self._alldirs
318
318
319 @propertycache
319 @propertycache
320 def _dirs(self):
320 def _dirs(self):
321 return pathutil.dirs(self._map, b'r')
321 return pathutil.dirs(self._map, b'r')
322
322
323 @propertycache
323 @propertycache
324 def _alldirs(self):
324 def _alldirs(self):
325 return pathutil.dirs(self._map)
325 return pathutil.dirs(self._map)
326
326
327 def _opendirstatefile(self):
327 def _opendirstatefile(self):
328 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
328 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
329 if self._pendingmode is not None and self._pendingmode != mode:
329 if self._pendingmode is not None and self._pendingmode != mode:
330 fp.close()
330 fp.close()
331 raise error.Abort(
331 raise error.Abort(
332 _(b'working directory state may be changed parallelly')
332 _(b'working directory state may be changed parallelly')
333 )
333 )
334 self._pendingmode = mode
334 self._pendingmode = mode
335 return fp
335 return fp
336
336
337 def parents(self):
337 def parents(self):
338 if not self._parents:
338 if not self._parents:
339 try:
339 try:
340 fp = self._opendirstatefile()
340 fp = self._opendirstatefile()
341 st = fp.read(2 * self._nodelen)
341 st = fp.read(2 * self._nodelen)
342 fp.close()
342 fp.close()
343 except IOError as err:
343 except IOError as err:
344 if err.errno != errno.ENOENT:
344 if err.errno != errno.ENOENT:
345 raise
345 raise
346 # File doesn't exist, so the current state is empty
346 # File doesn't exist, so the current state is empty
347 st = b''
347 st = b''
348
348
349 l = len(st)
349 l = len(st)
350 if l == self._nodelen * 2:
350 if l == self._nodelen * 2:
351 self._parents = (
351 self._parents = (
352 st[: self._nodelen],
352 st[: self._nodelen],
353 st[self._nodelen : 2 * self._nodelen],
353 st[self._nodelen : 2 * self._nodelen],
354 )
354 )
355 elif l == 0:
355 elif l == 0:
356 self._parents = (
356 self._parents = (
357 self._nodeconstants.nullid,
357 self._nodeconstants.nullid,
358 self._nodeconstants.nullid,
358 self._nodeconstants.nullid,
359 )
359 )
360 else:
360 else:
361 raise error.Abort(
361 raise error.Abort(
362 _(b'working directory state appears damaged!')
362 _(b'working directory state appears damaged!')
363 )
363 )
364
364
365 return self._parents
365 return self._parents
366
366
367 def setparents(self, p1, p2):
367 def setparents(self, p1, p2):
368 self._parents = (p1, p2)
368 self._parents = (p1, p2)
369 self._dirtyparents = True
369 self._dirtyparents = True
370
370
371 def read(self):
371 def read(self):
372 # ignore HG_PENDING because identity is used only for writing
372 # ignore HG_PENDING because identity is used only for writing
373 self.identity = util.filestat.frompath(
373 self.identity = util.filestat.frompath(
374 self._opener.join(self._filename)
374 self._opener.join(self._filename)
375 )
375 )
376
376
377 try:
377 try:
378 fp = self._opendirstatefile()
378 fp = self._opendirstatefile()
379 try:
379 try:
380 st = fp.read()
380 st = fp.read()
381 finally:
381 finally:
382 fp.close()
382 fp.close()
383 except IOError as err:
383 except IOError as err:
384 if err.errno != errno.ENOENT:
384 if err.errno != errno.ENOENT:
385 raise
385 raise
386 return
386 return
387 if not st:
387 if not st:
388 return
388 return
389
389
390 if util.safehasattr(parsers, b'dict_new_presized'):
390 if util.safehasattr(parsers, b'dict_new_presized'):
391 # Make an estimate of the number of files in the dirstate based on
391 # Make an estimate of the number of files in the dirstate based on
392 # its size. This trades wasting some memory for avoiding costly
392 # its size. This trades wasting some memory for avoiding costly
393 # resizes. Each entry have a prefix of 17 bytes followed by one or
393 # resizes. Each entry have a prefix of 17 bytes followed by one or
394 # two path names. Studies on various large-scale real-world repositories
394 # two path names. Studies on various large-scale real-world repositories
395 # found 54 bytes a reasonable upper limit for the average path names.
395 # found 54 bytes a reasonable upper limit for the average path names.
396 # Copy entries are ignored for the sake of this estimate.
396 # Copy entries are ignored for the sake of this estimate.
397 self._map = parsers.dict_new_presized(len(st) // 71)
397 self._map = parsers.dict_new_presized(len(st) // 71)
398
398
399 # Python's garbage collector triggers a GC each time a certain number
399 # Python's garbage collector triggers a GC each time a certain number
400 # of container objects (the number being defined by
400 # of container objects (the number being defined by
401 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
401 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
402 # for each file in the dirstate. The C version then immediately marks
402 # for each file in the dirstate. The C version then immediately marks
403 # them as not to be tracked by the collector. However, this has no
403 # them as not to be tracked by the collector. However, this has no
404 # effect on when GCs are triggered, only on what objects the GC looks
404 # effect on when GCs are triggered, only on what objects the GC looks
405 # into. This means that O(number of files) GCs are unavoidable.
405 # into. This means that O(number of files) GCs are unavoidable.
406 # Depending on when in the process's lifetime the dirstate is parsed,
406 # Depending on when in the process's lifetime the dirstate is parsed,
407 # this can get very expensive. As a workaround, disable GC while
407 # this can get very expensive. As a workaround, disable GC while
408 # parsing the dirstate.
408 # parsing the dirstate.
409 #
409 #
410 # (we cannot decorate the function directly since it is in a C module)
410 # (we cannot decorate the function directly since it is in a C module)
411 parse_dirstate = util.nogc(parsers.parse_dirstate)
411 parse_dirstate = util.nogc(parsers.parse_dirstate)
412 p = parse_dirstate(self._map, self.copymap, st)
412 p = parse_dirstate(self._map, self.copymap, st)
413 if not self._dirtyparents:
413 if not self._dirtyparents:
414 self.setparents(*p)
414 self.setparents(*p)
415
415
416 # Avoid excess attribute lookups by fast pathing certain checks
416 # Avoid excess attribute lookups by fast pathing certain checks
417 self.__contains__ = self._map.__contains__
417 self.__contains__ = self._map.__contains__
418 self.__getitem__ = self._map.__getitem__
418 self.__getitem__ = self._map.__getitem__
419 self.get = self._map.get
419 self.get = self._map.get
420
420
421 def write(self, _tr, st, now):
421 def write(self, _tr, st, now):
422 st.write(
422 st.write(
423 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
423 parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
424 )
424 )
425 st.close()
425 st.close()
426 self._dirtyparents = False
426 self._dirtyparents = False
427 self.nonnormalset, self.otherparentset = self.nonnormalentries()
427 self.nonnormalset, self.otherparentset = self.nonnormalentries()
428
428
429 @propertycache
429 @propertycache
430 def nonnormalset(self):
430 def nonnormalset(self):
431 nonnorm, otherparents = self.nonnormalentries()
431 nonnorm, otherparents = self.nonnormalentries()
432 self.otherparentset = otherparents
432 self.otherparentset = otherparents
433 return nonnorm
433 return nonnorm
434
434
435 @propertycache
435 @propertycache
436 def otherparentset(self):
436 def otherparentset(self):
437 nonnorm, otherparents = self.nonnormalentries()
437 nonnorm, otherparents = self.nonnormalentries()
438 self.nonnormalset = nonnorm
438 self.nonnormalset = nonnorm
439 return otherparents
439 return otherparents
440
440
441 def non_normal_or_other_parent_paths(self):
441 def non_normal_or_other_parent_paths(self):
442 return self.nonnormalset.union(self.otherparentset)
442 return self.nonnormalset.union(self.otherparentset)
443
443
444 @propertycache
444 @propertycache
445 def identity(self):
445 def identity(self):
446 self._map
446 self._map
447 return self.identity
447 return self.identity
448
448
449 @propertycache
449 @propertycache
450 def dirfoldmap(self):
450 def dirfoldmap(self):
451 f = {}
451 f = {}
452 normcase = util.normcase
452 normcase = util.normcase
453 for name in self._dirs:
453 for name in self._dirs:
454 f[normcase(name)] = name
454 f[normcase(name)] = name
455 return f
455 return f
456
456
457
457
458 if rustmod is not None:
458 if rustmod is not None:
459
459
460 class dirstatemap(object):
460 class dirstatemap(object):
461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
461 def __init__(self, ui, opener, root, nodeconstants, use_dirstate_v2):
462 self._use_dirstate_v2 = use_dirstate_v2
462 self._use_dirstate_v2 = use_dirstate_v2
463 self._nodeconstants = nodeconstants
463 self._nodeconstants = nodeconstants
464 self._ui = ui
464 self._ui = ui
465 self._opener = opener
465 self._opener = opener
466 self._root = root
466 self._root = root
467 self._filename = b'dirstate'
467 self._filename = b'dirstate'
468 self._nodelen = 20 # Also update Rust code when changing this!
468 self._nodelen = 20 # Also update Rust code when changing this!
469 self._parents = None
469 self._parents = None
470 self._dirtyparents = False
470 self._dirtyparents = False
471 self._docket = None
471 self._docket = None
472
472
473 # for consistent view between _pl() and _read() invocations
473 # for consistent view between _pl() and _read() invocations
474 self._pendingmode = None
474 self._pendingmode = None
475
475
476 self._use_dirstate_tree = self._ui.configbool(
476 self._use_dirstate_tree = self._ui.configbool(
477 b"experimental",
477 b"experimental",
478 b"dirstate-tree.in-memory",
478 b"dirstate-tree.in-memory",
479 False,
479 False,
480 )
480 )
481
481
482 def addfile(
482 def addfile(
483 self,
483 self,
484 f,
484 f,
485 mode=0,
485 mode=0,
486 size=None,
486 size=None,
487 mtime=None,
487 mtime=None,
488 added=False,
488 added=False,
489 merged=False,
489 merged=False,
490 from_p2=False,
490 from_p2=False,
491 possibly_dirty=False,
491 possibly_dirty=False,
492 ):
492 ):
493 return self._rustmap.addfile(
493 return self._rustmap.addfile(
494 f,
494 f,
495 mode,
495 mode,
496 size,
496 size,
497 mtime,
497 mtime,
498 added,
498 added,
499 merged,
499 merged,
500 from_p2,
500 from_p2,
501 possibly_dirty,
501 possibly_dirty,
502 )
502 )
503
503
504 def removefile(self, *args, **kwargs):
504 def removefile(self, *args, **kwargs):
505 return self._rustmap.removefile(*args, **kwargs)
505 return self._rustmap.removefile(*args, **kwargs)
506
506
507 def dropfile(self, *args, **kwargs):
507 def dropfile(self, *args, **kwargs):
508 return self._rustmap.dropfile(*args, **kwargs)
508 return self._rustmap.dropfile(*args, **kwargs)
509
509
510 def clearambiguoustimes(self, *args, **kwargs):
510 def clearambiguoustimes(self, *args, **kwargs):
511 return self._rustmap.clearambiguoustimes(*args, **kwargs)
511 return self._rustmap.clearambiguoustimes(*args, **kwargs)
512
512
513 def nonnormalentries(self):
513 def nonnormalentries(self):
514 return self._rustmap.nonnormalentries()
514 return self._rustmap.nonnormalentries()
515
515
516 def get(self, *args, **kwargs):
516 def get(self, *args, **kwargs):
517 return self._rustmap.get(*args, **kwargs)
517 return self._rustmap.get(*args, **kwargs)
518
518
519 @property
519 @property
520 def copymap(self):
520 def copymap(self):
521 return self._rustmap.copymap()
521 return self._rustmap.copymap()
522
522
523 def directories(self):
523 def directories(self):
524 return self._rustmap.directories()
524 return self._rustmap.directories()
525
525
526 def debug_iter(self):
526 def debug_iter(self):
527 return self._rustmap.debug_iter()
527 return self._rustmap.debug_iter()
528
528
529 def preload(self):
529 def preload(self):
530 self._rustmap
530 self._rustmap
531
531
532 def clear(self):
532 def clear(self):
533 self._rustmap.clear()
533 self._rustmap.clear()
534 self.setparents(
534 self.setparents(
535 self._nodeconstants.nullid, self._nodeconstants.nullid
535 self._nodeconstants.nullid, self._nodeconstants.nullid
536 )
536 )
537 util.clearcachedproperty(self, b"_dirs")
537 util.clearcachedproperty(self, b"_dirs")
538 util.clearcachedproperty(self, b"_alldirs")
538 util.clearcachedproperty(self, b"_alldirs")
539 util.clearcachedproperty(self, b"dirfoldmap")
539 util.clearcachedproperty(self, b"dirfoldmap")
540
540
541 def items(self):
541 def items(self):
542 return self._rustmap.items()
542 return self._rustmap.items()
543
543
544 def keys(self):
544 def keys(self):
545 return iter(self._rustmap)
545 return iter(self._rustmap)
546
546
547 def __contains__(self, key):
547 def __contains__(self, key):
548 return key in self._rustmap
548 return key in self._rustmap
549
549
550 def __getitem__(self, item):
550 def __getitem__(self, item):
551 return self._rustmap[item]
551 return self._rustmap[item]
552
552
553 def __len__(self):
553 def __len__(self):
554 return len(self._rustmap)
554 return len(self._rustmap)
555
555
556 def __iter__(self):
556 def __iter__(self):
557 return iter(self._rustmap)
557 return iter(self._rustmap)
558
558
559 # forward for python2,3 compat
559 # forward for python2,3 compat
560 iteritems = items
560 iteritems = items
561
561
562 def _opendirstatefile(self):
562 def _opendirstatefile(self):
563 fp, mode = txnutil.trypending(
563 fp, mode = txnutil.trypending(
564 self._root, self._opener, self._filename
564 self._root, self._opener, self._filename
565 )
565 )
566 if self._pendingmode is not None and self._pendingmode != mode:
566 if self._pendingmode is not None and self._pendingmode != mode:
567 fp.close()
567 fp.close()
568 raise error.Abort(
568 raise error.Abort(
569 _(b'working directory state may be changed parallelly')
569 _(b'working directory state may be changed parallelly')
570 )
570 )
571 self._pendingmode = mode
571 self._pendingmode = mode
572 return fp
572 return fp
573
573
574 def _readdirstatefile(self, size=-1):
574 def _readdirstatefile(self, size=-1):
575 try:
575 try:
576 with self._opendirstatefile() as fp:
576 with self._opendirstatefile() as fp:
577 return fp.read(size)
577 return fp.read(size)
578 except IOError as err:
578 except IOError as err:
579 if err.errno != errno.ENOENT:
579 if err.errno != errno.ENOENT:
580 raise
580 raise
581 # File doesn't exist, so the current state is empty
581 # File doesn't exist, so the current state is empty
582 return b''
582 return b''
583
583
584 def setparents(self, p1, p2):
584 def setparents(self, p1, p2):
585 self._parents = (p1, p2)
585 self._parents = (p1, p2)
586 self._dirtyparents = True
586 self._dirtyparents = True
587
587
588 def parents(self):
588 def parents(self):
589 if not self._parents:
589 if not self._parents:
590 if self._use_dirstate_v2:
590 if self._use_dirstate_v2:
591 self._parents = self.docket.parents
591 self._parents = self.docket.parents
592 else:
592 else:
593 read_len = self._nodelen * 2
593 read_len = self._nodelen * 2
594 st = self._readdirstatefile(read_len)
594 st = self._readdirstatefile(read_len)
595 l = len(st)
595 l = len(st)
596 if l == read_len:
596 if l == read_len:
597 self._parents = (
597 self._parents = (
598 st[: self._nodelen],
598 st[: self._nodelen],
599 st[self._nodelen : 2 * self._nodelen],
599 st[self._nodelen : 2 * self._nodelen],
600 )
600 )
601 elif l == 0:
601 elif l == 0:
602 self._parents = (
602 self._parents = (
603 self._nodeconstants.nullid,
603 self._nodeconstants.nullid,
604 self._nodeconstants.nullid,
604 self._nodeconstants.nullid,
605 )
605 )
606 else:
606 else:
607 raise error.Abort(
607 raise error.Abort(
608 _(b'working directory state appears damaged!')
608 _(b'working directory state appears damaged!')
609 )
609 )
610
610
611 return self._parents
611 return self._parents
612
612
613 @property
613 @property
614 def docket(self):
614 def docket(self):
615 if not self._docket:
615 if not self._docket:
616 if not self._use_dirstate_v2:
616 if not self._use_dirstate_v2:
617 raise error.ProgrammingError(
617 raise error.ProgrammingError(
618 b'dirstate only has a docket in v2 format'
618 b'dirstate only has a docket in v2 format'
619 )
619 )
620 self._docket = docketmod.DirstateDocket.parse(
620 self._docket = docketmod.DirstateDocket.parse(
621 self._readdirstatefile(), self._nodeconstants
621 self._readdirstatefile(), self._nodeconstants
622 )
622 )
623 return self._docket
623 return self._docket
624
624
625 @propertycache
625 @propertycache
626 def _rustmap(self):
626 def _rustmap(self):
627 """
627 """
628 Fills the Dirstatemap when called.
628 Fills the Dirstatemap when called.
629 """
629 """
630 # ignore HG_PENDING because identity is used only for writing
630 # ignore HG_PENDING because identity is used only for writing
631 self.identity = util.filestat.frompath(
631 self.identity = util.filestat.frompath(
632 self._opener.join(self._filename)
632 self._opener.join(self._filename)
633 )
633 )
634
634
635 if self._use_dirstate_v2:
635 if self._use_dirstate_v2:
636 if self.docket.uuid:
636 if self.docket.uuid:
637 # TODO: use mmap when possible
637 # TODO: use mmap when possible
638 data = self._opener.read(self.docket.data_filename())
638 data = self._opener.read(self.docket.data_filename())
639 else:
639 else:
640 data = b''
640 data = b''
641 self._rustmap = rustmod.DirstateMap.new_v2(
641 self._rustmap = rustmod.DirstateMap.new_v2(
642 data, self.docket.data_size, self.docket.tree_metadata
642 data, self.docket.data_size, self.docket.tree_metadata
643 )
643 )
644 parents = self.docket.parents
644 parents = self.docket.parents
645 else:
645 else:
646 self._rustmap, parents = rustmod.DirstateMap.new_v1(
646 self._rustmap, parents = rustmod.DirstateMap.new_v1(
647 self._use_dirstate_tree, self._readdirstatefile()
647 self._use_dirstate_tree, self._readdirstatefile()
648 )
648 )
649
649
650 if parents and not self._dirtyparents:
650 if parents and not self._dirtyparents:
651 self.setparents(*parents)
651 self.setparents(*parents)
652
652
653 self.__contains__ = self._rustmap.__contains__
653 self.__contains__ = self._rustmap.__contains__
654 self.__getitem__ = self._rustmap.__getitem__
654 self.__getitem__ = self._rustmap.__getitem__
655 self.get = self._rustmap.get
655 self.get = self._rustmap.get
656 return self._rustmap
656 return self._rustmap
657
657
658 def write(self, tr, st, now):
658 def write(self, tr, st, now):
659 if not self._use_dirstate_v2:
659 if not self._use_dirstate_v2:
660 p1, p2 = self.parents()
660 p1, p2 = self.parents()
661 packed = self._rustmap.write_v1(p1, p2, now)
661 packed = self._rustmap.write_v1(p1, p2, now)
662 st.write(packed)
662 st.write(packed)
663 st.close()
663 st.close()
664 self._dirtyparents = False
664 self._dirtyparents = False
665 return
665 return
666
666
667 # We can only append to an existing data file if there is one
667 # We can only append to an existing data file if there is one
668 can_append = self.docket.uuid is not None
668 can_append = self.docket.uuid is not None
669 packed, meta, append = self._rustmap.write_v2(now, can_append)
669 packed, meta, append = self._rustmap.write_v2(now, can_append)
670 if append:
670 if append:
671 docket = self.docket
671 docket = self.docket
672 data_filename = docket.data_filename()
672 data_filename = docket.data_filename()
673 if tr:
673 if tr:
674 tr.add(data_filename, docket.data_size)
674 tr.add(data_filename, docket.data_size)
675 with self._opener(data_filename, b'r+b') as fp:
675 with self._opener(data_filename, b'r+b') as fp:
676 fp.seek(docket.data_size)
676 fp.seek(docket.data_size)
677 assert fp.tell() == docket.data_size
677 assert fp.tell() == docket.data_size
678 written = fp.write(packed)
678 written = fp.write(packed)
679 if written is not None: # py2 may return None
679 if written is not None: # py2 may return None
680 assert written == len(packed), (written, len(packed))
680 assert written == len(packed), (written, len(packed))
681 docket.data_size += len(packed)
681 docket.data_size += len(packed)
682 docket.parents = self.parents()
682 docket.parents = self.parents()
683 docket.tree_metadata = meta
683 docket.tree_metadata = meta
684 st.write(docket.serialize())
684 st.write(docket.serialize())
685 st.close()
685 st.close()
686 else:
686 else:
687 old_docket = self.docket
687 old_docket = self.docket
688 new_docket = docketmod.DirstateDocket.with_new_uuid(
688 new_docket = docketmod.DirstateDocket.with_new_uuid(
689 self.parents(), len(packed), meta
689 self.parents(), len(packed), meta
690 )
690 )
691 data_filename = new_docket.data_filename()
691 data_filename = new_docket.data_filename()
692 if tr:
692 if tr:
693 tr.add(data_filename, 0)
693 tr.add(data_filename, 0)
694 self._opener.write(data_filename, packed)
694 self._opener.write(data_filename, packed)
695 # Write the new docket after the new data file has been
695 # Write the new docket after the new data file has been
696 # written. Because `st` was opened with `atomictemp=True`,
696 # written. Because `st` was opened with `atomictemp=True`,
697 # the actual `.hg/dirstate` file is only affected on close.
697 # the actual `.hg/dirstate` file is only affected on close.
698 st.write(new_docket.serialize())
698 st.write(new_docket.serialize())
699 st.close()
699 st.close()
700 # Remove the old data file after the new docket pointing to
700 # Remove the old data file after the new docket pointing to
701 # the new data file was written.
701 # the new data file was written.
702 if old_docket.uuid:
702 if old_docket.uuid:
703 data_filename = old_docket.data_filename()
703 data_filename = old_docket.data_filename()
704 unlink = lambda _tr=None: self._opener.unlink(data_filename)
704 unlink = lambda _tr=None: self._opener.unlink(data_filename)
705 if tr:
705 if tr:
706 category = b"dirstate-v2-clean-" + old_docket.uuid
706 category = b"dirstate-v2-clean-" + old_docket.uuid
707 tr.addpostclose(category, unlink)
707 tr.addpostclose(category, unlink)
708 else:
708 else:
709 unlink()
709 unlink()
710 self._docket = new_docket
710 self._docket = new_docket
711 # Reload from the newly-written file
711 # Reload from the newly-written file
712 util.clearcachedproperty(self, b"_rustmap")
712 util.clearcachedproperty(self, b"_rustmap")
713 self._dirtyparents = False
713 self._dirtyparents = False
714
714
715 @propertycache
715 @propertycache
716 def filefoldmap(self):
716 def filefoldmap(self):
717 """Returns a dictionary mapping normalized case paths to their
717 """Returns a dictionary mapping normalized case paths to their
718 non-normalized versions.
718 non-normalized versions.
719 """
719 """
720 return self._rustmap.filefoldmapasdict()
720 return self._rustmap.filefoldmapasdict()
721
721
722 def hastrackeddir(self, d):
722 def hastrackeddir(self, d):
723 return self._rustmap.hastrackeddir(d)
723 return self._rustmap.hastrackeddir(d)
724
724
725 def hasdir(self, d):
725 def hasdir(self, d):
726 return self._rustmap.hasdir(d)
726 return self._rustmap.hasdir(d)
727
727
728 @propertycache
728 @propertycache
729 def identity(self):
729 def identity(self):
730 self._rustmap
730 self._rustmap
731 return self.identity
731 return self.identity
732
732
733 @property
733 @property
734 def nonnormalset(self):
734 def nonnormalset(self):
735 nonnorm = self._rustmap.non_normal_entries()
735 nonnorm = self._rustmap.non_normal_entries()
736 return nonnorm
736 return nonnorm
737
737
738 @propertycache
738 @propertycache
739 def otherparentset(self):
739 def otherparentset(self):
740 otherparents = self._rustmap.other_parent_entries()
740 otherparents = self._rustmap.other_parent_entries()
741 return otherparents
741 return otherparents
742
742
743 def non_normal_or_other_parent_paths(self):
743 def non_normal_or_other_parent_paths(self):
744 return self._rustmap.non_normal_or_other_parent_paths()
744 return self._rustmap.non_normal_or_other_parent_paths()
745
745
746 @propertycache
746 @propertycache
747 def dirfoldmap(self):
747 def dirfoldmap(self):
748 f = {}
748 f = {}
749 normcase = util.normcase
749 normcase = util.normcase
750 for name in self._rustmap.tracked_dirs():
750 for name in self._rustmap.tracked_dirs():
751 f[normcase(name)] = name
751 f[normcase(name)] = name
752 return f
752 return f
@@ -1,613 +1,621 b''
1 # parsers.py - Python implementation of parsers.c
1 # parsers.py - Python implementation of parsers.c
2 #
2 #
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
3 # Copyright 2009 Olivia Mackall <olivia@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import struct
10 import struct
11 import zlib
11 import zlib
12
12
13 from ..node import (
13 from ..node import (
14 nullrev,
14 nullrev,
15 sha1nodeconstants,
15 sha1nodeconstants,
16 )
16 )
17 from ..thirdparty import attr
17 from ..thirdparty import attr
18 from .. import (
18 from .. import (
19 error,
19 error,
20 pycompat,
20 pycompat,
21 revlogutils,
21 revlogutils,
22 util,
22 util,
23 )
23 )
24
24
25 from ..revlogutils import nodemap as nodemaputil
25 from ..revlogutils import nodemap as nodemaputil
26 from ..revlogutils import constants as revlog_constants
26 from ..revlogutils import constants as revlog_constants
27
27
28 stringio = pycompat.bytesio
28 stringio = pycompat.bytesio
29
29
30
30
31 _pack = struct.pack
31 _pack = struct.pack
32 _unpack = struct.unpack
32 _unpack = struct.unpack
33 _compress = zlib.compress
33 _compress = zlib.compress
34 _decompress = zlib.decompress
34 _decompress = zlib.decompress
35
35
36
36
37 # a special value used internally for `size` if the file come from the other parent
37 # a special value used internally for `size` if the file come from the other parent
38 FROM_P2 = -2
38 FROM_P2 = -2
39
39
40 # a special value used internally for `size` if the file is modified/merged/added
40 # a special value used internally for `size` if the file is modified/merged/added
41 NONNORMAL = -1
41 NONNORMAL = -1
42
42
43 # a special value used internally for `time` if the time is ambigeous
43 # a special value used internally for `time` if the time is ambigeous
44 AMBIGUOUS_TIME = -1
44 AMBIGUOUS_TIME = -1
45
45
46
46
47 @attr.s(slots=True, init=False)
47 @attr.s(slots=True, init=False)
48 class DirstateItem(object):
48 class DirstateItem(object):
49 """represent a dirstate entry
49 """represent a dirstate entry
50
50
51 It contains:
51 It contains:
52
52
53 - state (one of 'n', 'a', 'r', 'm')
53 - state (one of 'n', 'a', 'r', 'm')
54 - mode,
54 - mode,
55 - size,
55 - size,
56 - mtime,
56 - mtime,
57 """
57 """
58
58
59 _state = attr.ib()
59 _state = attr.ib()
60 _mode = attr.ib()
60 _mode = attr.ib()
61 _size = attr.ib()
61 _size = attr.ib()
62 _mtime = attr.ib()
62 _mtime = attr.ib()
63
63
64 def __init__(self, state, mode, size, mtime):
64 def __init__(self, state, mode, size, mtime):
65 self._state = state
65 self._state = state
66 self._mode = mode
66 self._mode = mode
67 self._size = size
67 self._size = size
68 self._mtime = mtime
68 self._mtime = mtime
69
69
70 @classmethod
70 @classmethod
71 def from_v1_data(cls, state, mode, size, mtime):
71 def from_v1_data(cls, state, mode, size, mtime):
72 """Build a new DirstateItem object from V1 data
72 """Build a new DirstateItem object from V1 data
73
73
74 Since the dirstate-v1 format is frozen, the signature of this function
74 Since the dirstate-v1 format is frozen, the signature of this function
75 is not expected to change, unlike the __init__ one.
75 is not expected to change, unlike the __init__ one.
76 """
76 """
77 return cls(
77 return cls(
78 state=state,
78 state=state,
79 mode=mode,
79 mode=mode,
80 size=size,
80 size=size,
81 mtime=mtime,
81 mtime=mtime,
82 )
82 )
83
83
84 def set_possibly_dirty(self):
84 def set_possibly_dirty(self):
85 """Mark a file as "possibly dirty"
85 """Mark a file as "possibly dirty"
86
86
87 This means the next status call will have to actually check its content
87 This means the next status call will have to actually check its content
88 to make sure it is correct.
88 to make sure it is correct.
89 """
89 """
90 self._mtime = AMBIGUOUS_TIME
90 self._mtime = AMBIGUOUS_TIME
91
91
92 def __getitem__(self, idx):
92 def __getitem__(self, idx):
93 if idx == 0 or idx == -4:
93 if idx == 0 or idx == -4:
94 msg = b"do not use item[x], use item.state"
94 msg = b"do not use item[x], use item.state"
95 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
95 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
96 return self._state
96 return self._state
97 elif idx == 1 or idx == -3:
97 elif idx == 1 or idx == -3:
98 msg = b"do not use item[x], use item.mode"
98 msg = b"do not use item[x], use item.mode"
99 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
99 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
100 return self._mode
100 return self._mode
101 elif idx == 2 or idx == -2:
101 elif idx == 2 or idx == -2:
102 msg = b"do not use item[x], use item.size"
102 msg = b"do not use item[x], use item.size"
103 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
103 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
104 return self._size
104 return self._size
105 elif idx == 3 or idx == -1:
105 elif idx == 3 or idx == -1:
106 msg = b"do not use item[x], use item.mtime"
106 msg = b"do not use item[x], use item.mtime"
107 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
107 util.nouideprecwarn(msg, b'6.0', stacklevel=2)
108 return self._mtime
108 return self._mtime
109 else:
109 else:
110 raise IndexError(idx)
110 raise IndexError(idx)
111
111
112 @property
112 @property
113 def mode(self):
113 def mode(self):
114 return self._mode
114 return self._mode
115
115
116 @property
116 @property
117 def size(self):
117 def size(self):
118 return self._size
118 return self._size
119
119
120 @property
120 @property
121 def mtime(self):
121 def mtime(self):
122 return self._mtime
122 return self._mtime
123
123
124 @property
124 @property
125 def state(self):
125 def state(self):
126 """
126 """
127 States are:
127 States are:
128 n normal
128 n normal
129 m needs merging
129 m needs merging
130 r marked for removal
130 r marked for removal
131 a marked for addition
131 a marked for addition
132
132
133 XXX This "state" is a bit obscure and mostly a direct expression of the
133 XXX This "state" is a bit obscure and mostly a direct expression of the
134 dirstatev1 format. It would make sense to ultimately deprecate it in
134 dirstatev1 format. It would make sense to ultimately deprecate it in
135 favor of the more "semantic" attributes.
135 favor of the more "semantic" attributes.
136 """
136 """
137 return self._state
137 return self._state
138
138
139 @property
139 @property
140 def tracked(self):
140 def tracked(self):
141 """True is the file is tracked in the working copy"""
141 """True is the file is tracked in the working copy"""
142 return self._state in b"nma"
142 return self._state in b"nma"
143
143
144 @property
144 @property
145 def added(self):
145 def added(self):
146 """True if the file has been added"""
146 """True if the file has been added"""
147 return self._state == b'a'
147 return self._state == b'a'
148
148
149 @property
149 @property
150 def merged(self):
150 def merged(self):
151 """True if the file has been merged
151 """True if the file has been merged
152
152
153 Should only be set if a merge is in progress in the dirstate
153 Should only be set if a merge is in progress in the dirstate
154 """
154 """
155 return self._state == b'm'
155 return self._state == b'm'
156
156
157 @property
157 @property
158 def from_p2(self):
158 def from_p2(self):
159 """True if the file have been fetched from p2 during the current merge
159 """True if the file have been fetched from p2 during the current merge
160
160
161 This is only True is the file is currently tracked.
161 This is only True is the file is currently tracked.
162
162
163 Should only be set if a merge is in progress in the dirstate
163 Should only be set if a merge is in progress in the dirstate
164 """
164 """
165 return self._state == b'n' and self._size == FROM_P2
165 return self._state == b'n' and self._size == FROM_P2
166
166
167 @property
167 @property
168 def from_p2_removed(self):
168 def from_p2_removed(self):
169 """True if the file has been removed, but was "from_p2" initially
169 """True if the file has been removed, but was "from_p2" initially
170
170
171 This property seems like an abstraction leakage and should probably be
171 This property seems like an abstraction leakage and should probably be
172 dealt in this class (or maybe the dirstatemap) directly.
172 dealt in this class (or maybe the dirstatemap) directly.
173 """
173 """
174 return self._state == b'r' and self._size == FROM_P2
174 return self._state == b'r' and self._size == FROM_P2
175
175
176 @property
176 @property
177 def removed(self):
177 def removed(self):
178 """True if the file has been removed"""
178 """True if the file has been removed"""
179 return self._state == b'r'
179 return self._state == b'r'
180
180
181 @property
181 @property
182 def merged_removed(self):
182 def merged_removed(self):
183 """True if the file has been removed, but was "merged" initially
183 """True if the file has been removed, but was "merged" initially
184
184
185 This property seems like an abstraction leakage and should probably be
185 This property seems like an abstraction leakage and should probably be
186 dealt in this class (or maybe the dirstatemap) directly.
186 dealt in this class (or maybe the dirstatemap) directly.
187 """
187 """
188 return self._state == b'r' and self._size == NONNORMAL
188 return self._state == b'r' and self._size == NONNORMAL
189
189
190 @property
190 @property
191 def dm_nonnormal(self):
191 def dm_nonnormal(self):
192 """True is the entry is non-normal in the dirstatemap sense
192 """True is the entry is non-normal in the dirstatemap sense
193
193
194 There is no reason for any code, but the dirstatemap one to use this.
194 There is no reason for any code, but the dirstatemap one to use this.
195 """
195 """
196 return self.state != b'n' or self.mtime == AMBIGUOUS_TIME
196 return self.state != b'n' or self.mtime == AMBIGUOUS_TIME
197
197
198 @property
199 def dm_otherparent(self):
200 """True is the entry is `otherparent` in the dirstatemap sense
201
202 There is no reason for any code, but the dirstatemap one to use this.
203 """
204 return self._size == FROM_P2
205
198 def v1_state(self):
206 def v1_state(self):
199 """return a "state" suitable for v1 serialization"""
207 """return a "state" suitable for v1 serialization"""
200 return self._state
208 return self._state
201
209
202 def v1_mode(self):
210 def v1_mode(self):
203 """return a "mode" suitable for v1 serialization"""
211 """return a "mode" suitable for v1 serialization"""
204 return self._mode
212 return self._mode
205
213
206 def v1_size(self):
214 def v1_size(self):
207 """return a "size" suitable for v1 serialization"""
215 """return a "size" suitable for v1 serialization"""
208 return self._size
216 return self._size
209
217
210 def v1_mtime(self):
218 def v1_mtime(self):
211 """return a "mtime" suitable for v1 serialization"""
219 """return a "mtime" suitable for v1 serialization"""
212 return self._mtime
220 return self._mtime
213
221
214 def need_delay(self, now):
222 def need_delay(self, now):
215 """True if the stored mtime would be ambiguous with the current time"""
223 """True if the stored mtime would be ambiguous with the current time"""
216 return self._state == b'n' and self._mtime == now
224 return self._state == b'n' and self._mtime == now
217
225
218
226
219 def gettype(q):
227 def gettype(q):
220 return int(q & 0xFFFF)
228 return int(q & 0xFFFF)
221
229
222
230
223 class BaseIndexObject(object):
231 class BaseIndexObject(object):
224 # Can I be passed to an algorithme implemented in Rust ?
232 # Can I be passed to an algorithme implemented in Rust ?
225 rust_ext_compat = 0
233 rust_ext_compat = 0
226 # Format of an index entry according to Python's `struct` language
234 # Format of an index entry according to Python's `struct` language
227 index_format = revlog_constants.INDEX_ENTRY_V1
235 index_format = revlog_constants.INDEX_ENTRY_V1
228 # Size of a C unsigned long long int, platform independent
236 # Size of a C unsigned long long int, platform independent
229 big_int_size = struct.calcsize(b'>Q')
237 big_int_size = struct.calcsize(b'>Q')
230 # Size of a C long int, platform independent
238 # Size of a C long int, platform independent
231 int_size = struct.calcsize(b'>i')
239 int_size = struct.calcsize(b'>i')
232 # An empty index entry, used as a default value to be overridden, or nullrev
240 # An empty index entry, used as a default value to be overridden, or nullrev
233 null_item = (
241 null_item = (
234 0,
242 0,
235 0,
243 0,
236 0,
244 0,
237 -1,
245 -1,
238 -1,
246 -1,
239 -1,
247 -1,
240 -1,
248 -1,
241 sha1nodeconstants.nullid,
249 sha1nodeconstants.nullid,
242 0,
250 0,
243 0,
251 0,
244 revlog_constants.COMP_MODE_INLINE,
252 revlog_constants.COMP_MODE_INLINE,
245 revlog_constants.COMP_MODE_INLINE,
253 revlog_constants.COMP_MODE_INLINE,
246 )
254 )
247
255
248 @util.propertycache
256 @util.propertycache
249 def entry_size(self):
257 def entry_size(self):
250 return self.index_format.size
258 return self.index_format.size
251
259
252 @property
260 @property
253 def nodemap(self):
261 def nodemap(self):
254 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
262 msg = b"index.nodemap is deprecated, use index.[has_node|rev|get_rev]"
255 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
263 util.nouideprecwarn(msg, b'5.3', stacklevel=2)
256 return self._nodemap
264 return self._nodemap
257
265
258 @util.propertycache
266 @util.propertycache
259 def _nodemap(self):
267 def _nodemap(self):
260 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
268 nodemap = nodemaputil.NodeMap({sha1nodeconstants.nullid: nullrev})
261 for r in range(0, len(self)):
269 for r in range(0, len(self)):
262 n = self[r][7]
270 n = self[r][7]
263 nodemap[n] = r
271 nodemap[n] = r
264 return nodemap
272 return nodemap
265
273
266 def has_node(self, node):
274 def has_node(self, node):
267 """return True if the node exist in the index"""
275 """return True if the node exist in the index"""
268 return node in self._nodemap
276 return node in self._nodemap
269
277
270 def rev(self, node):
278 def rev(self, node):
271 """return a revision for a node
279 """return a revision for a node
272
280
273 If the node is unknown, raise a RevlogError"""
281 If the node is unknown, raise a RevlogError"""
274 return self._nodemap[node]
282 return self._nodemap[node]
275
283
276 def get_rev(self, node):
284 def get_rev(self, node):
277 """return a revision for a node
285 """return a revision for a node
278
286
279 If the node is unknown, return None"""
287 If the node is unknown, return None"""
280 return self._nodemap.get(node)
288 return self._nodemap.get(node)
281
289
282 def _stripnodes(self, start):
290 def _stripnodes(self, start):
283 if '_nodemap' in vars(self):
291 if '_nodemap' in vars(self):
284 for r in range(start, len(self)):
292 for r in range(start, len(self)):
285 n = self[r][7]
293 n = self[r][7]
286 del self._nodemap[n]
294 del self._nodemap[n]
287
295
288 def clearcaches(self):
296 def clearcaches(self):
289 self.__dict__.pop('_nodemap', None)
297 self.__dict__.pop('_nodemap', None)
290
298
291 def __len__(self):
299 def __len__(self):
292 return self._lgt + len(self._extra)
300 return self._lgt + len(self._extra)
293
301
294 def append(self, tup):
302 def append(self, tup):
295 if '_nodemap' in vars(self):
303 if '_nodemap' in vars(self):
296 self._nodemap[tup[7]] = len(self)
304 self._nodemap[tup[7]] = len(self)
297 data = self._pack_entry(len(self), tup)
305 data = self._pack_entry(len(self), tup)
298 self._extra.append(data)
306 self._extra.append(data)
299
307
300 def _pack_entry(self, rev, entry):
308 def _pack_entry(self, rev, entry):
301 assert entry[8] == 0
309 assert entry[8] == 0
302 assert entry[9] == 0
310 assert entry[9] == 0
303 return self.index_format.pack(*entry[:8])
311 return self.index_format.pack(*entry[:8])
304
312
305 def _check_index(self, i):
313 def _check_index(self, i):
306 if not isinstance(i, int):
314 if not isinstance(i, int):
307 raise TypeError(b"expecting int indexes")
315 raise TypeError(b"expecting int indexes")
308 if i < 0 or i >= len(self):
316 if i < 0 or i >= len(self):
309 raise IndexError
317 raise IndexError
310
318
311 def __getitem__(self, i):
319 def __getitem__(self, i):
312 if i == -1:
320 if i == -1:
313 return self.null_item
321 return self.null_item
314 self._check_index(i)
322 self._check_index(i)
315 if i >= self._lgt:
323 if i >= self._lgt:
316 data = self._extra[i - self._lgt]
324 data = self._extra[i - self._lgt]
317 else:
325 else:
318 index = self._calculate_index(i)
326 index = self._calculate_index(i)
319 data = self._data[index : index + self.entry_size]
327 data = self._data[index : index + self.entry_size]
320 r = self._unpack_entry(i, data)
328 r = self._unpack_entry(i, data)
321 if self._lgt and i == 0:
329 if self._lgt and i == 0:
322 offset = revlogutils.offset_type(0, gettype(r[0]))
330 offset = revlogutils.offset_type(0, gettype(r[0]))
323 r = (offset,) + r[1:]
331 r = (offset,) + r[1:]
324 return r
332 return r
325
333
326 def _unpack_entry(self, rev, data):
334 def _unpack_entry(self, rev, data):
327 r = self.index_format.unpack(data)
335 r = self.index_format.unpack(data)
328 r = r + (
336 r = r + (
329 0,
337 0,
330 0,
338 0,
331 revlog_constants.COMP_MODE_INLINE,
339 revlog_constants.COMP_MODE_INLINE,
332 revlog_constants.COMP_MODE_INLINE,
340 revlog_constants.COMP_MODE_INLINE,
333 )
341 )
334 return r
342 return r
335
343
336 def pack_header(self, header):
344 def pack_header(self, header):
337 """pack header information as binary"""
345 """pack header information as binary"""
338 v_fmt = revlog_constants.INDEX_HEADER
346 v_fmt = revlog_constants.INDEX_HEADER
339 return v_fmt.pack(header)
347 return v_fmt.pack(header)
340
348
341 def entry_binary(self, rev):
349 def entry_binary(self, rev):
342 """return the raw binary string representing a revision"""
350 """return the raw binary string representing a revision"""
343 entry = self[rev]
351 entry = self[rev]
344 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
352 p = revlog_constants.INDEX_ENTRY_V1.pack(*entry[:8])
345 if rev == 0:
353 if rev == 0:
346 p = p[revlog_constants.INDEX_HEADER.size :]
354 p = p[revlog_constants.INDEX_HEADER.size :]
347 return p
355 return p
348
356
349
357
350 class IndexObject(BaseIndexObject):
358 class IndexObject(BaseIndexObject):
351 def __init__(self, data):
359 def __init__(self, data):
352 assert len(data) % self.entry_size == 0, (
360 assert len(data) % self.entry_size == 0, (
353 len(data),
361 len(data),
354 self.entry_size,
362 self.entry_size,
355 len(data) % self.entry_size,
363 len(data) % self.entry_size,
356 )
364 )
357 self._data = data
365 self._data = data
358 self._lgt = len(data) // self.entry_size
366 self._lgt = len(data) // self.entry_size
359 self._extra = []
367 self._extra = []
360
368
361 def _calculate_index(self, i):
369 def _calculate_index(self, i):
362 return i * self.entry_size
370 return i * self.entry_size
363
371
364 def __delitem__(self, i):
372 def __delitem__(self, i):
365 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
373 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
366 raise ValueError(b"deleting slices only supports a:-1 with step 1")
374 raise ValueError(b"deleting slices only supports a:-1 with step 1")
367 i = i.start
375 i = i.start
368 self._check_index(i)
376 self._check_index(i)
369 self._stripnodes(i)
377 self._stripnodes(i)
370 if i < self._lgt:
378 if i < self._lgt:
371 self._data = self._data[: i * self.entry_size]
379 self._data = self._data[: i * self.entry_size]
372 self._lgt = i
380 self._lgt = i
373 self._extra = []
381 self._extra = []
374 else:
382 else:
375 self._extra = self._extra[: i - self._lgt]
383 self._extra = self._extra[: i - self._lgt]
376
384
377
385
378 class PersistentNodeMapIndexObject(IndexObject):
386 class PersistentNodeMapIndexObject(IndexObject):
379 """a Debug oriented class to test persistent nodemap
387 """a Debug oriented class to test persistent nodemap
380
388
381 We need a simple python object to test API and higher level behavior. See
389 We need a simple python object to test API and higher level behavior. See
382 the Rust implementation for more serious usage. This should be used only
390 the Rust implementation for more serious usage. This should be used only
383 through the dedicated `devel.persistent-nodemap` config.
391 through the dedicated `devel.persistent-nodemap` config.
384 """
392 """
385
393
386 def nodemap_data_all(self):
394 def nodemap_data_all(self):
387 """Return bytes containing a full serialization of a nodemap
395 """Return bytes containing a full serialization of a nodemap
388
396
389 The nodemap should be valid for the full set of revisions in the
397 The nodemap should be valid for the full set of revisions in the
390 index."""
398 index."""
391 return nodemaputil.persistent_data(self)
399 return nodemaputil.persistent_data(self)
392
400
393 def nodemap_data_incremental(self):
401 def nodemap_data_incremental(self):
394 """Return bytes containing a incremental update to persistent nodemap
402 """Return bytes containing a incremental update to persistent nodemap
395
403
396 This containst the data for an append-only update of the data provided
404 This containst the data for an append-only update of the data provided
397 in the last call to `update_nodemap_data`.
405 in the last call to `update_nodemap_data`.
398 """
406 """
399 if self._nm_root is None:
407 if self._nm_root is None:
400 return None
408 return None
401 docket = self._nm_docket
409 docket = self._nm_docket
402 changed, data = nodemaputil.update_persistent_data(
410 changed, data = nodemaputil.update_persistent_data(
403 self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
411 self, self._nm_root, self._nm_max_idx, self._nm_docket.tip_rev
404 )
412 )
405
413
406 self._nm_root = self._nm_max_idx = self._nm_docket = None
414 self._nm_root = self._nm_max_idx = self._nm_docket = None
407 return docket, changed, data
415 return docket, changed, data
408
416
409 def update_nodemap_data(self, docket, nm_data):
417 def update_nodemap_data(self, docket, nm_data):
410 """provide full block of persisted binary data for a nodemap
418 """provide full block of persisted binary data for a nodemap
411
419
412 The data are expected to come from disk. See `nodemap_data_all` for a
420 The data are expected to come from disk. See `nodemap_data_all` for a
413 produceur of such data."""
421 produceur of such data."""
414 if nm_data is not None:
422 if nm_data is not None:
415 self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
423 self._nm_root, self._nm_max_idx = nodemaputil.parse_data(nm_data)
416 if self._nm_root:
424 if self._nm_root:
417 self._nm_docket = docket
425 self._nm_docket = docket
418 else:
426 else:
419 self._nm_root = self._nm_max_idx = self._nm_docket = None
427 self._nm_root = self._nm_max_idx = self._nm_docket = None
420
428
421
429
422 class InlinedIndexObject(BaseIndexObject):
430 class InlinedIndexObject(BaseIndexObject):
423 def __init__(self, data, inline=0):
431 def __init__(self, data, inline=0):
424 self._data = data
432 self._data = data
425 self._lgt = self._inline_scan(None)
433 self._lgt = self._inline_scan(None)
426 self._inline_scan(self._lgt)
434 self._inline_scan(self._lgt)
427 self._extra = []
435 self._extra = []
428
436
429 def _inline_scan(self, lgt):
437 def _inline_scan(self, lgt):
430 off = 0
438 off = 0
431 if lgt is not None:
439 if lgt is not None:
432 self._offsets = [0] * lgt
440 self._offsets = [0] * lgt
433 count = 0
441 count = 0
434 while off <= len(self._data) - self.entry_size:
442 while off <= len(self._data) - self.entry_size:
435 start = off + self.big_int_size
443 start = off + self.big_int_size
436 (s,) = struct.unpack(
444 (s,) = struct.unpack(
437 b'>i',
445 b'>i',
438 self._data[start : start + self.int_size],
446 self._data[start : start + self.int_size],
439 )
447 )
440 if lgt is not None:
448 if lgt is not None:
441 self._offsets[count] = off
449 self._offsets[count] = off
442 count += 1
450 count += 1
443 off += self.entry_size + s
451 off += self.entry_size + s
444 if off != len(self._data):
452 if off != len(self._data):
445 raise ValueError(b"corrupted data")
453 raise ValueError(b"corrupted data")
446 return count
454 return count
447
455
448 def __delitem__(self, i):
456 def __delitem__(self, i):
449 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
457 if not isinstance(i, slice) or not i.stop == -1 or i.step is not None:
450 raise ValueError(b"deleting slices only supports a:-1 with step 1")
458 raise ValueError(b"deleting slices only supports a:-1 with step 1")
451 i = i.start
459 i = i.start
452 self._check_index(i)
460 self._check_index(i)
453 self._stripnodes(i)
461 self._stripnodes(i)
454 if i < self._lgt:
462 if i < self._lgt:
455 self._offsets = self._offsets[:i]
463 self._offsets = self._offsets[:i]
456 self._lgt = i
464 self._lgt = i
457 self._extra = []
465 self._extra = []
458 else:
466 else:
459 self._extra = self._extra[: i - self._lgt]
467 self._extra = self._extra[: i - self._lgt]
460
468
461 def _calculate_index(self, i):
469 def _calculate_index(self, i):
462 return self._offsets[i]
470 return self._offsets[i]
463
471
464
472
465 def parse_index2(data, inline, revlogv2=False):
473 def parse_index2(data, inline, revlogv2=False):
466 if not inline:
474 if not inline:
467 cls = IndexObject2 if revlogv2 else IndexObject
475 cls = IndexObject2 if revlogv2 else IndexObject
468 return cls(data), None
476 return cls(data), None
469 cls = InlinedIndexObject
477 cls = InlinedIndexObject
470 return cls(data, inline), (0, data)
478 return cls(data, inline), (0, data)
471
479
472
480
473 def parse_index_cl_v2(data):
481 def parse_index_cl_v2(data):
474 return IndexChangelogV2(data), None
482 return IndexChangelogV2(data), None
475
483
476
484
477 class IndexObject2(IndexObject):
485 class IndexObject2(IndexObject):
478 index_format = revlog_constants.INDEX_ENTRY_V2
486 index_format = revlog_constants.INDEX_ENTRY_V2
479
487
480 def replace_sidedata_info(
488 def replace_sidedata_info(
481 self,
489 self,
482 rev,
490 rev,
483 sidedata_offset,
491 sidedata_offset,
484 sidedata_length,
492 sidedata_length,
485 offset_flags,
493 offset_flags,
486 compression_mode,
494 compression_mode,
487 ):
495 ):
488 """
496 """
489 Replace an existing index entry's sidedata offset and length with new
497 Replace an existing index entry's sidedata offset and length with new
490 ones.
498 ones.
491 This cannot be used outside of the context of sidedata rewriting,
499 This cannot be used outside of the context of sidedata rewriting,
492 inside the transaction that creates the revision `rev`.
500 inside the transaction that creates the revision `rev`.
493 """
501 """
494 if rev < 0:
502 if rev < 0:
495 raise KeyError
503 raise KeyError
496 self._check_index(rev)
504 self._check_index(rev)
497 if rev < self._lgt:
505 if rev < self._lgt:
498 msg = b"cannot rewrite entries outside of this transaction"
506 msg = b"cannot rewrite entries outside of this transaction"
499 raise KeyError(msg)
507 raise KeyError(msg)
500 else:
508 else:
501 entry = list(self[rev])
509 entry = list(self[rev])
502 entry[0] = offset_flags
510 entry[0] = offset_flags
503 entry[8] = sidedata_offset
511 entry[8] = sidedata_offset
504 entry[9] = sidedata_length
512 entry[9] = sidedata_length
505 entry[11] = compression_mode
513 entry[11] = compression_mode
506 entry = tuple(entry)
514 entry = tuple(entry)
507 new = self._pack_entry(rev, entry)
515 new = self._pack_entry(rev, entry)
508 self._extra[rev - self._lgt] = new
516 self._extra[rev - self._lgt] = new
509
517
510 def _unpack_entry(self, rev, data):
518 def _unpack_entry(self, rev, data):
511 data = self.index_format.unpack(data)
519 data = self.index_format.unpack(data)
512 entry = data[:10]
520 entry = data[:10]
513 data_comp = data[10] & 3
521 data_comp = data[10] & 3
514 sidedata_comp = (data[10] & (3 << 2)) >> 2
522 sidedata_comp = (data[10] & (3 << 2)) >> 2
515 return entry + (data_comp, sidedata_comp)
523 return entry + (data_comp, sidedata_comp)
516
524
517 def _pack_entry(self, rev, entry):
525 def _pack_entry(self, rev, entry):
518 data = entry[:10]
526 data = entry[:10]
519 data_comp = entry[10] & 3
527 data_comp = entry[10] & 3
520 sidedata_comp = (entry[11] & 3) << 2
528 sidedata_comp = (entry[11] & 3) << 2
521 data += (data_comp | sidedata_comp,)
529 data += (data_comp | sidedata_comp,)
522
530
523 return self.index_format.pack(*data)
531 return self.index_format.pack(*data)
524
532
525 def entry_binary(self, rev):
533 def entry_binary(self, rev):
526 """return the raw binary string representing a revision"""
534 """return the raw binary string representing a revision"""
527 entry = self[rev]
535 entry = self[rev]
528 return self._pack_entry(rev, entry)
536 return self._pack_entry(rev, entry)
529
537
530 def pack_header(self, header):
538 def pack_header(self, header):
531 """pack header information as binary"""
539 """pack header information as binary"""
532 msg = 'version header should go in the docket, not the index: %d'
540 msg = 'version header should go in the docket, not the index: %d'
533 msg %= header
541 msg %= header
534 raise error.ProgrammingError(msg)
542 raise error.ProgrammingError(msg)
535
543
536
544
537 class IndexChangelogV2(IndexObject2):
545 class IndexChangelogV2(IndexObject2):
538 index_format = revlog_constants.INDEX_ENTRY_CL_V2
546 index_format = revlog_constants.INDEX_ENTRY_CL_V2
539
547
540 def _unpack_entry(self, rev, data, r=True):
548 def _unpack_entry(self, rev, data, r=True):
541 items = self.index_format.unpack(data)
549 items = self.index_format.unpack(data)
542 entry = items[:3] + (rev, rev) + items[3:8]
550 entry = items[:3] + (rev, rev) + items[3:8]
543 data_comp = items[8] & 3
551 data_comp = items[8] & 3
544 sidedata_comp = (items[8] >> 2) & 3
552 sidedata_comp = (items[8] >> 2) & 3
545 return entry + (data_comp, sidedata_comp)
553 return entry + (data_comp, sidedata_comp)
546
554
547 def _pack_entry(self, rev, entry):
555 def _pack_entry(self, rev, entry):
548 assert entry[3] == rev, entry[3]
556 assert entry[3] == rev, entry[3]
549 assert entry[4] == rev, entry[4]
557 assert entry[4] == rev, entry[4]
550 data = entry[:3] + entry[5:10]
558 data = entry[:3] + entry[5:10]
551 data_comp = entry[10] & 3
559 data_comp = entry[10] & 3
552 sidedata_comp = (entry[11] & 3) << 2
560 sidedata_comp = (entry[11] & 3) << 2
553 data += (data_comp | sidedata_comp,)
561 data += (data_comp | sidedata_comp,)
554 return self.index_format.pack(*data)
562 return self.index_format.pack(*data)
555
563
556
564
557 def parse_index_devel_nodemap(data, inline):
565 def parse_index_devel_nodemap(data, inline):
558 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
566 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
559 return PersistentNodeMapIndexObject(data), None
567 return PersistentNodeMapIndexObject(data), None
560
568
561
569
def parse_dirstate(dmap, copymap, st):
    """Parse a v1 dirstate blob ``st``, filling ``dmap`` and ``copymap``.

    The first 40 bytes hold the two parent nodes, which are returned.
    Each following record is a fixed-size header (state, mode, size,
    mtime, filename length) followed by the filename; a NUL inside the
    filename separates it from its copy source.
    """
    parents = [st[:20], st[20:40]]
    header_fmt = b">cllll"
    header_size = struct.calcsize(header_fmt)
    offset = 40
    end = len(st)

    while offset < end:
        name_start = offset + header_size
        # a literal format string here is faster than a variable lookup
        fields = _unpack(b">cllll", st[offset:name_start])
        offset = name_start + fields[4]
        name = st[name_start:offset]
        if b'\0' in name:
            name, copy_source = name.split(b'\0')
            copymap[name] = copy_source
        dmap[name] = DirstateItem.from_v1_data(*fields[:4])
    return parents
581
589
582
590
def pack_dirstate(dmap, copymap, pl, now):
    """Serialize ``dmap``/``copymap`` into a v1 dirstate blob.

    ``pl`` is the pair of parent nodes written first; ``now`` is the
    current wall-clock time, used to invalidate mtimes recorded during
    the same second as this write.
    """
    now = int(now)
    buf = stringio()
    buf.write(b"".join(pl))

    for fname, item in pycompat.iteritems(dmap):
        if item.need_delay(now):
            # The file was last modified "simultaneously" with this
            # dirstate write (same second on filesystems with 1s mtime
            # granularity), which commonly happens on 'update'.  The
            # user could still change the file within that second
            # without changing its size, so drop the recorded mtime to
            # force future 'status' calls to compare file contents when
            # the size matches.  This prevents mistakenly treating such
            # files as clean.
            item.set_possibly_dirty()

        if fname in copymap:
            fname = b"%s\0%s" % (fname, copymap[fname])
        record = _pack(
            b">cllll",
            item.v1_state(),
            item.v1_mode(),
            item.v1_size(),
            item.v1_mtime(),
            len(fname),
        )
        buf.write(record)
        buf.write(fname)
    return buf.getvalue()
General Comments 0
You need to be logged in to leave comments. Login now