##// END OF EJS Templates
revlog: replace PyInt_AS_LONG with a more portable helper function...
Augie Fackler -
r40634:fa331960 default
parent child Browse files
Show More
@@ -1,2494 +1,2517
1 1 /*
2 2 parsers.c - efficient content parsing
3 3
4 4 Copyright 2008 Matt Mackall <mpm@selenic.com> and others
5 5
6 6 This software may be used and distributed according to the terms of
7 7 the GNU General Public License, incorporated herein by reference.
8 8 */
9 9
10 10 #include <Python.h>
11 11 #include <assert.h>
12 12 #include <ctype.h>
13 13 #include <stddef.h>
14 14 #include <string.h>
15 15
16 16 #include "bitmanipulation.h"
17 17 #include "charencode.h"
18 18 #include "util.h"
19 19
#ifdef IS_PY3K
/* The mapping of Python types is meant to be temporary to get Python
 * 3 to compile. We should remove this once Python 3 support is fully
 * supported and proper types are used in the extensions themselves.
 * Note: there is deliberately no PyInt_AS_LONG mapping; unchecked
 * conversions were replaced by the pylong_to_long() helper. */
#define PyInt_Check PyLong_Check
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#endif
30 29
typedef struct indexObjectStruct indexObject;

/* One trie node: 16 child slots, one per hex nybble. Slot encoding is
 * described in the nodetree comment below. */
typedef struct {
	int children[16];
} nodetreenode;

/*
 * A base-16 trie for fast node->rev mapping.
 *
 * Positive value is index of the next node in the trie
 * Negative value is a leaf: -(rev + 2)
 * Zero is empty
 */
typedef struct {
	indexObject *index;  /* index whose nodes this trie maps to revs */
	nodetreenode *nodes; /* growable backing array of trie nodes */
	unsigned length;     /* # nodes in use */
	unsigned capacity;   /* # nodes allocated */
	int depth;           /* maximum depth of tree */
	int splits;          /* # splits performed */
} nodetree;

/* Python-level wrapper exposing a standalone nodetree object. */
typedef struct {
	PyObject_HEAD /* ; */
	nodetree nt;
} nodetreeObject;
57 56
/*
 * This class has two behaviors.
 *
 * When used in a list-like way (with integer keys), we decode an
 * entry in a RevlogNG index file on demand. Our last entry is a
 * sentinel, always a nullid. We have limited support for
 * integer-keyed insert and delete, only at elements right before the
 * sentinel.
 *
 * With string keys, we lazily perform a reverse mapping from node to
 * rev, using a base-16 trie.
 */
struct indexObjectStruct {
	PyObject_HEAD
	/* Type-specific fields go here. */
	PyObject *data;         /* raw bytes of index */
	Py_buffer buf;          /* buffer of data */
	PyObject **cache;       /* cached tuples, one slot per on-disk rev */
	const char **offsets;   /* populated on demand (inlined data only) */
	Py_ssize_t raw_length;  /* original number of elements */
	Py_ssize_t length;      /* current number of elements */
	PyObject *added;        /* populated on demand: appended entries */
	PyObject *headrevs;     /* cache, invalidated on changes */
	PyObject *filteredrevs; /* filtered revs set */
	nodetree nt;            /* base-16 trie */
	int ntinitialized;      /* 0 or 1: is `nt` usable? */
	int ntrev;              /* last rev scanned */
	int ntlookups;          /* # lookups */
	int ntmisses;           /* # lookups that miss the cache */
	int inlined;            /* nonzero if index data is inlined */
};
89 88
90 89 static Py_ssize_t index_length(const indexObject *self)
91 90 {
92 91 if (self->added == NULL)
93 92 return self->length;
94 93 return self->length + PyList_GET_SIZE(self->added);
95 94 }
96 95
/* Shared tuple returned for the nullrev sentinel entry (index -1). */
static PyObject *nullentry = NULL;
static const char nullid[20] = {0};

static Py_ssize_t inline_scan(indexObject *self, const char **offsets);

/* Py_BuildValue format for an index entry tuple: the leading K/k picks
 * the unsigned integer size matching a 64-bit offset+flags field on this
 * platform, and s#/y# is the bytes spelling for Python 2 vs 3. */
#if LONG_MAX == 0x7fffffffL
static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
#else
static const char *const tuple_format = PY23("kiiiiiis#", "kiiiiiiy#");
#endif

/* A RevlogNG v1 index entry is 64 bytes long. */
static const long v1_hdrsize = 64;
110 109
111 110 static void raise_revlog_error(void)
112 111 {
113 112 PyObject *mod = NULL, *dict = NULL, *errclass = NULL;
114 113
115 114 mod = PyImport_ImportModule("mercurial.error");
116 115 if (mod == NULL) {
117 116 goto cleanup;
118 117 }
119 118
120 119 dict = PyModule_GetDict(mod);
121 120 if (dict == NULL) {
122 121 goto cleanup;
123 122 }
124 123 Py_INCREF(dict);
125 124
126 125 errclass = PyDict_GetItemString(dict, "RevlogError");
127 126 if (errclass == NULL) {
128 127 PyErr_SetString(PyExc_SystemError,
129 128 "could not find RevlogError");
130 129 goto cleanup;
131 130 }
132 131
133 132 /* value of exception is ignored by callers */
134 133 PyErr_SetString(errclass, "RevlogError");
135 134
136 135 cleanup:
137 136 Py_XDECREF(dict);
138 137 Py_XDECREF(mod);
139 138 }
140 139
141 140 /*
142 141 * Return a pointer to the beginning of a RevlogNG record.
143 142 */
144 143 static const char *index_deref(indexObject *self, Py_ssize_t pos)
145 144 {
146 145 if (self->inlined && pos > 0) {
147 146 if (self->offsets == NULL) {
148 147 self->offsets = PyMem_Malloc(self->raw_length *
149 148 sizeof(*self->offsets));
150 149 if (self->offsets == NULL)
151 150 return (const char *)PyErr_NoMemory();
152 151 inline_scan(self, self->offsets);
153 152 }
154 153 return self->offsets[pos];
155 154 }
156 155
157 156 return (const char *)(self->buf.buf) + pos * v1_hdrsize;
158 157 }
159 158
160 159 static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps,
161 160 int maxrev)
162 161 {
163 162 if (rev >= self->length) {
163 long tmp;
164 164 PyObject *tuple =
165 165 PyList_GET_ITEM(self->added, rev - self->length);
166 ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5));
167 ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6));
166 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 5), &tmp)) {
167 return -1;
168 }
169 ps[0] = (int)tmp;
170 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 6), &tmp)) {
171 return -1;
172 }
173 ps[1] = (int)tmp;
168 174 } else {
169 175 const char *data = index_deref(self, rev);
170 176 ps[0] = getbe32(data + 24);
171 177 ps[1] = getbe32(data + 28);
172 178 }
173 179 /* If index file is corrupted, ps[] may point to invalid revisions. So
174 180 * there is a risk of buffer overflow to trust them unconditionally. */
175 181 if (ps[0] > maxrev || ps[1] > maxrev) {
176 182 PyErr_SetString(PyExc_ValueError, "parent out of range");
177 183 return -1;
178 184 }
179 185 return 0;
180 186 }
181 187
/*
 * RevlogNG format (all in big endian, data may be inlined):
 *    6 bytes: offset
 *    2 bytes: flags
 *    4 bytes: compressed length
 *    4 bytes: uncompressed length
 *    4 bytes: base revision
 *    4 bytes: link revision
 *    4 bytes: parent 1 revision
 *    4 bytes: parent 2 revision
 *   32 bytes: nodeid (only 20 bytes used)
 *
 * Return a new reference to the entry tuple for `pos`, or NULL with an
 * exception set. On-disk entries are decoded once and cached.
 */
static PyObject *index_get(indexObject *self, Py_ssize_t pos)
{
	uint64_t offset_flags;
	int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
	const char *c_node_id;
	const char *data;
	Py_ssize_t length = index_length(self);
	PyObject *entry;

	if (pos == -1) {
		/* nullrev: hand out the shared sentinel tuple */
		Py_INCREF(nullentry);
		return nullentry;
	}

	if (pos < 0 || pos >= length) {
		PyErr_SetString(PyExc_IndexError, "revlog index out of range");
		return NULL;
	}

	if (pos >= self->length) {
		/* in-memory entries are already stored as tuples */
		PyObject *obj;
		obj = PyList_GET_ITEM(self->added, pos - self->length);
		Py_INCREF(obj);
		return obj;
	}

	if (self->cache) {
		if (self->cache[pos]) {
			Py_INCREF(self->cache[pos]);
			return self->cache[pos];
		}
	} else {
		/* lazily allocate the per-rev tuple cache */
		self->cache = calloc(self->raw_length, sizeof(PyObject *));
		if (self->cache == NULL)
			return PyErr_NoMemory();
	}

	data = index_deref(self, pos);
	if (data == NULL)
		return NULL;

	offset_flags = getbe32(data + 4);
	if (pos == 0) /* mask out version number for the first entry */
		offset_flags &= 0xFFFF;
	else {
		uint32_t offset_high = getbe32(data);
		offset_flags |= ((uint64_t)offset_high) << 32;
	}

	comp_len = getbe32(data + 8);
	uncomp_len = getbe32(data + 12);
	base_rev = getbe32(data + 16);
	link_rev = getbe32(data + 20);
	parent_1 = getbe32(data + 24);
	parent_2 = getbe32(data + 28);
	c_node_id = data + 32;

	entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
	                      base_rev, link_rev, parent_1, parent_2, c_node_id,
	                      20);

	if (entry) {
		PyObject_GC_UnTrack(entry);
		/* the extra reference is owned by the cache slot below */
		Py_INCREF(entry);
	}

	self->cache[pos] = entry;

	return entry;
}
264 270
265 271 /*
266 272 * Return the 20-byte SHA of the node corresponding to the given rev.
267 273 */
268 274 static const char *index_node(indexObject *self, Py_ssize_t pos)
269 275 {
270 276 Py_ssize_t length = index_length(self);
271 277 const char *data;
272 278
273 279 if (pos == -1)
274 280 return nullid;
275 281
276 282 if (pos >= length)
277 283 return NULL;
278 284
279 285 if (pos >= self->length) {
280 286 PyObject *tuple, *str;
281 287 tuple = PyList_GET_ITEM(self->added, pos - self->length);
282 288 str = PyTuple_GetItem(tuple, 7);
283 289 return str ? PyBytes_AS_STRING(str) : NULL;
284 290 }
285 291
286 292 data = index_deref(self, pos);
287 293 return data ? data + 32 : NULL;
288 294 }
289 295
290 296 /*
291 297 * Return the 20-byte SHA of the node corresponding to the given rev. The
292 298 * rev is assumed to be existing. If not, an exception is set.
293 299 */
294 300 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
295 301 {
296 302 const char *node = index_node(self, pos);
297 303 if (node == NULL) {
298 304 PyErr_Format(PyExc_IndexError, "could not access rev %d",
299 305 (int)pos);
300 306 }
301 307 return node;
302 308 }
303 309
304 310 static int nt_insert(nodetree *self, const char *node, int rev);
305 311
306 312 static int node_check(PyObject *obj, char **node)
307 313 {
308 314 Py_ssize_t nodelen;
309 315 if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
310 316 return -1;
311 317 if (nodelen == 20)
312 318 return 0;
313 319 PyErr_SetString(PyExc_ValueError, "20-byte hash required");
314 320 return -1;
315 321 }
316 322
317 323 static PyObject *index_append(indexObject *self, PyObject *obj)
318 324 {
319 325 char *node;
320 326 Py_ssize_t len;
321 327
322 328 if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
323 329 PyErr_SetString(PyExc_TypeError, "8-tuple required");
324 330 return NULL;
325 331 }
326 332
327 333 if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
328 334 return NULL;
329 335
330 336 len = index_length(self);
331 337
332 338 if (self->added == NULL) {
333 339 self->added = PyList_New(0);
334 340 if (self->added == NULL)
335 341 return NULL;
336 342 }
337 343
338 344 if (PyList_Append(self->added, obj) == -1)
339 345 return NULL;
340 346
341 347 if (self->ntinitialized)
342 348 nt_insert(&self->nt, node, (int)len);
343 349
344 350 Py_CLEAR(self->headrevs);
345 351 Py_RETURN_NONE;
346 352 }
347 353
/*
 * Return a dict (bytes key -> int value) of statistics about the index
 * and its node trie, or NULL on error (exception set).
 */
static PyObject *index_stats(indexObject *self)
{
	PyObject *obj = PyDict_New();
	PyObject *s = NULL;
	PyObject *t = NULL;

	if (obj == NULL)
		return NULL;

/* Helper: store self->__n in obj under key __d; on any failure fall
 * through to the shared cleanup at bail. */
#define istat(__n, __d)                                                        \
	do {                                                                   \
		s = PyBytes_FromString(__d);                                   \
		t = PyInt_FromSsize_t(self->__n);                              \
		if (!s || !t)                                                  \
			goto bail;                                             \
		if (PyDict_SetItem(obj, s, t) == -1)                           \
			goto bail;                                             \
		Py_CLEAR(s);                                                   \
		Py_CLEAR(t);                                                   \
	} while (0)

	if (self->added) {
		Py_ssize_t len = PyList_GET_SIZE(self->added);
		s = PyBytes_FromString("index entries added");
		t = PyInt_FromSsize_t(len);
		if (!s || !t)
			goto bail;
		if (PyDict_SetItem(obj, s, t) == -1)
			goto bail;
		Py_CLEAR(s);
		Py_CLEAR(t);
	}

	if (self->raw_length != self->length)
		istat(raw_length, "revs on disk");
	istat(length, "revs in memory");
	istat(ntlookups, "node trie lookups");
	istat(ntmisses, "node trie misses");
	istat(ntrev, "node trie last rev scanned");
	if (self->ntinitialized) {
		istat(nt.capacity, "node trie capacity");
		istat(nt.depth, "node trie depth");
		istat(nt.length, "node trie count");
		istat(nt.splits, "node trie splits");
	}

#undef istat

	return obj;

bail:
	Py_XDECREF(obj);
	Py_XDECREF(s);
	Py_XDECREF(t);
	return NULL;
}
404 410
405 411 /*
406 412 * When we cache a list, we want to be sure the caller can't mutate
407 413 * the cached copy.
408 414 */
409 415 static PyObject *list_copy(PyObject *list)
410 416 {
411 417 Py_ssize_t len = PyList_GET_SIZE(list);
412 418 PyObject *newlist = PyList_New(len);
413 419 Py_ssize_t i;
414 420
415 421 if (newlist == NULL)
416 422 return NULL;
417 423
418 424 for (i = 0; i < len; i++) {
419 425 PyObject *obj = PyList_GET_ITEM(list, i);
420 426 Py_INCREF(obj);
421 427 PyList_SET_ITEM(newlist, i, obj);
422 428 }
423 429
424 430 return newlist;
425 431 }
426 432
427 433 static int check_filter(PyObject *filter, Py_ssize_t arg)
428 434 {
429 435 if (filter) {
430 436 PyObject *arglist, *result;
431 437 int isfiltered;
432 438
433 439 arglist = Py_BuildValue("(n)", arg);
434 440 if (!arglist) {
435 441 return -1;
436 442 }
437 443
438 444 result = PyEval_CallObject(filter, arglist);
439 445 Py_DECREF(arglist);
440 446 if (!result) {
441 447 return -1;
442 448 }
443 449
444 450 /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error,
445 451 * same as this function, so we can just return it directly.*/
446 452 isfiltered = PyObject_IsTrue(result);
447 453 Py_DECREF(result);
448 454 return isfiltered;
449 455 } else {
450 456 return 0;
451 457 }
452 458 }
453 459
454 460 static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list,
455 461 Py_ssize_t marker, char *phases)
456 462 {
457 463 PyObject *iter = NULL;
458 464 PyObject *iter_item = NULL;
459 465 Py_ssize_t min_idx = index_length(self) + 2;
460 466 long iter_item_long;
461 467
462 468 if (PyList_GET_SIZE(list) != 0) {
463 469 iter = PyObject_GetIter(list);
464 470 if (iter == NULL)
465 471 return -2;
466 472 while ((iter_item = PyIter_Next(iter))) {
467 iter_item_long = PyInt_AS_LONG(iter_item);
473 if (!pylong_to_long(iter_item, &iter_item_long)) {
474 Py_DECREF(iter_item);
475 return -2;
476 }
468 477 Py_DECREF(iter_item);
469 478 if (iter_item_long < min_idx)
470 479 min_idx = iter_item_long;
471 480 phases[iter_item_long] = (char)marker;
472 481 }
473 482 Py_DECREF(iter);
474 483 }
475 484
476 485 return min_idx;
477 486 }
478 487
479 488 static inline void set_phase_from_parents(char *phases, int parent_1,
480 489 int parent_2, Py_ssize_t i)
481 490 {
482 491 if (parent_1 >= 0 && phases[parent_1] > phases[i])
483 492 phases[i] = phases[parent_1];
484 493 if (parent_2 >= 0 && phases[parent_2] > phases[i])
485 494 phases[i] = phases[parent_2];
486 495 }
487 496
/*
 * reachableroots2(minroot, heads, roots, includepath) -> list of revs
 *
 * Walk ancestors of `heads` (bounded below by `minroot`) and return the
 * members of `roots` that are reachable. If includepath is True, also
 * return every rev lying on a path between a reachable root and a head.
 */
static PyObject *reachableroots2(indexObject *self, PyObject *args)
{

	/* Input */
	long minroot;
	PyObject *includepatharg = NULL;
	int includepath = 0;
	/* heads and roots are lists */
	PyObject *heads = NULL;
	PyObject *roots = NULL;
	PyObject *reachable = NULL;

	PyObject *val;
	Py_ssize_t len = index_length(self);
	long revnum;
	Py_ssize_t k;
	Py_ssize_t i;
	Py_ssize_t l;
	int r;
	int parents[2];

	/* Internal data structure:
	 * tovisit: array of length len+1 (all revs + nullrev), filled upto
	 * lentovisit
	 *
	 * revstates: array of length len+1 (all revs + nullrev) */
	int *tovisit = NULL;
	long lentovisit = 0;
	enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 };
	char *revstates = NULL;

	/* Get arguments */
	if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads,
	                      &PyList_Type, &roots, &PyBool_Type,
	                      &includepatharg))
		goto bail;

	if (includepatharg == Py_True)
		includepath = 1;

	/* Initialize return set */
	reachable = PyList_New(0);
	if (reachable == NULL)
		goto bail;

	/* Initialize internal datastructures */
	tovisit = (int *)malloc((len + 1) * sizeof(int));
	if (tovisit == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	revstates = (char *)calloc(len + 1, 1);
	if (revstates == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	/* revstates is indexed by rev+1 so that nullrev (-1) fits at 0 */
	l = PyList_GET_SIZE(roots);
	for (i = 0; i < l; i++) {
		revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
		if (revnum == -1 && PyErr_Occurred())
			goto bail;
		/* If root is out of range, e.g. wdir(), it must be unreachable
		 * from heads. So we can just ignore it. */
		if (revnum + 1 < 0 || revnum + 1 >= len + 1)
			continue;
		revstates[revnum + 1] |= RS_ROOT;
	}

	/* Populate tovisit with all the heads */
	l = PyList_GET_SIZE(heads);
	for (i = 0; i < l; i++) {
		revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
		if (revnum == -1 && PyErr_Occurred())
			goto bail;
		if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
			PyErr_SetString(PyExc_IndexError, "head out of range");
			goto bail;
		}
		if (!(revstates[revnum + 1] & RS_SEEN)) {
			tovisit[lentovisit++] = (int)revnum;
			revstates[revnum + 1] |= RS_SEEN;
		}
	}

	/* Visit the tovisit list and find the reachable roots */
	k = 0;
	while (k < lentovisit) {
		/* Add the node to reachable if it is a root*/
		revnum = tovisit[k++];
		if (revstates[revnum + 1] & RS_ROOT) {
			revstates[revnum + 1] |= RS_REACHABLE;
			val = PyInt_FromLong(revnum);
			if (val == NULL)
				goto bail;
			r = PyList_Append(reachable, val);
			Py_DECREF(val);
			if (r < 0)
				goto bail;
			/* without includepath we can prune the walk at a
			 * reachable root */
			if (includepath == 0)
				continue;
		}

		/* Add its parents to the list of nodes to visit */
		if (revnum == -1)
			continue;
		r = index_get_parents(self, revnum, parents, (int)len - 1);
		if (r < 0)
			goto bail;
		for (i = 0; i < 2; i++) {
			if (!(revstates[parents[i] + 1] & RS_SEEN) &&
			    parents[i] >= minroot) {
				tovisit[lentovisit++] = parents[i];
				revstates[parents[i] + 1] |= RS_SEEN;
			}
		}
	}

	/* Find all the nodes in between the roots we found and the heads
	 * and add them to the reachable set */
	if (includepath == 1) {
		long minidx = minroot;
		if (minidx < 0)
			minidx = 0;
		for (i = minidx; i < len; i++) {
			if (!(revstates[i + 1] & RS_SEEN))
				continue;
			r = index_get_parents(self, i, parents, (int)len - 1);
			/* Corrupted index file, error is set from
			 * index_get_parents */
			if (r < 0)
				goto bail;
			if (((revstates[parents[0] + 1] |
			      revstates[parents[1] + 1]) &
			     RS_REACHABLE) &&
			    !(revstates[i + 1] & RS_REACHABLE)) {
				revstates[i + 1] |= RS_REACHABLE;
				val = PyInt_FromSsize_t(i);
				if (val == NULL)
					goto bail;
				r = PyList_Append(reachable, val);
				Py_DECREF(val);
				if (r < 0)
					goto bail;
			}
		}
	}

	free(revstates);
	free(tovisit);
	return reachable;
bail:
	Py_XDECREF(reachable);
	free(revstates);
	free(tovisit);
	return NULL;
}
646 655
/*
 * compute_phases_map_sets(roots) -> (len, [None, set(draft), set(secret), ...])
 *
 * Given per-phase root lists, propagate phase information down the DAG
 * and return the index length plus, for each non-public phase, the set
 * of revs in that phase.
 */
static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args)
{
	PyObject *roots = Py_None;
	PyObject *ret = NULL;
	PyObject *phasessize = NULL;
	PyObject *phaseroots = NULL;
	PyObject *phaseset = NULL;
	PyObject *phasessetlist = NULL;
	PyObject *rev = NULL;
	Py_ssize_t len = index_length(self);
	Py_ssize_t numphase = 0;
	Py_ssize_t minrevallphases = 0;
	Py_ssize_t minrevphase = 0;
	Py_ssize_t i = 0;
	char *phases = NULL;
	long phase;

	if (!PyArg_ParseTuple(args, "O", &roots))
		goto done;
	if (roots == NULL || !PyList_Check(roots)) {
		PyErr_SetString(PyExc_TypeError, "roots must be a list");
		goto done;
	}

	phases = calloc(
	    len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */
	if (phases == NULL) {
		PyErr_NoMemory();
		goto done;
	}
	/* Put the phase information of all the roots in phases */
	numphase = PyList_GET_SIZE(roots) + 1;
	minrevallphases = len + 1;
	phasessetlist = PyList_New(numphase);
	if (phasessetlist == NULL)
		goto done;

	/* slot 0 (public) has no set; it is computed by difference */
	PyList_SET_ITEM(phasessetlist, 0, Py_None);
	Py_INCREF(Py_None);

	for (i = 0; i < numphase - 1; i++) {
		phaseroots = PyList_GET_ITEM(roots, i);
		phaseset = PySet_New(NULL);
		if (phaseset == NULL)
			goto release;
		/* PyList_SET_ITEM steals the phaseset reference */
		PyList_SET_ITEM(phasessetlist, i + 1, phaseset);
		if (!PyList_Check(phaseroots)) {
			PyErr_SetString(PyExc_TypeError,
			                "roots item must be a list");
			goto release;
		}
		minrevphase =
		    add_roots_get_min(self, phaseroots, i + 1, phases);
		if (minrevphase == -2) /* Error from add_roots_get_min */
			goto release;
		minrevallphases = MIN(minrevallphases, minrevphase);
	}
	/* Propagate the phase information from the roots to the revs */
	if (minrevallphases != -1) {
		int parents[2];
		for (i = minrevallphases; i < len; i++) {
			if (index_get_parents(self, i, parents, (int)len - 1) <
			    0)
				goto release;
			set_phase_from_parents(phases, parents[0], parents[1],
			                       i);
		}
	}
	/* Transform phase list to a python list */
	phasessize = PyInt_FromSsize_t(len);
	if (phasessize == NULL)
		goto release;
	for (i = 0; i < len; i++) {
		phase = phases[i];
		/* We only store the sets of phase for non public phase, the
		 * public phase is computed as a difference */
		if (phase != 0) {
			phaseset = PyList_GET_ITEM(phasessetlist, phase);
			rev = PyInt_FromSsize_t(i);
			if (rev == NULL)
				goto release;
			PySet_Add(phaseset, rev);
			Py_XDECREF(rev);
		}
	}
	ret = PyTuple_Pack(2, phasessize, phasessetlist);

release:
	Py_XDECREF(phasessize);
	Py_XDECREF(phasessetlist);
done:
	free(phases);
	return ret;
}
741 750
/*
 * headrevs([filteredrevs]) -> list of head revs
 *
 * A head is a rev with no unfiltered children. The result is cached on
 * the index and invalidated when entries are added or the filter set
 * changes; callers always receive a fresh copy of the cached list.
 */
static PyObject *index_headrevs(indexObject *self, PyObject *args)
{
	Py_ssize_t i, j, len;
	char *nothead = NULL;
	PyObject *heads = NULL;
	PyObject *filter = NULL;
	PyObject *filteredrevs = Py_None;

	if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) {
		return NULL;
	}

	/* cache hit requires the same filter object (identity compare) */
	if (self->headrevs && filteredrevs == self->filteredrevs)
		return list_copy(self->headrevs);

	Py_DECREF(self->filteredrevs);
	self->filteredrevs = filteredrevs;
	Py_INCREF(filteredrevs);

	if (filteredrevs != Py_None) {
		filter = PyObject_GetAttrString(filteredrevs, "__contains__");
		if (!filter) {
			PyErr_SetString(
			    PyExc_TypeError,
			    "filteredrevs has no attribute __contains__");
			goto bail;
		}
	}

	len = index_length(self);
	heads = PyList_New(0);
	if (heads == NULL)
		goto bail;
	if (len == 0) {
		/* empty repo: nullrev is the only head */
		PyObject *nullid = PyInt_FromLong(-1);
		if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
			Py_XDECREF(nullid);
			goto bail;
		}
		goto done;
	}

	nothead = calloc(len, 1);
	if (nothead == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	for (i = len - 1; i >= 0; i--) {
		int isfiltered;
		int parents[2];

		/* If nothead[i] == 1, it means we've seen an unfiltered child
		 * of this node already, and therefore this node is not
		 * filtered. So we can skip the expensive check_filter step.
		 */
		if (nothead[i] != 1) {
			isfiltered = check_filter(filter, i);
			if (isfiltered == -1) {
				PyErr_SetString(PyExc_TypeError,
				                "unable to check filter");
				goto bail;
			}

			if (isfiltered) {
				nothead[i] = 1;
				continue;
			}
		}

		if (index_get_parents(self, i, parents, (int)len - 1) < 0)
			goto bail;
		for (j = 0; j < 2; j++) {
			if (parents[j] >= 0)
				nothead[parents[j]] = 1;
		}
	}

	for (i = 0; i < len; i++) {
		PyObject *head;

		if (nothead[i])
			continue;
		head = PyInt_FromSsize_t(i);
		if (head == NULL || PyList_Append(heads, head) == -1) {
			Py_XDECREF(head);
			goto bail;
		}
	}

done:
	/* the cache takes ownership of heads; callers get a copy */
	self->headrevs = heads;
	Py_XDECREF(filter);
	free(nothead);
	return list_copy(self->headrevs);
bail:
	Py_XDECREF(filter);
	Py_XDECREF(heads);
	free(nothead);
	return NULL;
}
843 852
844 853 /**
845 854 * Obtain the base revision index entry.
846 855 *
847 856 * Callers must ensure that rev >= 0 or illegal memory access may occur.
848 857 */
849 858 static inline int index_baserev(indexObject *self, int rev)
850 859 {
851 860 const char *data;
852 861
853 862 if (rev >= self->length) {
854 863 PyObject *tuple =
855 864 PyList_GET_ITEM(self->added, rev - self->length);
856 return (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 3));
865 long ret;
866 if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 3), &ret)) {
867 return -2;
868 }
869 return (int)ret;
857 870 } else {
858 871 data = index_deref(self, rev);
859 872 if (data == NULL) {
860 873 return -2;
861 874 }
862 875
863 876 return getbe32(data + 16);
864 877 }
865 878 }
866 879
867 880 static PyObject *index_deltachain(indexObject *self, PyObject *args)
868 881 {
869 882 int rev, generaldelta;
870 883 PyObject *stoparg;
871 884 int stoprev, iterrev, baserev = -1;
872 885 int stopped;
873 886 PyObject *chain = NULL, *result = NULL;
874 887 const Py_ssize_t length = index_length(self);
875 888
876 889 if (!PyArg_ParseTuple(args, "iOi", &rev, &stoparg, &generaldelta)) {
877 890 return NULL;
878 891 }
879 892
880 893 if (PyInt_Check(stoparg)) {
881 894 stoprev = (int)PyInt_AsLong(stoparg);
882 895 if (stoprev == -1 && PyErr_Occurred()) {
883 896 return NULL;
884 897 }
885 898 } else if (stoparg == Py_None) {
886 899 stoprev = -2;
887 900 } else {
888 901 PyErr_SetString(PyExc_ValueError,
889 902 "stoprev must be integer or None");
890 903 return NULL;
891 904 }
892 905
893 906 if (rev < 0 || rev >= length) {
894 907 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
895 908 return NULL;
896 909 }
897 910
898 911 chain = PyList_New(0);
899 912 if (chain == NULL) {
900 913 return NULL;
901 914 }
902 915
903 916 baserev = index_baserev(self, rev);
904 917
905 918 /* This should never happen. */
906 919 if (baserev <= -2) {
907 920 /* Error should be set by index_deref() */
908 921 assert(PyErr_Occurred());
909 922 goto bail;
910 923 }
911 924
912 925 iterrev = rev;
913 926
914 927 while (iterrev != baserev && iterrev != stoprev) {
915 928 PyObject *value = PyInt_FromLong(iterrev);
916 929 if (value == NULL) {
917 930 goto bail;
918 931 }
919 932 if (PyList_Append(chain, value)) {
920 933 Py_DECREF(value);
921 934 goto bail;
922 935 }
923 936 Py_DECREF(value);
924 937
925 938 if (generaldelta) {
926 939 iterrev = baserev;
927 940 } else {
928 941 iterrev--;
929 942 }
930 943
931 944 if (iterrev < 0) {
932 945 break;
933 946 }
934 947
935 948 if (iterrev >= length) {
936 949 PyErr_SetString(PyExc_IndexError,
937 950 "revision outside index");
938 951 return NULL;
939 952 }
940 953
941 954 baserev = index_baserev(self, iterrev);
942 955
943 956 /* This should never happen. */
944 957 if (baserev <= -2) {
945 958 /* Error should be set by index_deref() */
946 959 assert(PyErr_Occurred());
947 960 goto bail;
948 961 }
949 962 }
950 963
951 964 if (iterrev == stoprev) {
952 965 stopped = 1;
953 966 } else {
954 967 PyObject *value = PyInt_FromLong(iterrev);
955 968 if (value == NULL) {
956 969 goto bail;
957 970 }
958 971 if (PyList_Append(chain, value)) {
959 972 Py_DECREF(value);
960 973 goto bail;
961 974 }
962 975 Py_DECREF(value);
963 976
964 977 stopped = 0;
965 978 }
966 979
967 980 if (PyList_Reverse(chain)) {
968 981 goto bail;
969 982 }
970 983
971 984 result = Py_BuildValue("OO", chain, stopped ? Py_True : Py_False);
972 985 Py_DECREF(chain);
973 986 return result;
974 987
975 988 bail:
976 989 Py_DECREF(chain);
977 990 return NULL;
978 991 }
979 992
980 993 static inline int nt_level(const char *node, Py_ssize_t level)
981 994 {
982 995 int v = node[level >> 1];
983 996 if (!(level & 1))
984 997 v >>= 4;
985 998 return v & 0xf;
986 999 }
987 1000
988 1001 /*
989 1002 * Return values:
990 1003 *
991 1004 * -4: match is ambiguous (multiple candidates)
992 1005 * -2: not found
993 1006 * rest: valid rev
994 1007 */
995 1008 static int nt_find(nodetree *self, const char *node, Py_ssize_t nodelen,
996 1009 int hex)
997 1010 {
998 1011 int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
999 1012 int level, maxlevel, off;
1000 1013
1001 1014 if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
1002 1015 return -1;
1003 1016
1004 1017 if (hex)
1005 1018 maxlevel = nodelen > 40 ? 40 : (int)nodelen;
1006 1019 else
1007 1020 maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
1008 1021
1009 1022 for (level = off = 0; level < maxlevel; level++) {
1010 1023 int k = getnybble(node, level);
1011 1024 nodetreenode *n = &self->nodes[off];
1012 1025 int v = n->children[k];
1013 1026
1014 1027 if (v < 0) {
1015 1028 const char *n;
1016 1029 Py_ssize_t i;
1017 1030
1018 1031 v = -(v + 2);
1019 1032 n = index_node(self->index, v);
1020 1033 if (n == NULL)
1021 1034 return -2;
1022 1035 for (i = level; i < maxlevel; i++)
1023 1036 if (getnybble(node, i) != nt_level(n, i))
1024 1037 return -2;
1025 1038 return v;
1026 1039 }
1027 1040 if (v == 0)
1028 1041 return -2;
1029 1042 off = v;
1030 1043 }
1031 1044 /* multiple matches against an ambiguous prefix */
1032 1045 return -4;
1033 1046 }
1034 1047
1035 1048 static int nt_new(nodetree *self)
1036 1049 {
1037 1050 if (self->length == self->capacity) {
1038 1051 unsigned newcapacity;
1039 1052 nodetreenode *newnodes;
1040 1053 newcapacity = self->capacity * 2;
1041 1054 if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
1042 1055 PyErr_SetString(PyExc_MemoryError,
1043 1056 "overflow in nt_new");
1044 1057 return -1;
1045 1058 }
1046 1059 newnodes =
1047 1060 realloc(self->nodes, newcapacity * sizeof(nodetreenode));
1048 1061 if (newnodes == NULL) {
1049 1062 PyErr_SetString(PyExc_MemoryError, "out of memory");
1050 1063 return -1;
1051 1064 }
1052 1065 self->capacity = newcapacity;
1053 1066 self->nodes = newnodes;
1054 1067 memset(&self->nodes[self->length], 0,
1055 1068 sizeof(nodetreenode) * (self->capacity - self->length));
1056 1069 }
1057 1070 return self->length++;
1058 1071 }
1059 1072
/*
 * Insert a (full 20-byte) node -> rev mapping into the trie, splitting
 * leaves into interior nodes as needed. Returns 0 on success, -1 on
 * error (exception set).
 */
static int nt_insert(nodetree *self, const char *node, int rev)
{
	int level = 0;
	int off = 0;

	while (level < 40) {
		int k = nt_level(node, level);
		nodetreenode *n;
		int v;

		n = &self->nodes[off];
		v = n->children[k];

		if (v == 0) {
			/* empty slot: store rev as a leaf, encoded -(rev+2) */
			n->children[k] = -rev - 2;
			return 0;
		}
		if (v < 0) {
			/* slot holds a leaf; either replace it (same node)
			 * or push it one level down and retry */
			const char *oldnode =
			    index_node_existing(self->index, -(v + 2));
			int noff;

			if (oldnode == NULL)
				return -1;
			if (!memcmp(oldnode, node, 20)) {
				n->children[k] = -rev - 2;
				return 0;
			}
			noff = nt_new(self);
			if (noff == -1)
				return -1;
			/* self->nodes may have been changed by realloc */
			self->nodes[off].children[k] = noff;
			off = noff;
			n = &self->nodes[off];
			/* re-hang the displaced leaf under the new node */
			n->children[nt_level(oldnode, ++level)] = v;
			if (level > self->depth)
				self->depth = level;
			self->splits += 1;
		} else {
			/* interior node: descend */
			level += 1;
			off = v;
		}
	}

	/* fell off the end of a 40-nybble key without placing it */
	return -1;
}
1107 1120
1108 1121 static PyObject *ntobj_insert(nodetreeObject *self, PyObject *args)
1109 1122 {
1110 1123 Py_ssize_t rev;
1111 1124 const char *node;
1112 1125 Py_ssize_t length;
1113 1126 if (!PyArg_ParseTuple(args, "n", &rev))
1114 1127 return NULL;
1115 1128 length = index_length(self->nt.index);
1116 1129 if (rev < 0 || rev >= length) {
1117 1130 PyErr_SetString(PyExc_ValueError, "revlog index out of range");
1118 1131 return NULL;
1119 1132 }
1120 1133 node = index_node_existing(self->nt.index, rev);
1121 1134 if (nt_insert(&self->nt, node, (int)rev) == -1)
1122 1135 return NULL;
1123 1136 Py_RETURN_NONE;
1124 1137 }
1125 1138
/*
 * Remove a node from the trie by overwriting its leaf with rev -2.
 * rev==-2 happens to get encoded as 0, which is interpreted as not set.
 * Returns 0 on success, -1 on error (exception set), like nt_insert.
 */
static int nt_delete_node(nodetree *self, const char *node)
{
	/* rev==-2 happens to get encoded as 0, which is interpreted as not set
	 */
	return nt_insert(self, node, -2);
}
1132 1145
/*
 * Initialize a nodetree over "index". "capacity" is a revision-count
 * hint; the node array starts at roughly half that (minimum 4).
 * Returns 0 on success, -1 on error (exception set). On failure the
 * structure is still safe to pass to nt_dealloc().
 */
static int nt_init(nodetree *self, indexObject *index, unsigned capacity)
{
	/* Initialize before overflow-checking to avoid nt_dealloc() crash. */
	self->nodes = NULL;

	self->index = index;
	/* The input capacity is in terms of revisions, while the field is in
	 * terms of nodetree nodes. */
	self->capacity = (capacity < 4 ? 4 : capacity / 2);
	self->depth = 0;
	self->splits = 0;
	if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
		PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
		return -1;
	}
	/* calloc: nodes must start zeroed ("empty" children) */
	self->nodes = calloc(self->capacity, sizeof(nodetreenode));
	if (self->nodes == NULL) {
		PyErr_NoMemory();
		return -1;
	}
	/* slot 0 is the (implicitly allocated) root */
	self->length = 1;
	return 0;
}
1156 1169
1157 1170 static PyTypeObject indexType;
1158 1171
/*
 * nodetree(index, capacity): Python-level constructor. Takes a strong
 * reference to the index object; it is released in ntobj_dealloc().
 */
static int ntobj_init(nodetreeObject *self, PyObject *args)
{
	PyObject *index;
	unsigned capacity;
	if (!PyArg_ParseTuple(args, "O!I", &indexType, &index, &capacity))
		return -1;
	Py_INCREF(index);
	return nt_init(&self->nt, (indexObject *)index, capacity);
}
1168 1181
/*
 * Look up a hex node prefix; thin wrapper around nt_find() with hex=1.
 * Same return convention as nt_find().
 */
static int nt_partialmatch(nodetree *self, const char *node, Py_ssize_t nodelen)
{
	return nt_find(self, node, nodelen, 1);
}
1173 1186
/*
 * Find the length of the shortest unique prefix of node.
 *
 * Return values:
 *
 *   -3: error (exception set)
 *   -2: not found (no exception set)
 * rest: length of shortest prefix
 */
static int nt_shortest(nodetree *self, const char *node)
{
	int level, off;

	for (level = off = 0; level < 40; level++) {
		int k, v;
		nodetreenode *n = &self->nodes[off];
		k = nt_level(node, level);
		v = n->children[k];
		if (v < 0) {
			/* leaf reached: node is unique from here on */
			const char *n;
			v = -(v + 2);
			n = index_node_existing(self->index, v);
			if (n == NULL)
				return -3;
			if (memcmp(node, n, 20) != 0)
				/*
				 * Found a unique prefix, but it wasn't for the
				 * requested node (i.e the requested node does
				 * not exist).
				 */
				return -2;
			return level + 1;
		}
		if (v == 0)
			return -2;
		off = v;
	}
	/*
	 * The node was still not unique after 40 hex digits, so this won't
	 * happen. Also, if we get here, then there's a programming error in
	 * this file that made us insert a node longer than 40 hex digits.
	 */
	PyErr_SetString(PyExc_Exception, "broken node tree");
	return -3;
}
1219 1232
1220 1233 static PyObject *ntobj_shortest(nodetreeObject *self, PyObject *args)
1221 1234 {
1222 1235 PyObject *val;
1223 1236 char *node;
1224 1237 int length;
1225 1238
1226 1239 if (!PyArg_ParseTuple(args, "O", &val))
1227 1240 return NULL;
1228 1241 if (node_check(val, &node) == -1)
1229 1242 return NULL;
1230 1243
1231 1244 length = nt_shortest(&self->nt, node);
1232 1245 if (length == -3)
1233 1246 return NULL;
1234 1247 if (length == -2) {
1235 1248 raise_revlog_error();
1236 1249 return NULL;
1237 1250 }
1238 1251 return PyInt_FromLong(length);
1239 1252 }
1240 1253
/*
 * Release the trie's node array. Safe to call on a partially
 * initialized or already-deallocated tree (free(NULL) is a no-op and
 * the pointer is reset).
 */
static void nt_dealloc(nodetree *self)
{
	free(self->nodes);
	self->nodes = NULL;
}
1246 1259
/* Destructor for the Python-level nodetree object: drops the reference
 * to the index taken in ntobj_init() and frees the trie storage. */
static void ntobj_dealloc(nodetreeObject *self)
{
	Py_XDECREF(self->nt.index);
	nt_dealloc(&self->nt);
	PyObject_Del(self);
}
1253 1266
/* Method table for the Python-level nodetree type. */
static PyMethodDef ntobj_methods[] = {
    {"insert", (PyCFunction)ntobj_insert, METH_VARARGS,
     "insert an index entry"},
    {"shortest", (PyCFunction)ntobj_shortest, METH_VARARGS,
     "find length of shortest hex nodeid of a binary ID"},
    {NULL} /* Sentinel */
};
1261 1274
/* Type object for parsers.nodetree; only dealloc, init and the method
 * table are non-default. */
static PyTypeObject nodetreeType = {
    PyVarObject_HEAD_INIT(NULL, 0) /* header */
    "parsers.nodetree",            /* tp_name */
    sizeof(nodetreeObject),        /* tp_basicsize */
    0,                             /* tp_itemsize */
    (destructor)ntobj_dealloc,     /* tp_dealloc */
    0,                             /* tp_print */
    0,                             /* tp_getattr */
    0,                             /* tp_setattr */
    0,                             /* tp_compare */
    0,                             /* tp_repr */
    0,                             /* tp_as_number */
    0,                             /* tp_as_sequence */
    0,                             /* tp_as_mapping */
    0,                             /* tp_hash */
    0,                             /* tp_call */
    0,                             /* tp_str */
    0,                             /* tp_getattro */
    0,                             /* tp_setattro */
    0,                             /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,            /* tp_flags */
    "nodetree",                    /* tp_doc */
    0,                             /* tp_traverse */
    0,                             /* tp_clear */
    0,                             /* tp_richcompare */
    0,                             /* tp_weaklistoffset */
    0,                             /* tp_iter */
    0,                             /* tp_iternext */
    ntobj_methods,                 /* tp_methods */
    0,                             /* tp_members */
    0,                             /* tp_getset */
    0,                             /* tp_base */
    0,                             /* tp_dict */
    0,                             /* tp_descr_get */
    0,                             /* tp_descr_set */
    0,                             /* tp_dictoffset */
    (initproc)ntobj_init,          /* tp_init */
    0,                             /* tp_alloc */
};
1301 1314
/*
 * Lazily initialize the index's embedded nodetree (idempotent).
 * Seeds the trie with nullid -> -1 and resets the lookup statistics.
 * Returns 0 on success, -1 on error (exception set).
 */
static int index_init_nt(indexObject *self)
{
	if (!self->ntinitialized) {
		if (nt_init(&self->nt, self, (int)self->raw_length) == -1) {
			nt_dealloc(&self->nt);
			return -1;
		}
		if (nt_insert(&self->nt, nullid, -1) == -1) {
			nt_dealloc(&self->nt);
			return -1;
		}
		self->ntinitialized = 1;
		/* revs below ntrev have not been added to the trie yet */
		self->ntrev = (int)index_length(self);
		self->ntlookups = 1;
		self->ntmisses = 0;
	}
	return 0;
}
1320 1333
/*
 * Return values:
 *
 *   -3: error (exception set)
 *   -2: not found (no exception set)
 * rest: valid rev
 */
static int index_find_node(indexObject *self, const char *node,
                           Py_ssize_t nodelen)
{
	int rev;

	if (index_init_nt(self) == -1)
		return -3;

	self->ntlookups++;
	rev = nt_find(&self->nt, node, nodelen, 0);
	if (rev >= -1)
		return rev;

	/*
	 * For the first handful of lookups, we scan the entire index,
	 * and cache only the matching nodes. This optimizes for cases
	 * like "hg tip", where only a few nodes are accessed.
	 *
	 * After that, we cache every node we visit, using a single
	 * scan amortized over multiple lookups. This gives the best
	 * bulk performance, e.g. for "hg log".
	 */
	if (self->ntmisses++ < 4) {
		for (rev = self->ntrev - 1; rev >= 0; rev--) {
			const char *n = index_node_existing(self, rev);
			if (n == NULL)
				return -3;
			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
				/* cache only the node we were asked for */
				if (nt_insert(&self->nt, n, rev) == -1)
					return -3;
				break;
			}
		}
	} else {
		for (rev = self->ntrev - 1; rev >= 0; rev--) {
			const char *n = index_node_existing(self, rev);
			if (n == NULL)
				return -3;
			/* cache every node visited during the scan */
			if (nt_insert(&self->nt, n, rev) == -1) {
				self->ntrev = rev + 1;
				return -3;
			}
			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
				break;
			}
		}
		/* everything down to rev is now in the trie */
		self->ntrev = rev;
	}

	if (rev >= 0)
		return rev;
	return -2;
}
1381 1394
1382 1395 static PyObject *index_getitem(indexObject *self, PyObject *value)
1383 1396 {
1384 1397 char *node;
1385 1398 int rev;
1386 1399
1387 if (PyInt_Check(value))
1388 return index_get(self, PyInt_AS_LONG(value));
1400 if (PyInt_Check(value)) {
1401 long idx;
1402 if (!pylong_to_long(value, &idx)) {
1403 return NULL;
1404 }
1405 return index_get(self, idx);
1406 }
1389 1407
1390 1408 if (node_check(value, &node) == -1)
1391 1409 return NULL;
1392 1410 rev = index_find_node(self, node, 20);
1393 1411 if (rev >= -1)
1394 1412 return PyInt_FromLong(rev);
1395 1413 if (rev == -2)
1396 1414 raise_revlog_error();
1397 1415 return NULL;
1398 1416 }
1399 1417
1400 1418 /*
1401 1419 * Fully populate the radix tree.
1402 1420 */
1403 1421 static int index_populate_nt(indexObject *self)
1404 1422 {
1405 1423 int rev;
1406 1424 if (self->ntrev > 0) {
1407 1425 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1408 1426 const char *n = index_node_existing(self, rev);
1409 1427 if (n == NULL)
1410 1428 return -1;
1411 1429 if (nt_insert(&self->nt, n, rev) == -1)
1412 1430 return -1;
1413 1431 }
1414 1432 self->ntrev = -1;
1415 1433 }
1416 1434 return 0;
1417 1435 }
1418 1436
/*
 * index.partialmatch(hexprefix): resolve a 1-40 character hex prefix
 * to a full binary node. Returns None when not found, raises
 * RevlogError on ambiguity, ValueError on bad prefix length.
 */
static PyObject *index_partialmatch(indexObject *self, PyObject *args)
{
	const char *fullnode;
	int nodelen;
	char *node;
	int rev, i;

	if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
		return NULL;

	if (nodelen < 1) {
		PyErr_SetString(PyExc_ValueError, "key too short");
		return NULL;
	}

	if (nodelen > 40) {
		PyErr_SetString(PyExc_ValueError, "key too long");
		return NULL;
	}

	/* hexdigit() sets an exception on a bad character */
	for (i = 0; i < nodelen; i++)
		hexdigit(node, i);
	if (PyErr_Occurred()) {
		/* input contains non-hex characters */
		PyErr_Clear();
		Py_RETURN_NONE;
	}

	/* prefix matching requires the complete trie */
	if (index_init_nt(self) == -1)
		return NULL;
	if (index_populate_nt(self) == -1)
		return NULL;
	rev = nt_partialmatch(&self->nt, node, nodelen);

	switch (rev) {
	case -4:
		raise_revlog_error(); /* ambiguous prefix */
		return NULL;
	case -2:
		Py_RETURN_NONE; /* not found */
	case -1:
		return PyBytes_FromStringAndSize(nullid, 20);
	}

	fullnode = index_node_existing(self, rev);
	if (fullnode == NULL) {
		return NULL;
	}
	return PyBytes_FromStringAndSize(fullnode, 20);
}
1469 1487
1470 1488 static PyObject *index_shortest(indexObject *self, PyObject *args)
1471 1489 {
1472 1490 PyObject *val;
1473 1491 char *node;
1474 1492 int length;
1475 1493
1476 1494 if (!PyArg_ParseTuple(args, "O", &val))
1477 1495 return NULL;
1478 1496 if (node_check(val, &node) == -1)
1479 1497 return NULL;
1480 1498
1481 1499 self->ntlookups++;
1482 1500 if (index_init_nt(self) == -1)
1483 1501 return NULL;
1484 1502 if (index_populate_nt(self) == -1)
1485 1503 return NULL;
1486 1504 length = nt_shortest(&self->nt, node);
1487 1505 if (length == -3)
1488 1506 return NULL;
1489 1507 if (length == -2) {
1490 1508 raise_revlog_error();
1491 1509 return NULL;
1492 1510 }
1493 1511 return PyInt_FromLong(length);
1494 1512 }
1495 1513
1496 1514 static PyObject *index_m_get(indexObject *self, PyObject *args)
1497 1515 {
1498 1516 PyObject *val;
1499 1517 char *node;
1500 1518 int rev;
1501 1519
1502 1520 if (!PyArg_ParseTuple(args, "O", &val))
1503 1521 return NULL;
1504 1522 if (node_check(val, &node) == -1)
1505 1523 return NULL;
1506 1524 rev = index_find_node(self, node, 20);
1507 1525 if (rev == -3)
1508 1526 return NULL;
1509 1527 if (rev == -2)
1510 1528 Py_RETURN_NONE;
1511 1529 return PyInt_FromLong(rev);
1512 1530 }
1513 1531
1514 1532 static int index_contains(indexObject *self, PyObject *value)
1515 1533 {
1516 1534 char *node;
1517 1535
1518 1536 if (PyInt_Check(value)) {
1519 long rev = PyInt_AS_LONG(value);
1537 long rev;
1538 if (!pylong_to_long(value, &rev)) {
1539 return -1;
1540 }
1520 1541 return rev >= -1 && rev < index_length(self);
1521 1542 }
1522 1543
1523 1544 if (node_check(value, &node) == -1)
1524 1545 return -1;
1525 1546
1526 1547 switch (index_find_node(self, node, 20)) {
1527 1548 case -3:
1528 1549 return -1;
1529 1550 case -2:
1530 1551 return 0;
1531 1552 default:
1532 1553 return 1;
1533 1554 }
1534 1555 }
1535 1556
/* one bit per input rev, plus one "poison" bit; caps inputs at 63 */
typedef uint64_t bitmask;

/*
 * Given a disjoint set of revs, return all candidates for the
 * greatest common ancestor. In revset notation, this is the set
 * "heads(::a and ::b and ...)"
 */
static PyObject *find_gca_candidates(indexObject *self, const int *revs,
                                     int revcount)
{
	const bitmask allseen = (1ull << revcount) - 1;
	const bitmask poison = 1ull << revcount;
	PyObject *gca = PyList_New(0);
	int i, v, interesting;
	int maxrev = -1;
	bitmask sp;
	bitmask *seen;

	if (gca == NULL)
		return PyErr_NoMemory();

	for (i = 0; i < revcount; i++) {
		if (revs[i] > maxrev)
			maxrev = revs[i];
	}

	/* seen[v]: bitmask of which inputs reach rev v (poison bit set
	 * once v is at or below a known GCA candidate) */
	seen = calloc(sizeof(*seen), maxrev + 1);
	if (seen == NULL) {
		Py_DECREF(gca);
		return PyErr_NoMemory();
	}

	for (i = 0; i < revcount; i++)
		seen[revs[i]] = 1ull << i;

	interesting = revcount;

	/* walk revs high to low, propagating reachability to parents */
	for (v = maxrev; v >= 0 && interesting; v--) {
		bitmask sv = seen[v];
		int parents[2];

		if (!sv)
			continue;

		if (sv < poison) {
			interesting -= 1;
			if (sv == allseen) {
				/* reached by every input: a GCA candidate */
				PyObject *obj = PyInt_FromLong(v);
				if (obj == NULL)
					goto bail;
				if (PyList_Append(gca, obj) == -1) {
					Py_DECREF(obj);
					goto bail;
				}
				sv |= poison;
				for (i = 0; i < revcount; i++) {
					if (revs[i] == v)
						goto done;
				}
			}
		}
		if (index_get_parents(self, v, parents, maxrev) < 0)
			goto bail;

		for (i = 0; i < 2; i++) {
			int p = parents[i];
			if (p == -1)
				continue;
			sp = seen[p];
			if (sv < poison) {
				if (sp == 0) {
					seen[p] = sv;
					interesting++;
				} else if (sp != sv)
					seen[p] |= sv;
			} else {
				/* poison parents of a candidate so their
				 * ancestors are not reported too */
				if (sp && sp < poison)
					interesting--;
				seen[p] = sv;
			}
		}
	}

done:
	free(seen);
	return gca;
bail:
	free(seen);
	Py_XDECREF(gca);
	return NULL;
}
1627 1648
/*
 * Given a disjoint set of revs, return the subset with the longest
 * path to the root.
 */
static PyObject *find_deepest(indexObject *self, PyObject *revs)
{
	const Py_ssize_t revcount = PyList_GET_SIZE(revs);
	static const Py_ssize_t capacity = 24;
	int *depth, *interesting = NULL;
	int i, j, v, ninteresting;
	PyObject *dict = NULL, *keys = NULL;
	long *seen = NULL;
	int maxrev = -1;
	long final;

	/* one bit per input rev; interesting[] is indexed by bit sets,
	 * so cap the input count */
	if (revcount > capacity) {
		PyErr_Format(PyExc_OverflowError,
		             "bitset size (%ld) > capacity (%ld)",
		             (long)revcount, (long)capacity);
		return NULL;
	}

	for (i = 0; i < revcount; i++) {
		int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
		if (n > maxrev)
			maxrev = n;
	}

	/* depth[v]: longest-path depth assigned to rev v (0 = unvisited) */
	depth = calloc(sizeof(*depth), maxrev + 1);
	if (depth == NULL)
		return PyErr_NoMemory();

	/* seen[v]: bit set of which inputs rev v descends from */
	seen = calloc(sizeof(*seen), maxrev + 1);
	if (seen == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	/* interesting[s]: how many live revs carry exactly bit set s */
	interesting = calloc(sizeof(*interesting), ((size_t)1) << revcount);
	if (interesting == NULL) {
		PyErr_NoMemory();
		goto bail;
	}

	if (PyList_Sort(revs) == -1)
		goto bail;

	for (i = 0; i < revcount; i++) {
		int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
		long b = 1l << i;
		depth[n] = 1;
		seen[n] = b;
		interesting[b] = 1;
	}

	/* invariant: ninteresting is the number of non-zero entries in
	 * interesting. */
	ninteresting = (int)revcount;

	/* walk revs high to low, pushing depth/bit sets onto parents */
	for (v = maxrev; v >= 0 && ninteresting > 1; v--) {
		int dv = depth[v];
		int parents[2];
		long sv;

		if (dv == 0)
			continue;

		sv = seen[v];
		if (index_get_parents(self, v, parents, maxrev) < 0)
			goto bail;

		for (i = 0; i < 2; i++) {
			int p = parents[i];
			long sp;
			int dp;

			if (p == -1)
				continue;

			dp = depth[p];
			sp = seen[p];
			if (dp <= dv) {
				/* parent is no deeper: it inherits v's
				 * depth + 1 and v's bit set */
				depth[p] = dv + 1;
				if (sp != sv) {
					interesting[sv] += 1;
					seen[p] = sv;
					if (sp) {
						interesting[sp] -= 1;
						if (interesting[sp] == 0)
							ninteresting -= 1;
					}
				}
			} else if (dv == dp - 1) {
				/* equal depth via v: merge bit sets */
				long nsp = sp | sv;
				if (nsp == sp)
					continue;
				seen[p] = nsp;
				interesting[sp] -= 1;
				if (interesting[sp] == 0)
					ninteresting -= 1;
				if (interesting[nsp] == 0)
					ninteresting += 1;
				interesting[nsp] += 1;
			}
		}
		interesting[sv] -= 1;
		if (interesting[sv] == 0)
			ninteresting -= 1;
	}

	/* union the surviving bit sets: which inputs are deepest */
	final = 0;
	j = ninteresting;
	for (i = 0; i < (int)(2 << revcount) && j > 0; i++) {
		if (interesting[i] == 0)
			continue;
		final |= i;
		j -= 1;
	}
	if (final == 0) {
		keys = PyList_New(0);
		goto bail;
	}

	dict = PyDict_New();
	if (dict == NULL)
		goto bail;

	for (i = 0; i < revcount; i++) {
		PyObject *key;

		if ((final & (1 << i)) == 0)
			continue;

		key = PyList_GET_ITEM(revs, i);
		Py_INCREF(key);
		Py_INCREF(Py_None);
		if (PyDict_SetItem(dict, key, Py_None) == -1) {
			Py_DECREF(key);
			Py_DECREF(Py_None);
			goto bail;
		}
	}

	keys = PyDict_Keys(dict);

bail:
	free(depth);
	free(seen);
	free(interesting);
	Py_XDECREF(dict);

	return keys;
}
1781 1802
/*
 * Given a (possibly overlapping) set of revs, return all the
 * common ancestors heads: heads(::args[0] and ::a[1] and ...)
 */
static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args)
{
	PyObject *ret = NULL;
	Py_ssize_t argcount, i, len;
	bitmask repeat = 0;
	int revcount = 0;
	int *revs;

	argcount = PySequence_Length(args);
	revs = PyMem_Malloc(argcount * sizeof(*revs));
	if (argcount > 0 && revs == NULL)
		return PyErr_NoMemory();
	len = index_length(self);

	for (i = 0; i < argcount; i++) {
		static const int capacity = 24;
		PyObject *obj = PySequence_GetItem(args, i);
		bitmask x;
		long val;

		if (!PyInt_Check(obj)) {
			PyErr_SetString(PyExc_TypeError,
			                "arguments must all be ints");
			Py_DECREF(obj);
			goto bail;
		}
		val = PyInt_AsLong(obj);
		Py_DECREF(obj);
		/* nullrev is an ancestor of everything: empty answer */
		if (val == -1) {
			ret = PyList_New(0);
			goto done;
		}
		if (val < 0 || val >= len) {
			PyErr_SetString(PyExc_IndexError, "index out of range");
			goto bail;
		}
		/* this cheesy bloom filter lets us avoid some more
		 * expensive duplicate checks in the common set-is-disjoint
		 * case */
		x = 1ull << (val & 0x3f);
		if (repeat & x) {
			int k;
			for (k = 0; k < revcount; k++) {
				if (val == revs[k])
					goto duplicate;
			}
		} else
			repeat |= x;
		if (revcount >= capacity) {
			PyErr_Format(PyExc_OverflowError,
			             "bitset size (%d) > capacity (%d)",
			             revcount, capacity);
			goto bail;
		}
		revs[revcount++] = (int)val;
	duplicate:;
	}

	if (revcount == 0) {
		ret = PyList_New(0);
		goto done;
	}
	/* a single rev is its own GCA head */
	if (revcount == 1) {
		PyObject *obj;
		ret = PyList_New(1);
		if (ret == NULL)
			goto bail;
		obj = PyInt_FromLong(revs[0]);
		if (obj == NULL)
			goto bail;
		PyList_SET_ITEM(ret, 0, obj);
		goto done;
	}

	ret = find_gca_candidates(self, revs, revcount);
	if (ret == NULL)
		goto bail;

done:
	PyMem_Free(revs);
	return ret;

bail:
	PyMem_Free(revs);
	Py_XDECREF(ret);
	return NULL;
}
1873 1894
1874 1895 /*
1875 1896 * Given a (possibly overlapping) set of revs, return the greatest
1876 1897 * common ancestors: those with the longest path to the root.
1877 1898 */
1878 1899 static PyObject *index_ancestors(indexObject *self, PyObject *args)
1879 1900 {
1880 1901 PyObject *ret;
1881 1902 PyObject *gca = index_commonancestorsheads(self, args);
1882 1903 if (gca == NULL)
1883 1904 return NULL;
1884 1905
1885 1906 if (PyList_GET_SIZE(gca) <= 1) {
1886 1907 return gca;
1887 1908 }
1888 1909
1889 1910 ret = find_deepest(self, gca);
1890 1911 Py_DECREF(gca);
1891 1912 return ret;
1892 1913 }
1893 1914
/*
 * Invalidate any trie entries introduced by added revs.
 * "start" indexes into self->added; entries from there on have their
 * nodes removed from the trie. When the whole list is invalidated,
 * drop it entirely.
 */
static void index_invalidate_added(indexObject *self, Py_ssize_t start)
{
	Py_ssize_t i, len = PyList_GET_SIZE(self->added);

	for (i = start; i < len; i++) {
		PyObject *tuple = PyList_GET_ITEM(self->added, i);
		/* tuple slot 7 holds the entry's binary node */
		PyObject *node = PyTuple_GET_ITEM(tuple, 7);

		nt_delete_node(&self->nt, PyBytes_AS_STRING(node));
	}

	if (start == 0)
		Py_CLEAR(self->added);
}
1911 1932
/*
 * Delete a numeric range of revs, which must be at the end of the
 * range, but exclude the sentinel nullid entry.
 */
static int index_slice_del(indexObject *self, PyObject *item)
{
	Py_ssize_t start, stop, step, slicelength;
	/* +1 accounts for the virtual nullid entry at the end */
	Py_ssize_t length = index_length(self) + 1;
	int ret = 0;

	/* Argument changed from PySliceObject* to PyObject* in Python 3. */
#ifdef IS_PY3K
	if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
	                         &slicelength) < 0)
#else
	if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
	                         &step, &slicelength) < 0)
#endif
		return -1;

	if (slicelength <= 0)
		return 0;

	if ((step < 0 && start < stop) || (step > 0 && start > stop))
		stop = start;

	/* normalize a reversed slice to a forward one */
	if (step < 0) {
		stop = start + 1;
		start = stop + step * (slicelength - 1) - 1;
		step = -step;
	}

	if (step != 1) {
		PyErr_SetString(PyExc_ValueError,
		                "revlog index delete requires step size of 1");
		return -1;
	}

	/* deletion must reach the end of the index */
	if (stop != length - 1) {
		PyErr_SetString(PyExc_IndexError,
		                "revlog index deletion indices are invalid");
		return -1;
	}

	if (start < self->length) {
		/* truncation reaches into the on-disk portion */
		if (self->ntinitialized) {
			Py_ssize_t i;

			for (i = start + 1; i < self->length; i++) {
				const char *node = index_node_existing(self, i);
				if (node == NULL)
					return -1;

				nt_delete_node(&self->nt, node);
			}
			if (self->added)
				index_invalidate_added(self, 0);
			if (self->ntrev > start)
				self->ntrev = (int)start;
		}
		self->length = start;
		if (start < self->raw_length) {
			if (self->cache) {
				Py_ssize_t i;
				for (i = start; i < self->raw_length; i++)
					Py_CLEAR(self->cache[i]);
			}
			self->raw_length = start;
		}
		goto done;
	}

	/* only in-memory (added) entries are affected */
	if (self->ntinitialized) {
		index_invalidate_added(self, start - self->length);
		if (self->ntrev > start)
			self->ntrev = (int)start;
	}
	if (self->added)
		ret = PyList_SetSlice(self->added, start - self->length,
		                      PyList_GET_SIZE(self->added), NULL);
done:
	Py_CLEAR(self->headrevs);
	return ret;
}
1996 2017
/*
 * Supported ops:
 *
 * slice deletion
 * string assignment (extend node->rev mapping)
 * string deletion (shrink node->rev mapping)
 */
static int index_assign_subscript(indexObject *self, PyObject *item,
                                  PyObject *value)
{
	char *node;
	long rev;

	if (PySlice_Check(item) && value == NULL)
		return index_slice_del(self, item);

	if (node_check(item, &node) == -1)
		return -1;

	/* del index[node]: drop it from the trie (no-op when the trie
	 * was never built) */
	if (value == NULL)
		return self->ntinitialized ? nt_delete_node(&self->nt, node)
		                           : 0;
	/* PyInt_AsLong returns -1 on error, which the rev < 0 branch
	 * catches; PyErr_Occurred distinguishes error from bad value */
	rev = PyInt_AsLong(value);
	if (rev > INT_MAX || rev < 0) {
		if (!PyErr_Occurred())
			PyErr_SetString(PyExc_ValueError, "rev out of range");
		return -1;
	}

	if (index_init_nt(self) == -1)
		return -1;
	return nt_insert(&self->nt, node, (int)rev);
}
2030 2051
/*
 * Find all RevlogNG entries in an index that has inline data. Update
 * the optional "offsets" table with those entries.
 * Returns the number of entries found, or -1 on corruption.
 */
static Py_ssize_t inline_scan(indexObject *self, const char **offsets)
{
	const char *data = (const char *)self->buf.buf;
	Py_ssize_t pos = 0;
	Py_ssize_t end = self->buf.len;
	long incr = v1_hdrsize;
	Py_ssize_t len = 0;

	/* pos >= 0 guards against signed overflow from a huge bogus
	 * comp_len in a corrupt file */
	while (pos + v1_hdrsize <= end && pos >= 0) {
		uint32_t comp_len;
		/* 3rd element of header is length of compressed inline data */
		comp_len = getbe32(data + pos + 8);
		incr = v1_hdrsize + comp_len;
		if (offsets)
			offsets[len] = data + pos;
		len++;
		pos += incr;
	}

	/* a well-formed file lands exactly on the end */
	if (pos != end) {
		if (!PyErr_Occurred())
			PyErr_SetString(PyExc_ValueError, "corrupt index file");
		return -1;
	}

	return len;
}
2062 2083
/*
 * index(data, inlined): tp_init. "data" is any buffer-protocol object
 * holding the raw index; "inlined" selects the inline-data layout.
 * Returns 0 on success, -1 on error (exception set).
 */
static int index_init(indexObject *self, PyObject *args)
{
	PyObject *data_obj, *inlined_obj;
	Py_ssize_t size;

	/* Initialize before argument-checking to avoid index_dealloc() crash.
	 */
	self->raw_length = 0;
	self->added = NULL;
	self->cache = NULL;
	self->data = NULL;
	memset(&self->buf, 0, sizeof(self->buf));
	self->headrevs = NULL;
	self->filteredrevs = Py_None;
	Py_INCREF(Py_None);
	self->ntinitialized = 0;
	self->offsets = NULL;

	if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
		return -1;
	if (!PyObject_CheckBuffer(data_obj)) {
		PyErr_SetString(PyExc_TypeError,
		                "data does not support buffer interface");
		return -1;
	}

	if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
		return -1;
	size = self->buf.len;

	self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
	self->data = data_obj;

	self->ntlookups = self->ntmisses = 0;
	self->ntrev = -1;
	Py_INCREF(self->data);

	if (self->inlined) {
		/* inline format: entry count must be discovered by scanning */
		Py_ssize_t len = inline_scan(self, NULL);
		if (len == -1)
			goto bail;
		self->raw_length = len;
		self->length = len;
	} else {
		/* fixed-size records: the size must divide evenly */
		if (size % v1_hdrsize) {
			PyErr_SetString(PyExc_ValueError, "corrupt index file");
			goto bail;
		}
		self->raw_length = size / v1_hdrsize;
		self->length = self->raw_length;
	}

	return 0;
bail:
	/* acquired buffer/references are released by index_dealloc() */
	return -1;
}
2119 2140
/* index.nodemap: the index doubles as its own node->rev mapping, so
 * return a new reference to self. */
static PyObject *index_nodemap(indexObject *self)
{
	Py_INCREF(self);
	return (PyObject *)self;
}
2125 2146
/* Drop every derived/cached structure: the per-rev tuple cache, the
 * inline offsets table, the node trie, and the cached headrevs. */
static void _index_clearcaches(indexObject *self)
{
	if (self->cache) {
		Py_ssize_t i;

		for (i = 0; i < self->raw_length; i++)
			Py_CLEAR(self->cache[i]);
		free(self->cache);
		self->cache = NULL;
	}
	if (self->offsets) {
		PyMem_Free((void *)self->offsets);
		self->offsets = NULL;
	}
	if (self->ntinitialized) {
		nt_dealloc(&self->nt);
	}
	self->ntinitialized = 0;
	Py_CLEAR(self->headrevs);
}
2146 2167
2147 2168 static PyObject *index_clearcaches(indexObject *self)
2148 2169 {
2149 2170 _index_clearcaches(self);
2150 2171 self->ntrev = -1;
2151 2172 self->ntlookups = self->ntmisses = 0;
2152 2173 Py_RETURN_NONE;
2153 2174 }
2154 2175
2155 2176 static void index_dealloc(indexObject *self)
2156 2177 {
2157 2178 _index_clearcaches(self);
2158 2179 Py_XDECREF(self->filteredrevs);
2159 2180 if (self->buf.buf) {
2160 2181 PyBuffer_Release(&self->buf);
2161 2182 memset(&self->buf, 0, sizeof(self->buf));
2162 2183 }
2163 2184 Py_XDECREF(self->data);
2164 2185 Py_XDECREF(self->added);
2165 2186 PyObject_Del(self);
2166 2187 }
2167 2188
2168 2189 static PySequenceMethods index_sequence_methods = {
2169 2190 (lenfunc)index_length, /* sq_length */
2170 2191 0, /* sq_concat */
2171 2192 0, /* sq_repeat */
2172 2193 (ssizeargfunc)index_get, /* sq_item */
2173 2194 0, /* sq_slice */
2174 2195 0, /* sq_ass_item */
2175 2196 0, /* sq_ass_slice */
2176 2197 (objobjproc)index_contains, /* sq_contains */
2177 2198 };
2178 2199
2179 2200 static PyMappingMethods index_mapping_methods = {
2180 2201 (lenfunc)index_length, /* mp_length */
2181 2202 (binaryfunc)index_getitem, /* mp_subscript */
2182 2203 (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
2183 2204 };
2184 2205
2185 2206 static PyMethodDef index_methods[] = {
2186 2207 {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS,
2187 2208 "return the gca set of the given revs"},
2188 2209 {"commonancestorsheads", (PyCFunction)index_commonancestorsheads,
2189 2210 METH_VARARGS,
2190 2211 "return the heads of the common ancestors of the given revs"},
2191 2212 {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
2192 2213 "clear the index caches"},
2193 2214 {"get", (PyCFunction)index_m_get, METH_VARARGS, "get an index entry"},
2194 2215 {"computephasesmapsets", (PyCFunction)compute_phases_map_sets, METH_VARARGS,
2195 2216 "compute phases"},
2196 2217 {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS,
2197 2218 "reachableroots"},
2198 2219 {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS,
2199 2220 "get head revisions"}, /* Can do filtering since 3.2 */
2200 2221 {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS,
2201 2222 "get filtered head revisions"}, /* Can always do filtering */
2202 2223 {"deltachain", (PyCFunction)index_deltachain, METH_VARARGS,
2203 2224 "determine revisions with deltas to reconstruct fulltext"},
2204 2225 {"append", (PyCFunction)index_append, METH_O, "append an index entry"},
2205 2226 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2206 2227 "match a potentially ambiguous node ID"},
2207 2228 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2208 2229 "find length of shortest hex nodeid of a binary ID"},
2209 2230 {"stats", (PyCFunction)index_stats, METH_NOARGS, "stats for the index"},
2210 2231 {NULL} /* Sentinel */
2211 2232 };
2212 2233
2213 2234 static PyGetSetDef index_getset[] = {
2214 2235 {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
2215 2236 {NULL} /* Sentinel */
2216 2237 };
2217 2238
2218 2239 static PyTypeObject indexType = {
2219 2240 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2220 2241 "parsers.index", /* tp_name */
2221 2242 sizeof(indexObject), /* tp_basicsize */
2222 2243 0, /* tp_itemsize */
2223 2244 (destructor)index_dealloc, /* tp_dealloc */
2224 2245 0, /* tp_print */
2225 2246 0, /* tp_getattr */
2226 2247 0, /* tp_setattr */
2227 2248 0, /* tp_compare */
2228 2249 0, /* tp_repr */
2229 2250 0, /* tp_as_number */
2230 2251 &index_sequence_methods, /* tp_as_sequence */
2231 2252 &index_mapping_methods, /* tp_as_mapping */
2232 2253 0, /* tp_hash */
2233 2254 0, /* tp_call */
2234 2255 0, /* tp_str */
2235 2256 0, /* tp_getattro */
2236 2257 0, /* tp_setattro */
2237 2258 0, /* tp_as_buffer */
2238 2259 Py_TPFLAGS_DEFAULT, /* tp_flags */
2239 2260 "revlog index", /* tp_doc */
2240 2261 0, /* tp_traverse */
2241 2262 0, /* tp_clear */
2242 2263 0, /* tp_richcompare */
2243 2264 0, /* tp_weaklistoffset */
2244 2265 0, /* tp_iter */
2245 2266 0, /* tp_iternext */
2246 2267 index_methods, /* tp_methods */
2247 2268 0, /* tp_members */
2248 2269 index_getset, /* tp_getset */
2249 2270 0, /* tp_base */
2250 2271 0, /* tp_dict */
2251 2272 0, /* tp_descr_get */
2252 2273 0, /* tp_descr_set */
2253 2274 0, /* tp_dictoffset */
2254 2275 (initproc)index_init, /* tp_init */
2255 2276 0, /* tp_alloc */
2256 2277 };
2257 2278
2258 2279 /*
2259 2280 * returns a tuple of the form (index, index, cache) with elements as
2260 2281 * follows:
2261 2282 *
2262 2283 * index: an index object that lazily parses RevlogNG records
2263 2284 * cache: if data is inlined, a tuple (0, index_file_content), else None
2264 2285 * index_file_content could be a string, or a buffer
2265 2286 *
2266 2287 * added complications are for backwards compatibility
2267 2288 */
2268 2289 PyObject *parse_index2(PyObject *self, PyObject *args)
2269 2290 {
2270 2291 PyObject *tuple = NULL, *cache = NULL;
2271 2292 indexObject *idx;
2272 2293 int ret;
2273 2294
2274 2295 idx = PyObject_New(indexObject, &indexType);
2275 2296 if (idx == NULL)
2276 2297 goto bail;
2277 2298
2278 2299 ret = index_init(idx, args);
2279 2300 if (ret == -1)
2280 2301 goto bail;
2281 2302
2282 2303 if (idx->inlined) {
2283 2304 cache = Py_BuildValue("iO", 0, idx->data);
2284 2305 if (cache == NULL)
2285 2306 goto bail;
2286 2307 } else {
2287 2308 cache = Py_None;
2288 2309 Py_INCREF(cache);
2289 2310 }
2290 2311
2291 2312 tuple = Py_BuildValue("NN", idx, cache);
2292 2313 if (!tuple)
2293 2314 goto bail;
2294 2315 return tuple;
2295 2316
2296 2317 bail:
2297 2318 Py_XDECREF(idx);
2298 2319 Py_XDECREF(cache);
2299 2320 Py_XDECREF(tuple);
2300 2321 return NULL;
2301 2322 }
2302 2323
2303 2324 #ifdef WITH_RUST
2304 2325
2305 2326 /* rustlazyancestors: iteration over ancestors implemented in Rust
2306 2327 *
2307 2328 * This class holds a reference to an index and to the Rust iterator.
2308 2329 */
2309 2330 typedef struct rustlazyancestorsObjectStruct rustlazyancestorsObject;
2310 2331
2311 2332 struct rustlazyancestorsObjectStruct {
2312 2333 PyObject_HEAD
2313 2334 /* Type-specific fields go here. */
2314 2335 indexObject *index; /* Ref kept to avoid GC'ing the index */
2315 2336 void *iter; /* Rust iterator */
2316 2337 };
2317 2338
2318 2339 /* FFI exposed from Rust code */
2319 2340 rustlazyancestorsObject *
2320 2341 rustlazyancestors_init(indexObject *index,
2321 2342 /* to pass index_get_parents() */
2322 2343 int (*)(indexObject *, Py_ssize_t, int *, int),
2323 2344 /* intrevs vector */
2324 2345 Py_ssize_t initrevslen, long *initrevs, long stoprev,
2325 2346 int inclusive);
2326 2347 void rustlazyancestors_drop(rustlazyancestorsObject *self);
2327 2348 int rustlazyancestors_next(rustlazyancestorsObject *self);
2328 2349 int rustlazyancestors_contains(rustlazyancestorsObject *self, long rev);
2329 2350
2330 2351 /* CPython instance methods */
2331 2352 static int rustla_init(rustlazyancestorsObject *self, PyObject *args)
2332 2353 {
2333 2354 PyObject *initrevsarg = NULL;
2334 2355 PyObject *inclusivearg = NULL;
2335 2356 long stoprev = 0;
2336 2357 long *initrevs = NULL;
2337 2358 int inclusive = 0;
2338 2359 Py_ssize_t i;
2339 2360
2340 2361 indexObject *index;
2341 2362 if (!PyArg_ParseTuple(args, "O!O!lO!", &indexType, &index, &PyList_Type,
2342 2363 &initrevsarg, &stoprev, &PyBool_Type,
2343 2364 &inclusivearg))
2344 2365 return -1;
2345 2366
2346 2367 Py_INCREF(index);
2347 2368 self->index = index;
2348 2369
2349 2370 if (inclusivearg == Py_True)
2350 2371 inclusive = 1;
2351 2372
2352 2373 Py_ssize_t linit = PyList_GET_SIZE(initrevsarg);
2353 2374
2354 2375 initrevs = (long *)calloc(linit, sizeof(long));
2355 2376
2356 2377 if (initrevs == NULL) {
2357 2378 PyErr_NoMemory();
2358 2379 goto bail;
2359 2380 }
2360 2381
2361 2382 for (i = 0; i < linit; i++) {
2362 2383 initrevs[i] = PyInt_AsLong(PyList_GET_ITEM(initrevsarg, i));
2363 2384 }
2364 2385 if (PyErr_Occurred())
2365 2386 goto bail;
2366 2387
2367 2388 self->iter = rustlazyancestors_init(index, index_get_parents, linit,
2368 2389 initrevs, stoprev, inclusive);
2369 2390 if (self->iter == NULL) {
2370 2391 /* if this is because of GraphError::ParentOutOfRange
2371 2392 * index_get_parents() has already set the proper ValueError */
2372 2393 goto bail;
2373 2394 }
2374 2395
2375 2396 free(initrevs);
2376 2397 return 0;
2377 2398
2378 2399 bail:
2379 2400 free(initrevs);
2380 2401 return -1;
2381 2402 };
2382 2403
2383 2404 static void rustla_dealloc(rustlazyancestorsObject *self)
2384 2405 {
2385 2406 Py_XDECREF(self->index);
2386 2407 if (self->iter != NULL) { /* can happen if rustla_init failed */
2387 2408 rustlazyancestors_drop(self->iter);
2388 2409 }
2389 2410 PyObject_Del(self);
2390 2411 }
2391 2412
2392 2413 static PyObject *rustla_next(rustlazyancestorsObject *self)
2393 2414 {
2394 2415 int res = rustlazyancestors_next(self->iter);
2395 2416 if (res == -1) {
2396 2417 /* Setting an explicit exception seems unnecessary
2397 2418 * as examples from Python source code (Objects/rangeobjets.c
2398 2419 * and Modules/_io/stringio.c) seem to demonstrate.
2399 2420 */
2400 2421 return NULL;
2401 2422 }
2402 2423 return PyInt_FromLong(res);
2403 2424 }
2404 2425
2405 2426 static int rustla_contains(rustlazyancestorsObject *self, PyObject *rev)
2406 2427 {
2407 if (!(PyInt_Check(rev))) {
2428 long lrev;
2429 if (!pylong_to_long(rev, &lrev)) {
2430 PyErr_Clear();
2408 2431 return 0;
2409 2432 }
2410 return rustlazyancestors_contains(self->iter, PyInt_AS_LONG(rev));
2433 return rustlazyancestors_contains(self->iter, lrev);
2411 2434 }
2412 2435
2413 2436 static PySequenceMethods rustla_sequence_methods = {
2414 2437 0, /* sq_length */
2415 2438 0, /* sq_concat */
2416 2439 0, /* sq_repeat */
2417 2440 0, /* sq_item */
2418 2441 0, /* sq_slice */
2419 2442 0, /* sq_ass_item */
2420 2443 0, /* sq_ass_slice */
2421 2444 (objobjproc)rustla_contains, /* sq_contains */
2422 2445 };
2423 2446
2424 2447 static PyTypeObject rustlazyancestorsType = {
2425 2448 PyVarObject_HEAD_INIT(NULL, 0) /* header */
2426 2449 "parsers.rustlazyancestors", /* tp_name */
2427 2450 sizeof(rustlazyancestorsObject), /* tp_basicsize */
2428 2451 0, /* tp_itemsize */
2429 2452 (destructor)rustla_dealloc, /* tp_dealloc */
2430 2453 0, /* tp_print */
2431 2454 0, /* tp_getattr */
2432 2455 0, /* tp_setattr */
2433 2456 0, /* tp_compare */
2434 2457 0, /* tp_repr */
2435 2458 0, /* tp_as_number */
2436 2459 &rustla_sequence_methods, /* tp_as_sequence */
2437 2460 0, /* tp_as_mapping */
2438 2461 0, /* tp_hash */
2439 2462 0, /* tp_call */
2440 2463 0, /* tp_str */
2441 2464 0, /* tp_getattro */
2442 2465 0, /* tp_setattro */
2443 2466 0, /* tp_as_buffer */
2444 2467 Py_TPFLAGS_DEFAULT, /* tp_flags */
2445 2468 "Iterator over ancestors, implemented in Rust", /* tp_doc */
2446 2469 0, /* tp_traverse */
2447 2470 0, /* tp_clear */
2448 2471 0, /* tp_richcompare */
2449 2472 0, /* tp_weaklistoffset */
2450 2473 0, /* tp_iter */
2451 2474 (iternextfunc)rustla_next, /* tp_iternext */
2452 2475 0, /* tp_methods */
2453 2476 0, /* tp_members */
2454 2477 0, /* tp_getset */
2455 2478 0, /* tp_base */
2456 2479 0, /* tp_dict */
2457 2480 0, /* tp_descr_get */
2458 2481 0, /* tp_descr_set */
2459 2482 0, /* tp_dictoffset */
2460 2483 (initproc)rustla_init, /* tp_init */
2461 2484 0, /* tp_alloc */
2462 2485 };
2463 2486 #endif /* WITH_RUST */
2464 2487
2465 2488 void revlog_module_init(PyObject *mod)
2466 2489 {
2467 2490 indexType.tp_new = PyType_GenericNew;
2468 2491 if (PyType_Ready(&indexType) < 0)
2469 2492 return;
2470 2493 Py_INCREF(&indexType);
2471 2494 PyModule_AddObject(mod, "index", (PyObject *)&indexType);
2472 2495
2473 2496 nodetreeType.tp_new = PyType_GenericNew;
2474 2497 if (PyType_Ready(&nodetreeType) < 0)
2475 2498 return;
2476 2499 Py_INCREF(&nodetreeType);
2477 2500 PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);
2478 2501
2479 2502 if (!nullentry) {
2480 2503 nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0,
2481 2504 0, -1, -1, -1, -1, nullid, 20);
2482 2505 }
2483 2506 if (nullentry)
2484 2507 PyObject_GC_UnTrack(nullentry);
2485 2508
2486 2509 #ifdef WITH_RUST
2487 2510 rustlazyancestorsType.tp_new = PyType_GenericNew;
2488 2511 if (PyType_Ready(&rustlazyancestorsType) < 0)
2489 2512 return;
2490 2513 Py_INCREF(&rustlazyancestorsType);
2491 2514 PyModule_AddObject(mod, "rustlazyancestors",
2492 2515 (PyObject *)&rustlazyancestorsType);
2493 2516 #endif
2494 2517 }
@@ -1,61 +1,74
1 1 /*
2 2 util.h - utility functions for interfacing with the various python APIs.
3 3
4 4 This software may be used and distributed according to the terms of
5 5 the GNU General Public License, incorporated herein by reference.
6 6 */
7 7
8 8 #ifndef _HG_UTIL_H_
9 9 #define _HG_UTIL_H_
10 10
11 11 #include "compat.h"
12 12
13 13 #if PY_MAJOR_VERSION >= 3
14 14 #define IS_PY3K
15 15 #endif
16 16
17 17 /* helper to switch things like string literal depending on Python version */
18 18 #ifdef IS_PY3K
19 19 #define PY23(py2, py3) py3
20 20 #else
21 21 #define PY23(py2, py3) py2
22 22 #endif
23 23
24 24 /* clang-format off */
25 25 typedef struct {
26 26 PyObject_HEAD
27 27 char state;
28 28 int mode;
29 29 int size;
30 30 int mtime;
31 31 } dirstateTupleObject;
32 32 /* clang-format on */
33 33
34 34 extern PyTypeObject dirstateTupleType;
35 35 #define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType)
36 36
37 37 #ifndef MIN
38 38 #define MIN(a, b) (((a) < (b)) ? (a) : (b))
39 39 #endif
40 40 /* VC9 doesn't include bool and lacks stdbool.h based on my searching */
41 41 #if defined(_MSC_VER) || __STDC_VERSION__ < 199901L
42 42 #define true 1
43 43 #define false 0
44 44 typedef unsigned char bool;
45 45 #else
46 46 #include <stdbool.h>
47 47 #endif
48 48
49 49 static inline PyObject *_dict_new_presized(Py_ssize_t expected_size)
50 50 {
51 51 /* _PyDict_NewPresized expects a minused parameter, but it actually
52 52 creates a dictionary that's the nearest power of two bigger than the
53 53 parameter. For example, with the initial minused = 1000, the
54 54 dictionary created has size 1024. Of course in a lot of cases that
55 55 can be greater than the maximum load factor Python's dict object
56 56 expects (= 2/3), so as soon as we cross the threshold we'll resize
57 57 anyway. So create a dictionary that's at least 3/2 the size. */
58 58 return _PyDict_NewPresized(((1 + expected_size) / 2) * 3);
59 59 }
60 60
61 /* Convert a PyInt or PyLong to a long. Returns false if there is an
62 error, in which case an exception will already have been set. */
63 static inline bool pylong_to_long(PyObject *pylong, long *out)
64 {
65 *out = PyLong_AsLong(pylong);
66 /* Fast path to avoid hitting PyErr_Occurred if the value was obviously
67 * not an error. */
68 if (*out != -1) {
69 return true;
70 }
71 return PyErr_Occurred() == NULL;
72 }
73
61 74 #endif /* _HG_UTIL_H_ */
General Comments 0
You need to be logged in to leave comments. Login now