@@ -89,7 +89,7 @@
  */
 #ifndef HG_SHELL
 #define HG_SHELL NULL
 /* #define HG_SHELL "/bin/bash" */
 #endif
 
 /*
@@ -118,40 +118,40 @@ static const int debug = 0;
 
 static void print_cmdline(int argc, char **argv)
 {
     FILE *fp = stderr;
     int i;
 
     fputs("command: ", fp);
 
     for (i = 0; i < argc; i++) {
         char *spc = strpbrk(argv[i], " \t\r\n");
         if (spc) {
             fputc('\'', fp);
         }
         fputs(argv[i], fp);
         if (spc) {
             fputc('\'', fp);
         }
         if (i < argc - 1) {
             fputc(' ', fp);
         }
     }
     fputc('\n', fp);
     fflush(fp);
 }
 
 static void usage(const char *reason, int exitcode)
 {
     char *hg_help = HG_HELP;
 
     if (reason) {
         fprintf(stderr, "*** Error: %s.\n", reason);
     }
     fprintf(stderr, "*** This program has been invoked incorrectly.\n");
     if (hg_help) {
         fprintf(stderr, "*** %s\n", hg_help);
     }
     exit(exitcode ? exitcode : EX_USAGE);
 }
 
 /*
@@ -162,43 +162,43 @@ static void usage(const char *reason, in
  */
 static void forward_through_gateway(int argc, char **argv)
 {
     char *ssh = SSH;
     char *hg_host = HG_HOST;
     char *hg_user = HG_USER;
     char **nargv = alloca((10 + argc) * sizeof(char *));
     int i = 0, j;
 
     nargv[i++] = ssh;
     nargv[i++] = "-q";
     nargv[i++] = "-T";
     nargv[i++] = "-x";
     if (hg_user) {
         nargv[i++] = "-l";
         nargv[i++] = hg_user;
     }
     nargv[i++] = hg_host;
 
     /*
      * sshd called us with added "-c", because it thinks we are a shell.
      * drop it if we find it.
      */
     j = 1;
     if (j < argc && strcmp(argv[j], "-c") == 0) {
         j++;
     }
 
     for (; j < argc; i++, j++) {
         nargv[i] = argv[j];
     }
     nargv[i] = NULL;
 
     if (debug) {
         print_cmdline(i, nargv);
     }
 
     execv(ssh, nargv);
     perror(ssh);
     exit(EX_UNAVAILABLE);
 }
 
 /*
@@ -209,44 +209,44 @@ static void forward_through_gateway(int
  */
 static void run_shell(int argc, char **argv)
 {
     char *hg_shell = HG_SHELL;
     char **nargv;
     char *c;
     int i;
 
     nargv = alloca((argc + 3) * sizeof(char *));
     c = strrchr(hg_shell, '/');
 
     /* tell "real" shell it is login shell, if needed. */
 
     if (argv[0][0] == '-' && c) {
         nargv[0] = strdup(c);
         if (nargv[0] == NULL) {
             perror("malloc");
             exit(EX_OSERR);
         }
         nargv[0][0] = '-';
     } else {
         nargv[0] = hg_shell;
     }
 
     for (i = 1; i < argc; i++) {
         nargv[i] = argv[i];
     }
     nargv[i] = NULL;
 
     if (debug) {
         print_cmdline(i, nargv);
     }
 
     execv(hg_shell, nargv);
     perror(hg_shell);
     exit(EX_OSFILE);
 }
 
 enum cmdline {
     hg_init,
     hg_serve,
 };
 
 
@@ -256,25 +256,25 @@ enum cmdline {
  */
 static int validate_repo(const char *repo_root, const char *subdir)
 {
     char *abs_path;
     struct stat st;
     int ret;
 
     if (asprintf(&abs_path, "%s.hg/%s", repo_root, subdir) == -1) {
         ret = -1;
         goto bail;
     }
 
     /* verify that we really are looking at valid repo. */
 
     if (stat(abs_path, &st) == -1) {
         ret = 0;
     } else {
         ret = 1;
     }
 
 bail:
     return ret;
 }
 
 /*
@@ -282,158 +282,159 @@ bail:
  */
 static void serve_data(int argc, char **argv)
 {
     char *hg_root = HG_ROOT;
     char *repo, *repo_root;
     enum cmdline cmd;
     char *nargv[6];
     size_t repolen;
     int i;
 
     /*
      * check argv for looking okay. we should be invoked with argv
      * resembling like this:
      *
      *   hgsh
      *   -c
      *   hg -R some/path serve --stdio
      *
      * the "-c" is added by sshd, because it thinks we are login shell.
      */
 
     if (argc != 3) {
         goto badargs;
     }
 
     if (strcmp(argv[1], "-c") != 0) {
         goto badargs;
     }
 
     if (sscanf(argv[2], "hg init %as", &repo) == 1) {
         cmd = hg_init;
     }
     else if (sscanf(argv[2], "hg -R %as serve --stdio", &repo) == 1) {
         cmd = hg_serve;
     } else {
         goto badargs;
     }
 
     repolen = repo ? strlen(repo) : 0;
 
     if (repolen == 0) {
         goto badargs;
     }
 
     if (hg_root) {
         if (asprintf(&repo_root, "%s/%s/", hg_root, repo) == -1) {
             goto badargs;
         }
 
         /*
-         * attempt to stop break out from inside the repository tree. could
-         * do something more clever here, because e.g. we could traverse a
-         * symlink that looks safe, but really breaks us out of tree.
+         * attempt to stop break out from inside the
+         * repository tree. could do something more clever
+         * here, because e.g. we could traverse a symlink that
+         * looks safe, but really breaks us out of tree.
          */
 
         if (strstr(repo_root, "/../") != NULL) {
             goto badargs;
         }
 
         /* only hg init expects no repo. */
 
         if (cmd != hg_init) {
             int valid;
 
             valid = validate_repo(repo_root, "data");
 
             if (valid == -1) {
                 goto badargs;
             }
 
             if (valid == 0) {
                 valid = validate_repo(repo_root, "store");
 
                 if (valid == -1) {
                     goto badargs;
                 }
             }
 
             if (valid == 0) {
                 perror(repo);
                 exit(EX_DATAERR);
             }
         }
 
         if (chdir(hg_root) == -1) {
             perror(hg_root);
             exit(EX_SOFTWARE);
         }
     }
 
     i = 0;
 
     switch (cmd) {
     case hg_serve:
         nargv[i++] = HG;
         nargv[i++] = "-R";
         nargv[i++] = repo;
         nargv[i++] = "serve";
         nargv[i++] = "--stdio";
         break;
     case hg_init:
         nargv[i++] = HG;
         nargv[i++] = "init";
         nargv[i++] = repo;
         break;
     }
 
     nargv[i] = NULL;
 
     if (debug) {
         print_cmdline(i, nargv);
     }
 
     execv(HG, nargv);
     perror(HG);
     exit(EX_UNAVAILABLE);
 
 badargs:
     /* print useless error message. */
 
     usage("invalid arguments", EX_DATAERR);
 }
 
 int main(int argc, char **argv)
 {
     char host[1024];
     char *c;
 
     if (gethostname(host, sizeof(host)) == -1) {
         perror("gethostname");
         exit(EX_OSERR);
     }
 
     if ((c = strchr(host, '.')) != NULL) {
         *c = '\0';
     }
 
     if (getenv("SSH_CLIENT")) {
         char *hg_gateway = HG_GATEWAY;
         char *hg_host = HG_HOST;
 
         if (hg_gateway && strcmp(host, hg_gateway) == 0) {
             forward_through_gateway(argc, argv);
         }
 
         if (hg_host && strcmp(host, hg_host) != 0) {
             usage("invoked on unexpected host", EX_USAGE);
         }
 
         serve_data(argc, argv);
     } else if (HG_SHELL) {
         run_shell(argc, argv);
     } else {
         usage("invalid arguments", EX_DATAERR);
     }
 
     return 0;
 }
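
For context on the serve_data() hunk above: sshd runs the configured login shell with "-c" and the client's command string, so hgsh only accepts the two argv shapes spelled out in its comment. The following is a rough Python sketch of that same check, for illustration only; it is not part of the changeset, and the helper name and the regexes (standing in for sscanf's %as) are invented here.

    import re

    def parse_hgsh_argv(argv):
        # Mirrors serve_data() in hgsh.c: expect ["hgsh", "-c", "<command>"].
        if len(argv) != 3 or argv[1] != '-c':
            return None                              # the C code jumps to badargs
        m = re.match(r'hg init (\S+)$', argv[2])
        if m:
            cmd, repo = 'init', m.group(1)
        else:
            m = re.match(r'hg -R (\S+) serve --stdio$', argv[2])
            if not m:
                return None
            cmd, repo = 'serve', m.group(1)
        if not repo or '/../' in '/%s/' % repo:      # crude break-out guard, as in the C code
            return None
        return cmd, repo

    print(parse_hgsh_argv(['hgsh', '-c', 'hg -R some/path serve --stdio']))
    # ('serve', 'some/path')
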
@@ -29,7 +29,7 @@ def memusage(ui):
     finally:
         if status is not None:
             status.close()
-    ui.write_err(", ".join(["%s: %.1f MiB" % (key, value/1024.0)
+    ui.write_err(", ".join(["%s: %.1f MiB" % (key, value / 1024.0)
                             for key, value in result.iteritems()]) + "\n")
 
 def extsetup(ui):
@@ -36,7 +36,7 @@ def perfwalk(ui, repo, *pats):
     except:
         try:
             m = cmdutil.match(repo, pats, {})
-            timer(lambda: len([b for a,b,c in repo.dirstate.statwalk([], m)]))
+            timer(lambda: len([b for a, b, c in repo.dirstate.statwalk([], m)]))
         except:
             timer(lambda: len(list(cmdutil.walk(repo, pats, {}))))
 
@@ -101,9 +101,9 @@ def report(ui, olddatafn, newdatafn):
     # argh: have to pass an int to %d, because a float >= 2^32
     # blows up under Python 2.5 or earlier
     ui.write('old file size: %12d bytes (%6.1f MiB)\n'
-             % (int(oldsize), oldsize/1024/1024))
+             % (int(oldsize), oldsize / 1024 / 1024))
     ui.write('new file size: %12d bytes (%6.1f MiB)\n'
-             % (int(newsize), newsize/1024/1024))
+             % (int(newsize), newsize / 1024 / 1024))
 
     shrink_percent = (oldsize - newsize) / oldsize * 100
     shrink_factor = oldsize / newsize
@@ -219,4 +219,4 @@ cmdtable = {
 }
 
 if __name__ == "__main__":
     print "shrink-revlog.py is now an extension (see hg help extensions)"
@@ -19,7 +19,7 @@ def get_desc(docstr):
 
     i = docstr.find("\n")
     if i != -1:
-        desc = docstr[i+2:]
+        desc = docstr[i + 2:]
     else:
         desc = " %s" % shortdesc
     return (shortdesc, desc)
@@ -76,7 +76,8 @@ def show_doc(ui):
     cmds.sort()
 
     for f in cmds:
-        if f.startswith("debug"): continue
+        if f.startswith("debug"):
+            continue
         d = get_cmd(h[f])
         # synopsis
         ui.write(".. _%s:\n\n" % d['cmd'])
@@ -112,7 +112,7 @@ class Writer(writers.Writer):
 class Table:
     def __init__(self):
         self._rows = []
-        self._options = ['center',
+        self._options = ['center',]
         self._tab_char = '\t'
         self._coldefs = []
     def new_row(self):
@@ -123,7 +123,7 @@ class Table:
     def append_cell(self, cell_lines):
         """cell_lines is an array of lines"""
         start = 0
-        if len(cell_lines)>0 and cell_lines[0] == '.sp\n':
+        if len(cell_lines) > 0 and cell_lines[0] == '.sp\n':
             start = 1
         self._rows[-1].append(cell_lines[start:])
         if len(self._coldefs) < len(self._rows[-1]):
@@ -223,8 +223,8 @@ class Translator(nodes.NodeVisitor):
         'term' : ('\n.B ', '\n'),
         'title_reference' : ('\\fI', '\\fP'),
 
-        'topic-title' : ('.SS ',
-        'sidebar-title' : ('.SS ',
+        'topic-title' : ('.SS ',),
+        'sidebar-title' : ('.SS ',),
 
         'problematic' : ('\n.nf\n', '\n.fi\n'),
     }
@@ -255,18 +255,18 @@ class Translator(nodes.NodeVisitor):
         # ensure we get a ".TH" as viewers require it.
         self.head.append(self.header())
         # filter body
-        for i in xrange(len(self.body)-1,0,-1):
+        for i in xrange(len(self.body)-1, 0, -1):
             # remove superfluous vertical gaps.
             if self.body[i] == '.sp\n':
-                if self.body[i-1][:4] in ('.BI ','.IP '):
+                if self.body[i - 1][:4] in ('.BI ','.IP '):
                     self.body[i] = '.\n'
-                elif (self.body[i-1][:3] == '.B ' and
-                      self.body[i-2][:4] == '.TP\n'):
+                elif (self.body[i - 1][:3] == '.B ' and
+                      self.body[i - 2][:4] == '.TP\n'):
                     self.body[i] = '.\n'
-                elif (self.body[i-1] == '\n' and
-                      self.body[i-2][0] != '.' and
-                      (self.body[i-3][:7] == '.TP\n.B '
-                       or self.body[i-3][:4] == '\n.B ')
+                elif (self.body[i - 1] == '\n' and
+                      self.body[i - 2][0] != '.' and
+                      (self.body[i - 3][:7] == '.TP\n.B '
+                       or self.body[i - 3][:4] == '\n.B ')
                      ):
                     self.body[i] = '.\n'
         return ''.join(self.head + self.body + self.foot)
@@ -451,7 +451,7 @@ class Translator(nodes.NodeVisitor):
     depart_caution = depart_admonition
 
     def visit_citation(self, node):
-        num,text = node.astext().split(None,1)
+        num, text = node.astext().split(None, 1)
         num = num.strip()
         self.body.append('.IP [%s] 5\n' % num)
 
@@ -578,19 +578,18 @@ class Translator(nodes.NodeVisitor):
                     self.defs['indent'][0] % BLOCKQOUTE_INDENT,
                     self._docinfo[name],
                     self.defs['indent'][1],
-                    self.defs['indent'][1]
-                    ) )
+                    self.defs['indent'][1]))
             elif not name in skip:
                 if name in self._docinfo_names:
                     label = self._docinfo_names[name]
                 else:
                     label = self.language.labels.get(name, name)
-                self.body.append("\n%s: %s\n" % (label, self._docinfo[name])
+                self.body.append("\n%s: %s\n" % (label, self._docinfo[name]))
         if self._docinfo['copyright']:
             self.body.append('.SH COPYRIGHT\n%s\n'
                              % self._docinfo['copyright'])
-        self.body.append(
-                'Generated by docutils manpage writer.\n'
+        self.body.append(self.comment(
+                'Generated by docutils manpage writer.\n'))
 
     def visit_emphasis(self, node):
         self.body.append(self.defs['emphasis'][0])
@@ -672,7 +671,7 @@ class Translator(nodes.NodeVisitor):
         pass
 
     def visit_footnote(self, node):
-        num,text = node.astext().split(None,1)
+        num, text = node.astext().split(None, 1)
         num = num.strip()
         self.body.append('.IP [%s] 5\n' % self.deunicode(num))
 
@@ -786,7 +785,7 @@ class Translator(nodes.NodeVisitor):
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append('.IP %s %d\n' % (
             self._list_char[-1].next(),
-            self._list_char[-1].get_width(),)
+            self._list_char[-1].get_width(),))
 
     def depart_list_item(self, node):
         pass
@@ -858,7 +857,7 @@ class Translator(nodes.NodeVisitor):
 
     def visit_option(self, node):
         # each form of the option will be presented separately
-        if self.context[-1]>0:
+        if self.context[-1] > 0:
             self.body.append(', ')
         if self.context[-3] == '.BI':
             self.body.append('\\')
@@ -877,7 +876,7 @@ class Translator(nodes.NodeVisitor):
     def visit_option_argument(self, node):
         self.context[-3] = '.BI' # bold/italic alternate
         if node['delimiter'] != ' ':
-            self.body.append('\\fB%s ' % node['delimiter']
+            self.body.append('\\fB%s ' % node['delimiter'])
        elif self.body[len(self.body)-1].endswith('='):
             # a blank only means no blank in output, just changing font
             self.body.append(' ')
@@ -297,7 +297,8 @@ class bugzilla_2_18(bugzilla_2_16):
 
     def __init__(self, ui):
         bugzilla_2_16.__init__(self, ui)
-        self.default_notify = "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
+        self.default_notify = \
+            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
 
 class bugzilla_3_0(bugzilla_2_18):
     '''support for bugzilla 3.0 series.'''
@@ -369,7 +370,8 @@ class bugzilla(object):
                 break
             start = m.end()
            for id in bugzilla._split_re.split(m.group(1)):
-                if not id: continue
+                if not id:
+                    continue
                 ids.add(int(id))
         if ids:
             ids = self.filter_real_bug_ids(ids)
@@ -389,7 +391,7 @@ class bugzilla(object):
            c = root.find('/')
            if c == -1:
                break
-           root = root[c+1:]
+           root = root[c + 1:]
            count -= 1
        return root
 
@@ -169,7 +169,7 @@ def churn(ui, repo, *pats, **opts):
                    '*' * charnum(sum(count)))
 
     def charnum(count):
-        return int(round(count*width/maxcount))
+        return int(round(count * width / maxcount))
 
     for name, count in rate:
         ui.write(format(name, count))
@@ -180,7 +180,8 @@ cmdtable = {
     (churn,
      [('r', 'rev', [], _('count rate for the specified revision or range')),
       ('d', 'date', '', _('count rate for revisions matching date spec')),
-      ('t', 'template', '{author|email}', _('template to group changesets')),
+      ('t', 'template', '{author|email}',
+       _('template to group changesets')),
       ('f', 'dateformat', '',
        _('strftime-compatible format for grouping by date')),
       ('c', 'changesets', False, _('count rate by number of changesets')),
@@ -190,8 +190,8 @@ def colorqseries(orig, ui, repo, *dummy,
         if opts['missing']:
             effects = _patch_effects['missing']
         # Determine if patch is applied.
         elif [applied for applied in repo.mq.applied
               if patchname == applied.name]:
             effects = _patch_effects['applied']
         else:
             effects = _patch_effects['unapplied']
@@ -279,7 +279,8 @@ cmdtable = {
           # Main options shared with cvsps-2.1
           ('b', 'branches', [], _('only return changes on specified branches')),
           ('p', 'prefix', '', _('prefix to remove from file names')),
-          ('r', 'revisions', [], _('only return changes after or between specified tags')),
+          ('r', 'revisions', [],
+           _('only return changes after or between specified tags')),
           ('u', 'update-cache', None, _("update cvs log cache")),
           ('x', 'new-cache', None, _("create new cvs log cache")),
           ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
@@ -203,7 +203,8 @@ class bzr_source(converter_source):
                 changes.append((frompath, revid))
                 changes.append((topath, revid))
                 # add to mode cache
-                mode = ((entry.executable and 'x') or (entry.kind == 'symlink' and 's')
+                mode = ((entry.executable and 'x')
+                        or (entry.kind == 'symlink' and 's')
                         or '')
                 self._modecache[(topath, revid)] = mode
                 # register the change as move
@@ -24,7 +24,8 @@ def decodeargs(s):
     s = base64.decodestring(s)
     return pickle.loads(s)
 
-class MissingTool(Exception): pass
+class MissingTool(Exception):
+    pass
 
 def checktool(exe, name=None, abort=True):
     name = name or exe
@@ -32,7 +33,8 @@ def checktool(exe, name=None, abort=True
     exc = abort and util.Abort or MissingTool
     raise exc(_('cannot find required "%s" tool') % name)
 
-class NoRepo(Exception): pass
+class NoRepo(Exception):
+    pass
 
 SKIPREV = 'SKIP'
 
@@ -322,7 +324,7 @@ class commandline(object):
        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
 
-        self._argmax = self._argmax/2 - 1
+        self._argmax = self._argmax / 2 - 1
        return self._argmax
 
    def limit_arglist(self, arglist, cmd, *args, **kwargs):
@@ -367,8 +369,9 @@ class mapfile(dict):
            try:
                key, value = line.splitlines()[0].rsplit(' ', 1)
            except ValueError:
-                raise util.Abort(_('syntax error in %s(%d): key/value pair expected')
-                                 % (self.path, i+1))
+                raise util.Abort(
+                    _('syntax error in %s(%d): key/value pair expected')
+                    % (self.path, i + 1))
            if key not in self:
                self.order.append(key)
            super(mapfile, self).__setitem__(key, value)
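
The mapfile hunk above only rewraps the Abort call; the parsing it guards expects one "key value" pair per line and splits on the last space, so keys may contain spaces while a line with no space raises the ValueError that triggers the error message. A small illustration (the sample line is invented):

    line = 'release-1.0 c1f419e5ce0d\n'        # hypothetical map file entry
    key, value = line.splitlines()[0].rsplit(' ', 1)
    # key == 'release-1.0', value == 'c1f419e5ce0d'
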
@@ -108,7 +108,8 @@ class converter(object):
         parents = {}
         while visit:
             n = visit.pop(0)
-            if n in known or n in self.map: continue
+            if n in known or n in self.map:
+                continue
             known.add(n)
             commit = self.cachecommit(n)
             parents[n] = []
@@ -46,7 +46,8 @@ class convert_cvs(converter_source):
                 # patchset number?
                 maxrev = int(self.rev)
             except ValueError:
-                raise util.Abort(_('revision %s is not a patchset number') % self.rev)
+                raise util.Abort(_('revision %s is not a patchset number')
+                                 % self.rev)
 
         d = os.getcwd()
         try:
@@ -65,7 +66,7 @@ class convert_cvs(converter_source):
                           mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
 
         for cs in db:
-            if maxrev and cs.id>maxrev:
+            if maxrev and cs.id > maxrev:
                 break
             id = str(cs.id)
             cs.author = self.recode(cs.author)
@@ -76,7 +77,8 @@ class convert_cvs(converter_source):
 
             files = {}
             for f in cs.entries:
-                files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
+                files[f.file] = "%s%s" % ('.'.join([str(x)
+                                                    for x in f.revision]),
                                           ['', '(DEAD)'][f.dead])
 
             # add current commit to set
@@ -186,8 +188,8 @@ class convert_cvs(converter_source):
         self.writep.flush()
         r = self.readp.readline()
         if not r.startswith("Valid-requests"):
-            raise util.Abort(_(
-                "
+            raise util.Abort(_('unexpected response from CVS server '
+                               '(expected "Valid-requests", but got %r)')
                              % r)
         if "UseUnchanged" in r:
             self.writep.write("UseUnchanged\n")
@@ -208,7 +210,8 @@ class convert_cvs(converter_source):
             while count > 0:
                 data = fp.read(min(count, chunksize))
                 if not data:
-                    raise util.Abort(_("%d bytes missing from remote file") % count)
+                    raise util.Abort(_("%d bytes missing from remote file")
+                                     % count)
                 count -= len(data)
                 output.write(data)
             return output.getvalue()
@@ -402,7 +402,7 @@ def createlog(ui, directory=None, root="
                 # normal branch
                 if revparts[:-2] == e.revision:
                     branchpoints.add(branch)
-                elif revparts == (1,1,1): # vendor branch
+                elif revparts == (1, 1, 1): # vendor branch
                     if revparts in e.branches:
                         branchpoints.add(branch)
             e.branchpoints = branchpoints
@@ -632,7 +632,7 @@ def createchangeset(ui, log, fuzz=60, me
     branches = {}    # changeset index where we saw a branch
     n = len(changesets)
     i = 0
-    while i<n:
+    while i < n:
         c = changesets[i]
 
         for f in c.entries:
@@ -702,9 +702,12 @@ def createchangeset(ui, log, fuzz=60, me
                 m = None   # if no group found then merge to HEAD
             if m in branches and c.branch != m:
                 # insert empty changeset for merge
-                cc = changeset(
-                    comment='convert-repo: CVS merge from branch %s' % c.branch,
-                    entries=[], tags=[], parents=[changesets[branches[m]], c])
+                cc = changeset(
+                    author=c.author, branch=m, date=c.date,
+                    comment='convert-repo: CVS merge from branch %s'
+                            % c.branch,
+                    entries=[], tags=[],
+                    parents=[changesets[branches[m]], c])
                 changesets.insert(i + 1, cc)
                 branches[m] = i + 1
 
@@ -774,7 +777,7 @@ def debugcvsps(ui, *args, **opts):
 
         if opts["ancestors"]:
             if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
                                        cs.parents[0].id)
            branches[cs.branch] = cs.id
 
@@ -791,14 +794,15 @@ def debugcvsps(ui, *args, **opts):
                                  '%Y/%m/%d %H:%M:%S %1%2'))
        ui.write('Author: %s\n' % cs.author)
        ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
-        ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
+        ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
                                    ','.join(cs.tags) or '(none)'))
        branchpoints = getattr(cs, 'branchpoints', None)
        if branchpoints:
            ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
        if opts["parents"] and cs.parents:
-            if len(cs.parents)>1:
-                ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
+            if len(cs.parents) > 1:
+                ui.write('Parents: %s\n' %
+                         (','.join([str(p.id) for p in cs.parents])))
            else:
                ui.write('Parent: %d\n' % cs.parents[0].id)
 
@@ -818,8 +822,10 @@ def debugcvsps(ui, *args, **opts):
            fn = f.file
            if fn.startswith(opts["prefix"]):
                fn = fn[len(opts["prefix"]):]
            ui.write('\t%s:%s->%s%s \n' % (
+                fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
+                '.'.join([str(x) for x in f.revision]),
+                ['', '(DEAD)'][f.dead]))
        ui.write('\n')
 
        # have we seen the start tag?
@@ -829,7 +835,7 @@ def debugcvsps(ui, *args, **opts):
            off = False
 
        # see if we reached the end tag
-        if len(revisions)>1 and not off:
+        if len(revisions) > 1 and not off:
            if revisions[1] == str(cs.id) or \
               revisions[1] in cs.tags:
                break
@@ -12,15 +12,19 @@ import os, shutil, tempfile
 
 # The naming drift of ElementTree is fun!
 
-try: from xml.etree.cElementTree import ElementTree
+try:
+    from xml.etree.cElementTree import ElementTree
 except ImportError:
-    try: from xml.etree.ElementTree import ElementTree
+    try:
+        from xml.etree.ElementTree import ElementTree
     except ImportError:
-        try: from elementtree.cElementTree import ElementTree
+        try:
+            from elementtree.cElementTree import ElementTree
         except ImportError:
-            try: from elementtree.ElementTree import ElementTree
-            except ImportError: ElementTree = None
+            try:
+                from elementtree.ElementTree import ElementTree
+            except ImportError:
+                ElementTree = None
 
 class darcs_source(converter_source, commandline):
     def __init__(self, ui, path, rev=None):
@@ -12,7 +12,7 @@ from common import SKIPREV, converter_so
 def rpairs(name):
     e = len(name)
     while e != -1:
-        yield name[:e], name[e+1:]
+        yield name[:e], name[e + 1:]
         e = name.rfind('/', 0, e)
     yield '.', name
 
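
As a quick reference for the rpairs() generator shown in the hunk above: it yields every prefix of a path from the longest down to '.', which is what the filemap lookups walk through. Run on its own it behaves like this (illustration only):

    def rpairs(name):
        e = len(name)
        while e != -1:
            yield name[:e], name[e + 1:]
            e = name.rfind('/', 0, e)
        yield '.', name

    print(list(rpairs('a/b/c')))
    # [('a/b/c', ''), ('a/b', 'c'), ('a', 'b/c'), ('.', 'a/b/c')]
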
@@ -43,13 +43,15 @@ class convert_git(converter_source):
 
     def getheads(self):
         if not self.rev:
+            fh = self.gitcmd('git rev-parse --branches --remotes')
+            return fh.read().splitlines()
         else:
             fh = self.gitcmd("git rev-parse --verify %s" % self.rev)
             return [fh.read()[:-1]]
 
     def catfile(self, rev, type):
-        if rev == "0" * 40: raise IOError()
+        if rev == "0" * 40:
+            raise IOError()
         fh = self.gitcmd("git cat-file %s %s" % (type, rev))
         return fh.read()
 
@@ -86,7 +88,7 @@ class convert_git(converter_source):
     def getcommit(self, version):
         c = self.catfile(version, "commit") # read the commit hash
         end = c.find("\n\n")
-        message = c[end+2:]
+        message = c[end + 2:]
         message = self.recode(message)
         l = c[:end].splitlines()
         parents = []
@@ -105,7 +107,8 @@ class convert_git(converter_source):
             committer = " ".join(p[:-2])
             if committer[0] == "<": committer = committer[1:-1]
             committer = self.recode(committer)
-            if n == "parent": parents.append(v)
+            if n == "parent":
+                parents.append(v)
 
         if committer and committer != author:
             message += "\ncommitter: %s\n" % committer
@@ -145,7 +148,7 @@ class convert_git(converter_source):
             fh.close()
         else:
             fh = self.gitcmd('git diff-tree --name-only --root -r %s "%s^%s" --'
-                             % (version, version, i+1))
+                             % (version, version, i + 1))
             changes = [f.rstrip('\n') for f in fh]
             fh.close()
 
@@ -89,7 +89,8 @@ class gnuarch_source(converter_source, c
 
             # Get the complete list of revisions for that tree version
             output, status = self.runlines('revisions', '-r', '-f', treeversion)
-            self.checkexit(status, 'failed retrieveing revisions for %s' % treeversion)
+            self.checkexit(status, 'failed retrieveing revisions for %s'
+                           % treeversion)
 
             # No new iteration unless a revision has a continuation-of header
             treeversion = None
@@ -116,7 +117,8 @@ class gnuarch_source(converter_source, c
                 # or if we have to 'jump' to a different treeversion given
                 # by the continuation-of header.
                 if self.changes[rev].continuationof:
-                    treeversion = '--'.join(self.changes[rev].continuationof.split('--')[:-1])
+                    treeversion = '--'.join(
+                        self.changes[rev].continuationof.split('--')[:-1])
                     break
 
         # If we reached a base-0 revision w/o any continuation-of
@@ -220,7 +222,7 @@ class gnuarch_source(converter_source, c
         return data, mode
 
     def _exclude(self, name):
-        exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
+        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
         for exc in exclude:
             if name.find(exc) != -1:
                 return True
@@ -285,7 +287,8 @@ class gnuarch_source(converter_source, c
 
                 # Commit revision origin when dealing with a branch or tag
                 if 'Continuation-of' in catlog:
-                    self.changes[rev].continuationof = self.recode(catlog['Continuation-of'])
+                    self.changes[rev].continuationof = self.recode(
+                        catlog['Continuation-of'])
             except Exception:
                 raise util.Abort(_('could not parse cat-log of %s') % rev)
 
@@ -74,7 +74,7 @@ class mercurial_sink(converter_sink):
 
     def getheads(self):
         h = self.repo.changelog.heads()
-        return [ hex(x) for x in h ]
+        return [hex(x) for x in h]
 
     def setbranch(self, branch, pbranches):
         if not self.clonebranches:
@@ -147,8 +147,10 @@ class mercurial_sink(converter_sink):
             m1node = self.repo.changelog.read(bin(parents[0]))[0]
             parent = parents[0]
 
-        if len(parents) < 2: parents.append(nullid)
-        if len(parents) < 2: parents.append(nullid)
+        if len(parents) < 2:
+            parents.append(nullid)
+        if len(parents) < 2:
+            parents.append(nullid)
         p2 = parents.pop(0)
 
         text = commit.desc
@@ -161,8 +163,8 @@ class mercurial_sink(converter_sink):
         while parents:
             p1 = p2
             p2 = parents.pop(0)
-            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
-                                 commit.author, commit.date, extra)
+            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
+                                 getfilectx, commit.author, commit.date, extra)
             self.repo.commitctx(ctx)
             text = "(octopus merge fixup)\n"
             p2 = hex(self.repo.changelog.tip())
@@ -38,16 +38,22 @@ class monotone_source(converter_source,
         lines = r'(?:.|\n)+'
 
         self.dir_re = re.compile(space + "dir" + name)
-        self.file_re = re.compile(space + "file" + name + "content" + revision)
-        self.add_file_re = re.compile(space + "add_file" + name + "content" + revision)
-        self.patch_re = re.compile(space + "patch" + name + "from" + revision + "to" + revision)
+        self.file_re = re.compile(space + "file" + name +
+                                  "content" + revision)
+        self.add_file_re = re.compile(space + "add_file" + name +
+                                      "content" + revision)
+        self.patch_re = re.compile(space + "patch" + name +
+                                   "from" + revision + "to" + revision)
         self.rename_re = re.compile(space + "rename" + name + "to" + name)
         self.delete_re = re.compile(space + "delete" + name)
-        self.tag_re = re.compile(space + "tag" + name + "revision" + revision)
-        self.cert_re = re.compile(lines + space + "name" + name + "value" + value)
+        self.tag_re = re.compile(space + "tag" + name + "revision" +
+                                 revision)
+        self.cert_re = re.compile(lines + space + "name" + name +
+                                  "value" + value)
 
         attr = space + "file" + lines + space + "attr" + space
-        self.attr_execute_re = re.compile(attr + '"mtn:execute"' + space + '"true"')
+        self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
+                                          space + '"true"')
 
         # cached data
         self.manifest_rev = None
@@ -42,8 +42,12 @@ class p4_source(converter_source):
         self.encoding = "latin_1"
         self.depotname = {}   # mapping from local name to depot name
         self.modecache = {}
-        self.re_type = re.compile("([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)(\+\w+)?$")
-        self.re_keywords = re.compile(r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author):[^$\n]*\$")
+        self.re_type = re.compile(
+            "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
+            "(\+\w+)?$")
+        self.re_keywords = re.compile(
+            r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
+            r":[^$\n]*\$")
         self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
 
         self._parse(ui, path)
@@ -118,7 +122,8 @@ class p4_source(converter_source):
 
             date = (int(d["time"]), 0)   # timezone not set
             c = commit(author=self.recode(d["user"]), date=util.datestr(date),
-                       parents=parents, desc=desc, branch='', extra={"p4": change})
+                       parents=parents, desc=desc, branch='',
+                       extra={"p4": change})
 
             files = []
             i = 0
@@ -138,7 +138,7 b' class logstream(object):' | |||||
138 | # looking for several svn-specific files and directories in the given |
|
138 | # looking for several svn-specific files and directories in the given | |
139 | # directory. |
|
139 | # directory. | |
140 | def filecheck(ui, path, proto): |
|
140 | def filecheck(ui, path, proto): | |
141 |
for x in ('locks', 'hooks', 'format', 'db' |
|
141 | for x in ('locks', 'hooks', 'format', 'db'): | |
142 | if not os.path.exists(os.path.join(path, x)): |
|
142 | if not os.path.exists(os.path.join(path, x)): | |
143 | return False |
|
143 | return False | |
144 | return True |
|
144 | return True | |
@@ -150,7 +150,7 b' def httpcheck(ui, path, proto):' | |||||
150 | try: |
|
150 | try: | |
151 | opener = urllib2.build_opener() |
|
151 | opener = urllib2.build_opener() | |
152 | rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path)) |
|
152 | rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path)) | |
153 |
data = rsp.read() |
|
153 | data = rsp.read() | |
154 | except urllib2.HTTPError, inst: |
|
154 | except urllib2.HTTPError, inst: | |
155 | if inst.code != 404: |
|
155 | if inst.code != 404: | |
156 | # Except for 404 we cannot know for sure this is not an svn repo |
|
156 | # Except for 404 we cannot know for sure this is not an svn repo | |
@@ -231,7 +231,7 b' class svn_source(converter_source):' | |||||
231 | # deleted branches. |
|
231 | # deleted branches. | |
232 | at = url.rfind('@') |
|
232 | at = url.rfind('@') | |
233 | if at >= 0: |
|
233 | if at >= 0: | |
234 | latest = int(url[at+1:]) |
|
234 | latest = int(url[at + 1:]) | |
235 | url = url[:at] |
|
235 | url = url[:at] | |
236 | except ValueError: |
|
236 | except ValueError: | |
237 | pass |
|
237 | pass | |
@@ -363,7 +363,8 b' class svn_source(converter_source):' | |||||
363 | 'with more than one branch')) |
|
363 | 'with more than one branch')) | |
364 | revnum = self.revnum(self.heads[0]) |
|
364 | revnum = self.revnum(self.heads[0]) | |
365 | if revnum < self.startrev: |
|
365 | if revnum < self.startrev: | |
366 | raise util.Abort(_('svn: no revision found after start revision %d') |
|
366 | raise util.Abort( | |
|
367 | _('svn: no revision found after start revision %d') | |||
367 | % self.startrev) |
|
368 | % self.startrev) | |
368 |
|
369 | |||
369 | return self.heads |
|
370 | return self.heads | |
@@ -389,7 +390,7 b' class svn_source(converter_source):' | |||||
389 | uuid, module, revnum = self.revsplit(rev) |
|
390 | uuid, module, revnum = self.revsplit(rev) | |
390 | entries = svn.client.ls(self.baseurl + urllib.quote(module), |
|
391 | entries = svn.client.ls(self.baseurl + urllib.quote(module), | |
391 | optrev(revnum), True, self.ctx) |
|
392 | optrev(revnum), True, self.ctx) | |
392 | files = [n for n,e in entries.iteritems() |
|
393 | files = [n for n, e in entries.iteritems() | |
393 | if e.kind == svn.core.svn_node_file] |
|
394 | if e.kind == svn.core.svn_node_file] | |
394 | copies = {} |
|
395 | copies = {} | |
395 |
|
396 | |||
@@ -564,7 +565,8 b' class svn_source(converter_source):' | |||||
564 | except SubversionException: |
|
565 | except SubversionException: | |
565 | dirent = None |
|
566 | dirent = None | |
566 | if not dirent: |
|
567 | if not dirent: | |
567 |
raise SvnPathNotFound(_('%s not found up to revision %d') |
|
568 | raise SvnPathNotFound(_('%s not found up to revision %d') | |
|
569 | % (path, stop)) | |||
568 |
|
570 | |||
569 | # stat() gives us the previous revision on this line of |
|
571 | # stat() gives us the previous revision on this line of | |
570 | # development, but it might be in *another module*. Fetch the |
|
572 | # development, but it might be in *another module*. Fetch the | |
@@ -645,7 +647,8 b' class svn_source(converter_source):' | |||||
645 | # We can avoid the reparent calls if the module has |
|
647 | # We can avoid the reparent calls if the module has | |
646 | # not changed but it probably does not worth the pain. |
|
648 | # not changed but it probably does not worth the pain. | |
647 | prevmodule = self.reparent('') |
|
649 | prevmodule = self.reparent('') | |
648 |
fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), |
|
650 | fromkind = svn.ra.check_path(self.ra, parentpath.strip('/'), | |
|
651 | prevnum) | |||
649 | self.reparent(prevmodule) |
|
652 | self.reparent(prevmodule) | |
650 |
|
653 | |||
651 | if fromkind == svn.core.svn_node_file: |
|
654 | if fromkind == svn.core.svn_node_file: | |
@@ -657,7 +660,7 b' class svn_source(converter_source):' | |||||
657 | oroot = parentpath.strip('/') |
|
660 | oroot = parentpath.strip('/') | |
658 | nroot = path.strip('/') |
|
661 | nroot = path.strip('/') | |
659 | children = self._find_children(oroot, prevnum) |
|
662 | children = self._find_children(oroot, prevnum) | |
660 | children = [s.replace(oroot,nroot) for s in children] |
|
663 | children = [s.replace(oroot, nroot) for s in children] | |
661 |
|
664 | |||
662 | for child in children: |
|
665 | for child in children: | |
663 | childpath = self.getrelpath("/" + child, pmodule) |
|
666 | childpath = self.getrelpath("/" + child, pmodule) | |
@@ -738,7 +741,8 b' class svn_source(converter_source):' | |||||
738 | # check whether this revision is the start of a branch or part |
|
741 | # check whether this revision is the start of a branch or part | |
739 | # of a branch renaming |
|
742 | # of a branch renaming | |
740 | orig_paths = sorted(orig_paths.iteritems()) |
|
743 | orig_paths = sorted(orig_paths.iteritems()) | |
741 | root_paths = [(p,e) for p,e in orig_paths |
|
744 | root_paths = [(p, e) for p, e in orig_paths | |
|
745 | if self.module.startswith(p)] | |||
742 | if root_paths: |
|
746 | if root_paths: | |
743 | path, ent = root_paths[-1] |
|
747 | path, ent = root_paths[-1] | |
744 | if ent.copyfrom_path: |
|
748 | if ent.copyfrom_path: | |
@@ -750,8 +754,9 b' class svn_source(converter_source):' | |||||
750 | prevmodule, prevnum = self.revsplit(previd)[1:] |
|
754 | prevmodule, prevnum = self.revsplit(previd)[1:] | |
751 | if prevnum >= self.startrev: |
|
755 | if prevnum >= self.startrev: | |
752 | parents = [previd] |
|
756 | parents = [previd] | |
753 | self.ui.note( |
|
757 | self.ui.note( | |
754 | (self.module, prevnum, prevmodule)) |
|
758 | _('found parent of branch %s at %d: %s\n') % | |
|
759 | (self.module, prevnum, prevmodule)) | |||
755 | else: |
|
760 | else: | |
756 | self.ui.debug("no copyfrom path, don't know what to do.\n") |
|
761 | self.ui.debug("no copyfrom path, don't know what to do.\n") | |
757 |
|
762 |
@@ -125,4 +125,5 b' class SvnRaTransport(object):' | |||||
125 | self._baton, pool) |
|
125 | self._baton, pool) | |
126 |
|
126 | |||
127 | def do_update(self, revnum, path, *args, **kwargs): |
|
127 | def do_update(self, revnum, path, *args, **kwargs): | |
128 | return self.Reporter(svn.ra.do_update(self.ra, revnum, path, |
|
128 | return self.Reporter(svn.ra.do_update(self.ra, revnum, path, | |
|
129 | *args, **kwargs)) |
@@ -239,7 +239,8 b' def uisetup(ui):' | |||||
239 | for cmd, path in ui.configitems('extdiff'): |
|
239 | for cmd, path in ui.configitems('extdiff'): | |
240 | if cmd.startswith('cmd.'): |
|
240 | if cmd.startswith('cmd.'): | |
241 | cmd = cmd[4:] |
|
241 | cmd = cmd[4:] | |
242 | if not path: |
|
242 | if not path: | |
|
243 | path = cmd | |||
243 | diffopts = ui.config('extdiff', 'opts.' + cmd, '') |
|
244 | diffopts = ui.config('extdiff', 'opts.' + cmd, '') | |
244 | diffopts = diffopts and [diffopts] or [] |
|
245 | diffopts = diffopts and [diffopts] or [] | |
245 | elif cmd.startswith('opts.'): |
|
246 | elif cmd.startswith('opts.'): |
@@ -38,8 +38,10 b' class gpg(object):' | |||||
38 | finally: |
|
38 | finally: | |
39 | for f in (sigfile, datafile): |
|
39 | for f in (sigfile, datafile): | |
40 | try: |
|
40 | try: | |
41 | if f: |
|
41 | if f: | |
42 | except: pass |
|
42 | os.unlink(f) | |
|
43 | except: | |||
|
44 | pass | |||
43 | keys = [] |
|
45 | keys = [] | |
44 | key, fingerprint = None, None |
|
46 | key, fingerprint = None, None | |
45 | err = "" |
|
47 | err = "" |
@@ -186,7 +186,7 b' def revtree(ui, args, repo, full="tree",' | |||||
186 | l[x].changeset() # force reading |
|
186 | l[x].changeset() # force reading | |
187 | else: |
|
187 | else: | |
188 | l[x] = 1 |
|
188 | l[x] = 1 | |
189 | for x in xrange(chunk-1, -1, -1): |
|
189 | for x in xrange(chunk - 1, -1, -1): | |
190 | if l[x] != 0: |
|
190 | if l[x] != 0: | |
191 | yield (i + x, full != None and l[x] or None) |
|
191 | yield (i + x, full != None and l[x] or None) | |
192 | if i == 0: |
|
192 | if i == 0: |
@@ -60,7 +60,7 b' def reposetup(ui, repo):' | |||||
60 | if ui.config('inotify', 'debug'): |
|
60 | if ui.config('inotify', 'debug'): | |
61 | r2 = super(inotifydirstate, self).status( |
|
61 | r2 = super(inotifydirstate, self).status( | |
62 | match, False, clean, unknown) |
|
62 | match, False, clean, unknown) | |
63 | for c,a,b in zip('LMARDUIC', result, r2): |
|
63 | for c, a, b in zip('LMARDUIC', result, r2): | |
64 | for f in a: |
|
64 | for f in a: | |
65 | if f not in b: |
|
65 | if f not in b: | |
66 | ui.warn('*** inotify: %s +%s\n' % (c, f)) |
|
66 | ui.warn('*** inotify: %s +%s\n' % (c, f)) |
@@ -11,7 +11,8 b' from mercurial.i18n import _' | |||||
11 | import common, server |
|
11 | import common, server | |
12 | import errno, os, socket, struct |
|
12 | import errno, os, socket, struct | |
13 |
|
13 | |||
14 | class QueryFailed(Exception): |
|
14 | class QueryFailed(Exception): | |
|
15 | pass | |||
15 |
|
16 | |||
16 | def start_server(function): |
|
17 | def start_server(function): | |
17 | """ |
|
18 | """ | |
@@ -137,8 +138,10 b' class client(object):' | |||||
137 | states = 'almrx!' |
|
138 | states = 'almrx!' | |
138 | if ignored: |
|
139 | if ignored: | |
139 | raise ValueError('this is insanity') |
|
140 | raise ValueError('this is insanity') | |
140 | if clean: |
|
141 | if clean: | |
141 |
|
|
142 | states += 'c' | |
|
143 | if unknown: | |||
|
144 | states += '?' | |||
142 | yield states |
|
145 | yield states | |
143 |
|
146 | |||
144 | req = '\0'.join(genquery()) |
|
147 | req = '\0'.join(genquery()) |
@@ -17,298 +17,298 b'' | |||||
17 |
|
17 | |||
18 | static PyObject *init(PyObject *self, PyObject *args) |
|
18 | static PyObject *init(PyObject *self, PyObject *args) | |
19 | { |
|
19 | { | |
20 |
|
|
20 | PyObject *ret = NULL; | |
21 |
|
|
21 | int fd = -1; | |
22 |
|
22 | |||
23 |
|
|
23 | if (!PyArg_ParseTuple(args, ":init")) | |
24 | goto bail; |
|
24 | goto bail; | |
25 |
|
25 | |||
26 |
|
|
26 | Py_BEGIN_ALLOW_THREADS; | |
27 |
|
|
27 | fd = inotify_init(); | |
28 |
|
|
28 | Py_END_ALLOW_THREADS; | |
29 |
|
29 | |||
30 |
|
|
30 | if (fd == -1) { | |
31 | PyErr_SetFromErrno(PyExc_OSError); |
|
31 | PyErr_SetFromErrno(PyExc_OSError); | |
32 | goto bail; |
|
32 | goto bail; | |
33 | } |
|
33 | } | |
34 |
|
34 | |||
35 |
|
|
35 | ret = PyInt_FromLong(fd); | |
36 |
|
|
36 | if (ret == NULL) | |
37 | goto bail; |
|
37 | goto bail; | |
38 |
|
38 | |||
39 |
|
|
39 | goto done; | |
40 |
|
40 | |||
41 | bail: |
|
41 | bail: | |
42 |
|
|
42 | if (fd != -1) | |
43 | close(fd); |
|
43 | close(fd); | |
44 |
|
44 | |||
45 |
|
|
45 | Py_CLEAR(ret); | |
46 |
|
46 | |||
47 | done: |
|
47 | done: | |
48 |
|
|
48 | return ret; | |
49 | } |
|
49 | } | |
50 |
|
50 | |||
51 | PyDoc_STRVAR( |
|
51 | PyDoc_STRVAR( | |
52 |
|
|
52 | init_doc, | |
53 |
|
|
53 | "init() -> fd\n" | |
54 |
|
|
54 | "\n" | |
55 |
|
|
55 | "Initialise an inotify instance.\n" | |
56 |
|
|
56 | "Return a file descriptor associated with a new inotify event queue."); | |
57 |
|
57 | |||
58 | static PyObject *add_watch(PyObject *self, PyObject *args) |
|
58 | static PyObject *add_watch(PyObject *self, PyObject *args) | |
59 | { |
|
59 | { | |
60 |
|
|
60 | PyObject *ret = NULL; | |
61 |
|
|
61 | uint32_t mask; | |
62 |
|
|
62 | int wd = -1; | |
63 |
|
|
63 | char *path; | |
64 |
|
|
64 | int fd; | |
65 |
|
65 | |||
66 |
|
|
66 | if (!PyArg_ParseTuple(args, "isI:add_watch", &fd, &path, &mask)) | |
67 | goto bail; |
|
67 | goto bail; | |
68 |
|
68 | |||
69 |
|
|
69 | Py_BEGIN_ALLOW_THREADS; | |
70 |
|
|
70 | wd = inotify_add_watch(fd, path, mask); | |
71 |
|
|
71 | Py_END_ALLOW_THREADS; | |
72 |
|
72 | |||
73 |
|
|
73 | if (wd == -1) { | |
74 | PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); |
|
74 | PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); | |
75 | goto bail; |
|
75 | goto bail; | |
76 | } |
|
76 | } | |
77 |
|
77 | |||
78 |
|
|
78 | ret = PyInt_FromLong(wd); | |
79 |
|
|
79 | if (ret == NULL) | |
80 | goto bail; |
|
80 | goto bail; | |
81 |
|
81 | |||
82 |
|
|
82 | goto done; | |
83 |
|
83 | |||
84 | bail: |
|
84 | bail: | |
85 |
|
|
85 | if (wd != -1) | |
86 | inotify_rm_watch(fd, wd); |
|
86 | inotify_rm_watch(fd, wd); | |
87 |
|
87 | |||
88 |
|
|
88 | Py_CLEAR(ret); | |
89 |
|
89 | |||
90 | done: |
|
90 | done: | |
91 |
|
|
91 | return ret; | |
92 | } |
|
92 | } | |
93 |
|
93 | |||
94 | PyDoc_STRVAR( |
|
94 | PyDoc_STRVAR( | |
95 |
|
|
95 | add_watch_doc, | |
96 |
|
|
96 | "add_watch(fd, path, mask) -> wd\n" | |
97 |
|
|
97 | "\n" | |
98 |
|
|
98 | "Add a watch to an inotify instance, or modify an existing watch.\n" | |
99 |
|
|
99 | "\n" | |
100 |
|
|
100 | " fd: file descriptor returned by init()\n" | |
101 |
|
|
101 | " path: path to watch\n" | |
102 |
|
|
102 | " mask: mask of events to watch for\n" | |
103 |
|
|
103 | "\n" | |
104 |
|
|
104 | "Return a unique numeric watch descriptor for the inotify instance\n" | |
105 |
|
|
105 | "mapped by the file descriptor."); | |
106 |
|
106 | |||
107 | static PyObject *remove_watch(PyObject *self, PyObject *args) |
|
107 | static PyObject *remove_watch(PyObject *self, PyObject *args) | |
108 | { |
|
108 | { | |
109 |
|
|
109 | uint32_t wd; | |
110 |
|
|
110 | int fd; | |
111 |
|
|
111 | int r; | |
112 |
|
112 | |||
113 |
|
|
113 | if (!PyArg_ParseTuple(args, "iI:remove_watch", &fd, &wd)) | |
114 | return NULL; |
|
114 | return NULL; | |
115 |
|
115 | |||
116 |
|
|
116 | Py_BEGIN_ALLOW_THREADS; | |
117 |
|
|
117 | r = inotify_rm_watch(fd, wd); | |
118 |
|
|
118 | Py_END_ALLOW_THREADS; | |
119 |
|
119 | |||
120 |
|
|
120 | if (r == -1) { | |
121 | PyErr_SetFromErrno(PyExc_OSError); |
|
121 | PyErr_SetFromErrno(PyExc_OSError); | |
122 | return NULL; |
|
122 | return NULL; | |
123 | } |
|
123 | } | |
124 |
|
124 | |||
125 |
|
|
125 | Py_INCREF(Py_None); | |
126 |
|
|
126 | return Py_None; | |
127 | } |
|
127 | } | |
128 |
|
128 | |||
129 | PyDoc_STRVAR( |
|
129 | PyDoc_STRVAR( | |
130 |
|
|
130 | remove_watch_doc, | |
131 |
|
|
131 | "remove_watch(fd, wd)\n" | |
132 |
|
|
132 | "\n" | |
133 |
|
|
133 | " fd: file descriptor returned by init()\n" | |
134 |
|
|
134 | " wd: watch descriptor returned by add_watch()\n" | |
135 |
|
|
135 | "\n" | |
136 |
|
|
136 | "Remove a watch associated with the watch descriptor wd from the\n" | |
137 |
|
|
137 | "inotify instance associated with the file descriptor fd.\n" | |
138 |
|
|
138 | "\n" | |
139 |
|
|
139 | "Removing a watch causes an IN_IGNORED event to be generated for this\n" | |
140 |
|
|
140 | "watch descriptor."); | |
141 |
|
141 | |||
142 | #define bit_name(x) {x, #x} |
|
142 | #define bit_name(x) {x, #x} | |
143 |
|
143 | |||
144 | static struct { |
|
144 | static struct { | |
145 |
|
|
145 | int bit; | |
146 |
|
|
146 | const char *name; | |
147 |
|
|
147 | PyObject *pyname; | |
148 | } bit_names[] = { |
|
148 | } bit_names[] = { | |
149 |
|
|
149 | bit_name(IN_ACCESS), | |
150 |
|
|
150 | bit_name(IN_MODIFY), | |
151 |
|
|
151 | bit_name(IN_ATTRIB), | |
152 |
|
|
152 | bit_name(IN_CLOSE_WRITE), | |
153 |
|
|
153 | bit_name(IN_CLOSE_NOWRITE), | |
154 |
|
|
154 | bit_name(IN_OPEN), | |
155 |
|
|
155 | bit_name(IN_MOVED_FROM), | |
156 |
|
|
156 | bit_name(IN_MOVED_TO), | |
157 |
|
|
157 | bit_name(IN_CREATE), | |
158 |
|
|
158 | bit_name(IN_DELETE), | |
159 |
|
|
159 | bit_name(IN_DELETE_SELF), | |
160 |
|
|
160 | bit_name(IN_MOVE_SELF), | |
161 |
|
|
161 | bit_name(IN_UNMOUNT), | |
162 |
|
|
162 | bit_name(IN_Q_OVERFLOW), | |
163 |
|
|
163 | bit_name(IN_IGNORED), | |
164 |
|
|
164 | bit_name(IN_ONLYDIR), | |
165 |
|
|
165 | bit_name(IN_DONT_FOLLOW), | |
166 |
|
|
166 | bit_name(IN_MASK_ADD), | |
167 |
|
|
167 | bit_name(IN_ISDIR), | |
168 |
|
|
168 | bit_name(IN_ONESHOT), | |
169 | {0} |
|
169 | {0} | |
170 | }; |
|
170 | }; | |
171 |
|
171 | |||
172 | static PyObject *decode_mask(int mask) |
|
172 | static PyObject *decode_mask(int mask) | |
173 | { |
|
173 | { | |
174 |
|
|
174 | PyObject *ret = PyList_New(0); | |
175 |
|
|
175 | int i; | |
176 |
|
176 | |||
177 |
|
|
177 | if (ret == NULL) | |
178 | goto bail; |
|
178 | goto bail; | |
179 |
|
179 | |||
180 |
|
|
180 | for (i = 0; bit_names[i].bit; i++) { | |
181 | if (mask & bit_names[i].bit) { |
|
181 | if (mask & bit_names[i].bit) { | |
182 |
|
|
182 | if (bit_names[i].pyname == NULL) { | |
183 | bit_names[i].pyname = PyString_FromString(bit_names[i].name); |
|
183 | bit_names[i].pyname = PyString_FromString(bit_names[i].name); | |
184 | if (bit_names[i].pyname == NULL) |
|
184 | if (bit_names[i].pyname == NULL) | |
185 |
|
|
185 | goto bail; | |
186 | } |
|
186 | } | |
187 |
|
|
187 | Py_INCREF(bit_names[i].pyname); | |
188 |
|
|
188 | if (PyList_Append(ret, bit_names[i].pyname) == -1) | |
189 | goto bail; |
|
189 | goto bail; | |
|
190 | } | |||
190 | } |
|
191 | } | |
191 | } |
|
|||
192 |
|
192 | |||
193 |
|
|
193 | goto done; | |
194 |
|
194 | |||
195 | bail: |
|
195 | bail: | |
196 |
|
|
196 | Py_CLEAR(ret); | |
197 |
|
197 | |||
198 | done: |
|
198 | done: | |
199 |
|
|
199 | return ret; | |
200 | } |
|
200 | } | |
201 |
|
201 | |||
202 | static PyObject *pydecode_mask(PyObject *self, PyObject *args) |
|
202 | static PyObject *pydecode_mask(PyObject *self, PyObject *args) | |
203 | { |
|
203 | { | |
204 |
|
|
204 | int mask; | |
205 |
|
205 | |||
206 |
|
|
206 | if (!PyArg_ParseTuple(args, "i:decode_mask", &mask)) | |
207 | return NULL; |
|
207 | return NULL; | |
208 |
|
208 | |||
209 |
|
|
209 | return decode_mask(mask); | |
210 | } |
|
210 | } | |
211 |
|
211 | |||
212 | PyDoc_STRVAR( |
|
212 | PyDoc_STRVAR( | |
213 |
|
|
213 | decode_mask_doc, | |
214 |
|
|
214 | "decode_mask(mask) -> list_of_strings\n" | |
215 |
|
|
215 | "\n" | |
216 |
|
|
216 | "Decode an inotify mask value into a list of strings that give the\n" | |
217 |
|
|
217 | "name of each bit set in the mask."); | |
218 |
|
218 | |||
219 | static char doc[] = "Low-level inotify interface wrappers."; |
|
219 | static char doc[] = "Low-level inotify interface wrappers."; | |
220 |
|
220 | |||
221 | static void define_const(PyObject *dict, const char *name, uint32_t val) |
|
221 | static void define_const(PyObject *dict, const char *name, uint32_t val) | |
222 | { |
|
222 | { | |
223 |
|
|
223 | PyObject *pyval = PyInt_FromLong(val); | |
224 |
|
|
224 | PyObject *pyname = PyString_FromString(name); | |
225 |
|
225 | |||
226 |
|
|
226 | if (!pyname || !pyval) | |
227 | goto bail; |
|
227 | goto bail; | |
228 |
|
228 | |||
229 |
|
|
229 | PyDict_SetItem(dict, pyname, pyval); | |
230 |
|
230 | |||
231 | bail: |
|
231 | bail: | |
232 |
|
|
232 | Py_XDECREF(pyname); | |
233 |
|
|
233 | Py_XDECREF(pyval); | |
234 | } |
|
234 | } | |
235 |
|
235 | |||
236 | static void define_consts(PyObject *dict) |
|
236 | static void define_consts(PyObject *dict) | |
237 | { |
|
237 | { | |
238 |
|
|
238 | define_const(dict, "IN_ACCESS", IN_ACCESS); | |
239 |
|
|
239 | define_const(dict, "IN_MODIFY", IN_MODIFY); | |
240 |
|
|
240 | define_const(dict, "IN_ATTRIB", IN_ATTRIB); | |
241 |
|
|
241 | define_const(dict, "IN_CLOSE_WRITE", IN_CLOSE_WRITE); | |
242 |
|
|
242 | define_const(dict, "IN_CLOSE_NOWRITE", IN_CLOSE_NOWRITE); | |
243 |
|
|
243 | define_const(dict, "IN_OPEN", IN_OPEN); | |
244 |
|
|
244 | define_const(dict, "IN_MOVED_FROM", IN_MOVED_FROM); | |
245 |
|
|
245 | define_const(dict, "IN_MOVED_TO", IN_MOVED_TO); | |
246 |
|
246 | |||
247 |
|
|
247 | define_const(dict, "IN_CLOSE", IN_CLOSE); | |
248 |
|
|
248 | define_const(dict, "IN_MOVE", IN_MOVE); | |
249 |
|
249 | |||
250 |
|
|
250 | define_const(dict, "IN_CREATE", IN_CREATE); | |
251 |
|
|
251 | define_const(dict, "IN_DELETE", IN_DELETE); | |
252 |
|
|
252 | define_const(dict, "IN_DELETE_SELF", IN_DELETE_SELF); | |
253 |
|
|
253 | define_const(dict, "IN_MOVE_SELF", IN_MOVE_SELF); | |
254 |
|
|
254 | define_const(dict, "IN_UNMOUNT", IN_UNMOUNT); | |
255 |
|
|
255 | define_const(dict, "IN_Q_OVERFLOW", IN_Q_OVERFLOW); | |
256 |
|
|
256 | define_const(dict, "IN_IGNORED", IN_IGNORED); | |
257 |
|
257 | |||
258 |
|
|
258 | define_const(dict, "IN_ONLYDIR", IN_ONLYDIR); | |
259 |
|
|
259 | define_const(dict, "IN_DONT_FOLLOW", IN_DONT_FOLLOW); | |
260 |
|
|
260 | define_const(dict, "IN_MASK_ADD", IN_MASK_ADD); | |
261 |
|
|
261 | define_const(dict, "IN_ISDIR", IN_ISDIR); | |
262 |
|
|
262 | define_const(dict, "IN_ONESHOT", IN_ONESHOT); | |
263 |
|
|
263 | define_const(dict, "IN_ALL_EVENTS", IN_ALL_EVENTS); | |
264 | } |
|
264 | } | |
265 |
|
265 | |||
266 | struct event { |
|
266 | struct event { | |
267 |
|
|
267 | PyObject_HEAD | |
268 |
|
|
268 | PyObject *wd; | |
269 |
|
|
269 | PyObject *mask; | |
270 |
|
|
270 | PyObject *cookie; | |
271 |
|
|
271 | PyObject *name; | |
272 | }; |
|
272 | }; | |
273 |
|
273 | |||
274 | static PyObject *event_wd(PyObject *self, void *x) |
|
274 | static PyObject *event_wd(PyObject *self, void *x) | |
275 | { |
|
275 | { | |
276 |
|
|
276 | struct event *evt = (struct event *)self; | |
277 |
|
|
277 | Py_INCREF(evt->wd); | |
278 |
|
|
278 | return evt->wd; | |
279 | } |
|
279 | } | |
280 |
|
280 | |||
281 | static PyObject *event_mask(PyObject *self, void *x) |
|
281 | static PyObject *event_mask(PyObject *self, void *x) | |
282 | { |
|
282 | { | |
283 |
|
|
283 | struct event *evt = (struct event *)self; | |
284 |
|
|
284 | Py_INCREF(evt->mask); | |
285 |
|
|
285 | return evt->mask; | |
286 | } |
|
286 | } | |
287 |
|
287 | |||
288 | static PyObject *event_cookie(PyObject *self, void *x) |
|
288 | static PyObject *event_cookie(PyObject *self, void *x) | |
289 | { |
|
289 | { | |
290 |
|
|
290 | struct event *evt = (struct event *)self; | |
291 |
|
|
291 | Py_INCREF(evt->cookie); | |
292 |
|
|
292 | return evt->cookie; | |
293 | } |
|
293 | } | |
294 |
|
294 | |||
295 | static PyObject *event_name(PyObject *self, void *x) |
|
295 | static PyObject *event_name(PyObject *self, void *x) | |
296 | { |
|
296 | { | |
297 |
|
|
297 | struct event *evt = (struct event *)self; | |
298 |
|
|
298 | Py_INCREF(evt->name); | |
299 |
|
|
299 | return evt->name; | |
300 | } |
|
300 | } | |
301 |
|
301 | |||
302 | static struct PyGetSetDef event_getsets[] = { |
|
302 | static struct PyGetSetDef event_getsets[] = { | |
303 |
|
|
303 | {"wd", event_wd, NULL, | |
304 |
|
|
304 | "watch descriptor"}, | |
305 |
|
|
305 | {"mask", event_mask, NULL, | |
306 |
|
|
306 | "event mask"}, | |
307 |
|
|
307 | {"cookie", event_cookie, NULL, | |
308 |
|
|
308 | "rename cookie, if rename-related event"}, | |
309 |
|
|
309 | {"name", event_name, NULL, | |
310 |
|
|
310 | "file name"}, | |
311 |
|
|
311 | {NULL} | |
312 | }; |
|
312 | }; | |
313 |
|
313 | |||
314 | PyDoc_STRVAR( |
|
314 | PyDoc_STRVAR( | |
@@ -317,284 +317,285 b' PyDoc_STRVAR(' | |||||
317 |
|
317 | |||
318 | static PyObject *event_new(PyTypeObject *t, PyObject *a, PyObject *k) |
|
318 | static PyObject *event_new(PyTypeObject *t, PyObject *a, PyObject *k) | |
319 | { |
|
319 | { | |
320 |
|
|
320 | return (*t->tp_alloc)(t, 0); | |
321 | } |
|
321 | } | |
322 |
|
322 | |||
323 | static void event_dealloc(struct event *evt) |
|
323 | static void event_dealloc(struct event *evt) | |
324 | { |
|
324 | { | |
325 |
|
|
325 | Py_XDECREF(evt->wd); | |
326 |
|
|
326 | Py_XDECREF(evt->mask); | |
327 |
|
|
327 | Py_XDECREF(evt->cookie); | |
328 |
|
|
328 | Py_XDECREF(evt->name); | |
329 |
|
329 | |||
330 |
|
|
330 | (*evt->ob_type->tp_free)(evt); | |
331 | } |
|
331 | } | |
332 |
|
332 | |||
333 | static PyObject *event_repr(struct event *evt) |
|
333 | static PyObject *event_repr(struct event *evt) | |
334 | { |
|
334 | { | |
335 |
|
|
335 | int wd = PyInt_AsLong(evt->wd); | |
336 |
|
|
336 | int cookie = evt->cookie == Py_None ? -1 : PyInt_AsLong(evt->cookie); | |
337 |
|
|
337 | PyObject *ret = NULL, *pymasks = NULL, *pymask = NULL; | |
338 |
|
|
338 | PyObject *join = NULL; | |
339 |
|
|
339 | char *maskstr; | |
340 |
|
340 | |||
341 |
|
|
341 | join = PyString_FromString("|"); | |
342 |
|
|
342 | if (join == NULL) | |
343 | goto bail; |
|
343 | goto bail; | |
344 |
|
344 | |||
345 |
|
|
345 | pymasks = decode_mask(PyInt_AsLong(evt->mask)); | |
346 |
|
|
346 | if (pymasks == NULL) | |
347 | goto bail; |
|
347 | goto bail; | |
348 |
|
348 | |||
349 |
|
|
349 | pymask = _PyString_Join(join, pymasks); | |
350 |
|
|
350 | if (pymask == NULL) | |
351 | goto bail; |
|
351 | goto bail; | |
352 |
|
352 | |||
353 |
|
|
353 | maskstr = PyString_AsString(pymask); | |
|
354 | ||||
|
355 | if (evt->name != Py_None) { | |||
|
356 | PyObject *pyname = PyString_Repr(evt->name, 1); | |||
|
357 | char *name = pyname ? PyString_AsString(pyname) : "???"; | |||
354 |
|
358 | |||
355 | if (evt->name != Py_None) { |
|
359 | if (cookie == -1) | |
356 | PyObject *pyname = PyString_Repr(evt->name, 1); |
|
360 | ret = PyString_FromFormat( | |
357 | char *name = pyname ? PyString_AsString(pyname) : "???"; |
|
361 | "event(wd=%d, mask=%s, name=%s)", | |
358 |
|
362 | wd, maskstr, name); | ||
359 | if (cookie == -1) |
|
363 | else | |
360 |
|
|
364 | ret = PyString_FromFormat("event(wd=%d, mask=%s, " | |
361 | wd, maskstr, name); |
|
365 | "cookie=0x%x, name=%s)", | |
362 | else |
|
366 | wd, maskstr, cookie, name); | |
363 | ret = PyString_FromFormat("event(wd=%d, mask=%s, " |
|
|||
364 | "cookie=0x%x, name=%s)", |
|
|||
365 | wd, maskstr, cookie, name); |
|
|||
366 |
|
367 | |||
367 | Py_XDECREF(pyname); |
|
368 | Py_XDECREF(pyname); | |
368 |
|
|
369 | } else { | |
369 | if (cookie == -1) |
|
370 | if (cookie == -1) | |
370 |
|
|
371 | ret = PyString_FromFormat("event(wd=%d, mask=%s)", | |
371 |
|
|
372 | wd, maskstr); | |
372 | else { |
|
373 | else { | |
373 | ret = PyString_FromFormat("event(wd=%d, mask=%s, cookie=0x%x)", |
|
374 | ret = PyString_FromFormat( | |
374 |
|
|
375 | "event(wd=%d, mask=%s, cookie=0x%x)", | |
|
376 | wd, maskstr, cookie); | |||
|
377 | } | |||
375 | } |
|
378 | } | |
376 | } |
|
|||
377 |
|
379 | |||
378 |
|
|
380 | goto done; | |
379 | bail: |
|
381 | bail: | |
380 |
|
|
382 | Py_CLEAR(ret); | |
381 |
|
383 | |||
382 | done: |
|
384 | done: | |
383 |
|
|
385 | Py_XDECREF(pymask); | |
384 |
|
|
386 | Py_XDECREF(pymasks); | |
385 |
|
|
387 | Py_XDECREF(join); | |
386 |
|
388 | |||
387 |
|
|
389 | return ret; | |
388 | } |
|
390 | } | |
389 |
|
391 | |||
390 | static PyTypeObject event_type = { |
|
392 | static PyTypeObject event_type = { | |
391 |
|
|
393 | PyObject_HEAD_INIT(NULL) | |
392 |
|
|
394 | 0, /*ob_size*/ | |
393 |
|
|
395 | "_inotify.event", /*tp_name*/ | |
394 |
|
|
396 | sizeof(struct event), /*tp_basicsize*/ | |
395 |
|
|
397 | 0, /*tp_itemsize*/ | |
396 |
|
|
398 | (destructor)event_dealloc, /*tp_dealloc*/ | |
397 |
|
|
399 | 0, /*tp_print*/ | |
398 |
|
|
400 | 0, /*tp_getattr*/ | |
399 |
|
|
401 | 0, /*tp_setattr*/ | |
400 |
|
|
402 | 0, /*tp_compare*/ | |
401 |
|
|
403 | (reprfunc)event_repr, /*tp_repr*/ | |
402 |
|
|
404 | 0, /*tp_as_number*/ | |
403 |
|
|
405 | 0, /*tp_as_sequence*/ | |
404 |
|
|
406 | 0, /*tp_as_mapping*/ | |
405 |
|
|
407 | 0, /*tp_hash */ | |
406 |
|
|
408 | 0, /*tp_call*/ | |
407 |
|
|
409 | 0, /*tp_str*/ | |
408 |
|
|
410 | 0, /*tp_getattro*/ | |
409 |
|
|
411 | 0, /*tp_setattro*/ | |
410 |
|
|
412 | 0, /*tp_as_buffer*/ | |
411 |
|
|
413 | Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ | |
412 |
|
|
414 | event_doc, /* tp_doc */ | |
413 |
|
|
415 | 0, /* tp_traverse */ | |
414 |
|
|
416 | 0, /* tp_clear */ | |
415 |
|
|
417 | 0, /* tp_richcompare */ | |
416 |
|
|
418 | 0, /* tp_weaklistoffset */ | |
417 |
|
|
419 | 0, /* tp_iter */ | |
418 |
|
|
420 | 0, /* tp_iternext */ | |
419 |
|
|
421 | 0, /* tp_methods */ | |
420 |
|
|
422 | 0, /* tp_members */ | |
421 |
|
|
423 | event_getsets, /* tp_getset */ | |
422 |
|
|
424 | 0, /* tp_base */ | |
423 |
|
|
425 | 0, /* tp_dict */ | |
424 |
|
|
426 | 0, /* tp_descr_get */ | |
425 |
|
|
427 | 0, /* tp_descr_set */ | |
426 |
|
|
428 | 0, /* tp_dictoffset */ | |
427 |
|
|
429 | 0, /* tp_init */ | |
428 |
|
|
430 | 0, /* tp_alloc */ | |
429 |
|
|
431 | event_new, /* tp_new */ | |
430 | }; |
|
432 | }; | |
431 |
|
433 | |||
432 | PyObject *read_events(PyObject *self, PyObject *args) |
|
434 | PyObject *read_events(PyObject *self, PyObject *args) | |
433 | { |
|
435 | { | |
434 |
|
|
436 | PyObject *ctor_args = NULL; | |
435 |
|
|
437 | PyObject *pybufsize = NULL; | |
436 |
|
|
438 | PyObject *ret = NULL; | |
437 |
|
|
439 | int bufsize = 65536; | |
438 |
|
|
440 | char *buf = NULL; | |
439 |
|
|
441 | int nread, pos; | |
440 |
|
|
442 | int fd; | |
|
443 | ||||
|
444 | if (!PyArg_ParseTuple(args, "i|O:read", &fd, &pybufsize)) | |||
|
445 | goto bail; | |||
441 |
|
446 | |||
442 | if (!PyArg_ParseTuple(args, "i|O:read", &fd, &pybufsize)) |
|
447 | if (pybufsize && pybufsize != Py_None) | |
443 | goto bail; |
|
448 | bufsize = PyInt_AsLong(pybufsize); | |
444 |
|
449 | |||
445 | if (pybufsize && pybufsize != Py_None) |
|
450 | ret = PyList_New(0); | |
446 | bufsize = PyInt_AsLong(pybufsize); |
|
451 | if (ret == NULL) | |
|
452 | goto bail; | |||
447 |
|
453 | |||
448 | ret = PyList_New(0); |
|
454 | if (bufsize <= 0) { | |
449 | if (ret == NULL) |
|
455 | int r; | |
450 | goto bail; |
|
|||
451 |
|
456 | |||
452 | if (bufsize <= 0) { |
|
457 | Py_BEGIN_ALLOW_THREADS; | |
453 | int r; |
|
458 | r = ioctl(fd, FIONREAD, &bufsize); | |
|
459 | Py_END_ALLOW_THREADS; | |||
454 |
|
460 | |||
455 | Py_BEGIN_ALLOW_THREADS |
|
461 | if (r == -1) { | |
456 | r = ioctl(fd, FIONREAD, &bufsize); |
|
462 | PyErr_SetFromErrno(PyExc_OSError); | |
457 | Py_END_ALLOW_THREADS |
|
463 | goto bail; | |
458 |
|
464 | } | ||
459 | if (r == -1) { |
|
465 | if (bufsize == 0) | |
460 | PyErr_SetFromErrno(PyExc_OSError); |
|
466 | goto done; | |
461 | goto bail; |
|
|||
462 | } |
|
467 | } | |
463 | if (bufsize == 0) |
|
468 | else { | |
464 | goto done; |
|
469 | static long name_max; | |
465 | } |
|
470 | static long name_fd = -1; | |
466 | else { |
|
471 | long min; | |
467 | static long name_max; |
|
|||
468 | static long name_fd = -1; |
|
|||
469 | long min; |
|
|||
470 |
|
472 | |||
471 | if (name_fd != fd) { |
|
473 | if (name_fd != fd) { | |
472 |
|
|
474 | name_fd = fd; | |
473 |
|
|
475 | Py_BEGIN_ALLOW_THREADS; | |
474 |
|
|
476 | name_max = fpathconf(fd, _PC_NAME_MAX); | |
475 |
|
|
477 | Py_END_ALLOW_THREADS; | |
|
478 | } | |||
|
479 | ||||
|
480 | min = sizeof(struct inotify_event) + name_max + 1; | |||
|
481 | ||||
|
482 | if (bufsize < min) { | |||
|
483 | PyErr_Format(PyExc_ValueError, | |||
|
484 | "bufsize must be at least %d", (int)min); | |||
|
485 | goto bail; | |||
|
486 | } | |||
476 | } |
|
487 | } | |
477 |
|
488 | |||
478 | min = sizeof(struct inotify_event) + name_max + 1; |
|
489 | buf = alloca(bufsize); | |
479 |
|
||||
480 | if (bufsize < min) { |
|
|||
481 | PyErr_Format(PyExc_ValueError, "bufsize must be at least %d", |
|
|||
482 | (int) min); |
|
|||
483 | goto bail; |
|
|||
484 | } |
|
|||
485 | } |
|
|||
486 |
|
||||
487 | buf = alloca(bufsize); |
|
|||
488 |
|
||||
489 | Py_BEGIN_ALLOW_THREADS |
|
|||
490 | nread = read(fd, buf, bufsize); |
|
|||
491 | Py_END_ALLOW_THREADS |
|
|||
492 |
|
||||
493 | if (nread == -1) { |
|
|||
494 | PyErr_SetFromErrno(PyExc_OSError); |
|
|||
495 | goto bail; |
|
|||
496 | } |
|
|||
497 |
|
||||
498 | ctor_args = PyTuple_New(0); |
|
|||
499 |
|
||||
500 | if (ctor_args == NULL) |
|
|||
501 | goto bail; |
|
|||
502 |
|
490 | |||
503 | pos = 0; |
|
491 | Py_BEGIN_ALLOW_THREADS; | |
504 |
|
492 | nread = read(fd, buf, bufsize); | ||
505 | while (pos < nread) { |
|
493 | Py_END_ALLOW_THREADS; | |
506 | struct inotify_event *in = (struct inotify_event *) (buf + pos); |
|
|||
507 | struct event *evt; |
|
|||
508 | PyObject *obj; |
|
|||
509 |
|
||||
510 | obj = PyObject_CallObject((PyObject *) &event_type, ctor_args); |
|
|||
511 |
|
||||
512 | if (obj == NULL) |
|
|||
513 | goto bail; |
|
|||
514 |
|
||||
515 | evt = (struct event *) obj; |
|
|||
516 |
|
494 | |||
517 | evt->wd = PyInt_FromLong(in->wd); |
|
495 | if (nread == -1) { | |
518 | evt->mask = PyInt_FromLong(in->mask); |
|
496 | PyErr_SetFromErrno(PyExc_OSError); | |
519 | if (in->mask & IN_MOVE) |
|
497 | goto bail; | |
520 | evt->cookie = PyInt_FromLong(in->cookie); |
|
|||
521 | else { |
|
|||
522 | Py_INCREF(Py_None); |
|
|||
523 | evt->cookie = Py_None; |
|
|||
524 | } |
|
|||
525 | if (in->len) |
|
|||
526 | evt->name = PyString_FromString(in->name); |
|
|||
527 | else { |
|
|||
528 | Py_INCREF(Py_None); |
|
|||
529 | evt->name = Py_None; |
|
|||
530 | } |
|
498 | } | |
531 |
|
499 | |||
532 | if (!evt->wd || !evt->mask || !evt->cookie || !evt->name) |
|
500 | ctor_args = PyTuple_New(0); | |
533 | goto mybail; |
|
501 | ||
|
502 | if (ctor_args == NULL) | |||
|
503 | goto bail; | |||
|
504 | ||||
|
505 | pos = 0; | |||
534 |
|
506 | |||
535 | if (PyList_Append(ret, obj) == -1) |
|
507 | while (pos < nread) { | |
536 | goto mybail; |
|
508 | struct inotify_event *in = (struct inotify_event *)(buf + pos); | |
|
509 | struct event *evt; | |||
|
510 | PyObject *obj; | |||
537 |
|
511 | |||
538 | pos += sizeof(struct inotify_event) + in->len; |
|
512 | obj = PyObject_CallObject((PyObject *)&event_type, ctor_args); | |
539 | continue; |
|
513 | ||
|
514 | if (obj == NULL) | |||
|
515 | goto bail; | |||
|
516 | ||||
|
517 | evt = (struct event *)obj; | |||
540 |
|
518 | |||
541 | mybail: |
|
519 | evt->wd = PyInt_FromLong(in->wd); | |
542 | Py_CLEAR(evt->wd); |
|
520 | evt->mask = PyInt_FromLong(in->mask); | |
543 | Py_CLEAR(evt->mask); |
|
521 | if (in->mask & IN_MOVE) | |
544 | Py_CLEAR(evt->cookie); |
|
522 | evt->cookie = PyInt_FromLong(in->cookie); | |
545 | Py_CLEAR(evt->name); |
|
523 | else { | |
546 |
|
|
524 | Py_INCREF(Py_None); | |
|
525 | evt->cookie = Py_None; | |||
|
526 | } | |||
|
527 | if (in->len) | |||
|
528 | evt->name = PyString_FromString(in->name); | |||
|
529 | else { | |||
|
530 | Py_INCREF(Py_None); | |||
|
531 | evt->name = Py_None; | |||
|
532 | } | |||
|
533 | ||||
|
534 | if (!evt->wd || !evt->mask || !evt->cookie || !evt->name) | |||
|
535 | goto mybail; | |||
547 |
|
536 | |||
548 | goto bail; |
|
537 | if (PyList_Append(ret, obj) == -1) | |
549 | } |
|
538 | goto mybail; | |
|
539 | ||||
|
540 | pos += sizeof(struct inotify_event) + in->len; | |||
|
541 | continue; | |||
550 |
|
542 | |||
551 | goto done; |
|
543 | mybail: | |
|
544 | Py_CLEAR(evt->wd); | |||
|
545 | Py_CLEAR(evt->mask); | |||
|
546 | Py_CLEAR(evt->cookie); | |||
|
547 | Py_CLEAR(evt->name); | |||
|
548 | Py_DECREF(obj); | |||
|
549 | ||||
|
550 | goto bail; | |||
|
551 | } | |||
|
552 | ||||
|
553 | goto done; | |||
552 |
|
554 | |||
553 | bail: |
|
555 | bail: | |
554 |
|
|
556 | Py_CLEAR(ret); | |
555 |
|
557 | |||
556 | done: |
|
558 | done: | |
557 |
|
|
559 | Py_XDECREF(ctor_args); | |
558 |
|
560 | |||
559 |
|
|
561 | return ret; | |
560 | } |
|
562 | } | |
561 |
|
563 | |||
562 | PyDoc_STRVAR( |
|
564 | PyDoc_STRVAR( | |
563 |
|
|
565 | read_doc, | |
564 |
|
|
566 | "read(fd, bufsize[=65536]) -> list_of_events\n" | |
565 |
|
|
567 | "\n" | |
566 |
|
|
568 | "\nRead inotify events from a file descriptor.\n" | |
567 |
|
|
569 | "\n" | |
568 |
|
|
570 | " fd: file descriptor returned by init()\n" | |
569 |
|
|
571 | " bufsize: size of buffer to read into, in bytes\n" | |
570 |
|
|
572 | "\n" | |
571 |
|
|
573 | "Return a list of event objects.\n" | |
572 |
|
|
574 | "\n" | |
573 |
|
|
575 | "If bufsize is > 0, block until events are available to be read.\n" | |
574 |
|
|
576 | "Otherwise, immediately return all events that can be read without\n" | |
575 |
|
|
577 | "blocking."); | |
576 |
|
||||
577 |
|
578 | |||
578 | static PyMethodDef methods[] = { |
|
579 | static PyMethodDef methods[] = { | |
579 |
|
|
580 | {"init", init, METH_VARARGS, init_doc}, | |
580 |
|
|
581 | {"add_watch", add_watch, METH_VARARGS, add_watch_doc}, | |
581 |
|
|
582 | {"remove_watch", remove_watch, METH_VARARGS, remove_watch_doc}, | |
582 |
|
|
583 | {"read", read_events, METH_VARARGS, read_doc}, | |
583 |
|
|
584 | {"decode_mask", pydecode_mask, METH_VARARGS, decode_mask_doc}, | |
584 |
|
|
585 | {NULL}, | |
585 | }; |
|
586 | }; | |
586 |
|
587 | |||
587 | void init_inotify(void) |
|
588 | void init_inotify(void) | |
588 | { |
|
589 | { | |
589 |
|
|
590 | PyObject *mod, *dict; | |
590 |
|
591 | |||
591 |
|
|
592 | if (PyType_Ready(&event_type) == -1) | |
592 | return; |
|
593 | return; | |
593 |
|
594 | |||
594 |
|
|
595 | mod = Py_InitModule3("_inotify", methods, doc); | |
595 |
|
596 | |||
596 |
|
|
597 | dict = PyModule_GetDict(mod); | |
597 |
|
598 | |||
598 |
|
|
599 | if (dict) | |
599 | define_consts(dict); |
|
600 | define_consts(dict); | |
600 | } |
|
601 | } |
@@ -112,7 +112,8 b' class pollable(object):' | |||||
112 | timeout = None |
|
112 | timeout = None | |
113 | timeobj = None |
|
113 | timeobj = None | |
114 | for obj in cls.instances.itervalues(): |
|
114 | for obj in cls.instances.itervalues(): | |
115 | if obj.timeout is not None and (timeout is None |
|
115 | if obj.timeout is not None and (timeout is None | |
|
116 | or obj.timeout < timeout): | |||
116 | timeout, timeobj = obj.timeout, obj |
|
117 | timeout, timeobj = obj.timeout, obj | |
117 | try: |
|
118 | try: | |
118 | events = cls.poll.poll(timeout) |
|
119 | events = cls.poll.poll(timeout) |
@@ -17,7 +17,8 b' import struct' | |||||
17 | import sys |
|
17 | import sys | |
18 | import tempfile |
|
18 | import tempfile | |
19 |
|
19 | |||
20 | class AlreadyStartedException(Exception): |
|
20 | class AlreadyStartedException(Exception): | |
|
21 | pass | |||
21 |
|
22 | |||
22 | def join(a, b): |
|
23 | def join(a, b): | |
23 | if a: |
|
24 | if a: | |
@@ -30,7 +31,7 b' def split(path):' | |||||
30 | c = path.rfind('/') |
|
31 | c = path.rfind('/') | |
31 | if c == -1: |
|
32 | if c == -1: | |
32 | return '', path |
|
33 | return '', path | |
33 | return path[:c], path[c+1:] |
|
34 | return path[:c], path[c + 1:] | |
34 |
|
35 | |||
35 | walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG) |
|
36 | walk_ignored_errors = (errno.ENOENT, errno.ENAMETOOLONG) | |
36 |
|
37 | |||
@@ -332,8 +333,8 b' class socketlistener(object):' | |||||
332 | self.sock.bind(self.sockpath) |
|
333 | self.sock.bind(self.sockpath) | |
333 | except socket.error, err: |
|
334 | except socket.error, err: | |
334 | if err[0] == errno.EADDRINUSE: |
|
335 | if err[0] == errno.EADDRINUSE: | |
335 | raise AlreadyStartedException( |
|
336 | raise AlreadyStartedException(_('cannot start: socket is ' | |
336 |
|
|
337 | 'already bound')) | |
337 | if err[0] == "AF_UNIX path too long": |
|
338 | if err[0] == "AF_UNIX path too long": | |
338 | if os.path.islink(self.sockpath) and \ |
|
339 | if os.path.islink(self.sockpath) and \ | |
339 | not os.path.exists(self.sockpath): |
|
340 | not os.path.exists(self.sockpath): |
@@ -376,7 +376,8 b' class queue(object):' | |||||
376 | try: |
|
376 | try: | |
377 | guards = self.opener(self.guards_path).read().split() |
|
377 | guards = self.opener(self.guards_path).read().split() | |
378 | except IOError, err: |
|
378 | except IOError, err: | |
379 | if err.errno != errno.ENOENT: |
|
379 | if err.errno != errno.ENOENT: | |
|
380 | raise | |||
380 | guards = [] |
|
381 | guards = [] | |
381 | for i, guard in enumerate(guards): |
|
382 | for i, guard in enumerate(guards): | |
382 | bad = self.check_guard(guard) |
|
383 | bad = self.check_guard(guard) | |
@@ -450,9 +451,12 b' class queue(object):' | |||||
450 | for i in items: |
|
451 | for i in items: | |
451 | fp.write("%s\n" % i) |
|
452 | fp.write("%s\n" % i) | |
452 | fp.close() |
|
453 | fp.close() | |
453 | if self.applied_dirty: write_list(map(str, self.applied), self.status_path) |
|
454 | if self.applied_dirty: | |
454 |
|
|
455 | write_list(map(str, self.applied), self.status_path) | |
455 | if self.guards_dirty: write_list(self.active_guards, self.guards_path) |
|
456 | if self.series_dirty: | |
|
457 | write_list(self.full_series, self.series_path) | |||
|
458 | if self.guards_dirty: | |||
|
459 | write_list(self.active_guards, self.guards_path) | |||
456 |
|
460 | |||
457 | def removeundo(self, repo): |
|
461 | def removeundo(self, repo): | |
458 | undo = repo.sjoin('undo') |
|
462 | undo = repo.sjoin('undo') | |
@@ -482,7 +486,7 b' class queue(object):' | |||||
482 |
|
486 | |||
483 | def mergeone(self, repo, mergeq, head, patch, rev, diffopts): |
|
487 | def mergeone(self, repo, mergeq, head, patch, rev, diffopts): | |
484 | # first try just applying the patch |
|
488 | # first try just applying the patch | |
485 | (err, n) = self.apply(repo, [ |
|
489 | (err, n) = self.apply(repo, [patch], update_status=False, | |
486 | strict=True, merge=rev) |
|
490 | strict=True, merge=rev) | |
487 |
|
491 | |||
488 | if err == 0: |
|
492 | if err == 0: | |
@@ -529,7 +533,7 b' class queue(object):' | |||||
529 | return bin(self.applied[-1].rev) |
|
533 | return bin(self.applied[-1].rev) | |
530 | pp = repo.changelog.parents(rev) |
|
534 | pp = repo.changelog.parents(rev) | |
531 | if pp[1] != nullid: |
|
535 | if pp[1] != nullid: | |
532 | arevs = [ |
|
536 | arevs = [x.rev for x in self.applied] | |
533 | p0 = hex(pp[0]) |
|
537 | p0 = hex(pp[0]) | |
534 | p1 = hex(pp[1]) |
|
538 | p1 = hex(pp[1]) | |
535 | if p0 in arevs: |
|
539 | if p0 in arevs: | |
@@ -864,7 +868,8 b' class queue(object):' | |||||
864 | wlock.release() |
|
868 | wlock.release() | |
865 | wlock = None |
|
869 | wlock = None | |
866 | r = self.qrepo() |
|
870 | r = self.qrepo() | |
867 | if r: |
|
871 | if r: | |
|
872 | r.add([patchfn]) | |||
868 | except: |
|
873 | except: | |
869 | repo.rollback() |
|
874 | repo.rollback() | |
870 | raise |
|
875 | raise | |
@@ -941,7 +946,7 b' class queue(object):' | |||||
941 | if not os.path.isfile(self.join(patch)): |
|
946 | if not os.path.isfile(self.join(patch)): | |
942 | try: |
|
947 | try: | |
943 | sno = int(patch) |
|
948 | sno = int(patch) | |
944 | except(ValueError, OverflowError): |
|
949 | except (ValueError, OverflowError): | |
945 | pass |
|
950 | pass | |
946 | else: |
|
951 | else: | |
947 | if -len(self.series) <= sno < len(self.series): |
|
952 | if -len(self.series) <= sno < len(self.series): | |
@@ -957,8 +962,8 b' class queue(object):' | |||||
957 | if res: |
|
962 | if res: | |
958 | i = self.series.index(res) |
|
963 | i = self.series.index(res) | |
959 | try: |
|
964 | try: | |
960 | off = int(patch[minus+1:] or 1) |
|
965 | off = int(patch[minus + 1:] or 1) | |
961 | except(ValueError, OverflowError): |
|
966 | except (ValueError, OverflowError): | |
962 | pass |
|
967 | pass | |
963 | else: |
|
968 | else: | |
964 | if i - off >= 0: |
|
969 | if i - off >= 0: | |
@@ -969,8 +974,8 b' class queue(object):' | |||||
969 | if res: |
|
974 | if res: | |
970 | i = self.series.index(res) |
|
975 | i = self.series.index(res) | |
971 | try: |
|
976 | try: | |
972 | off = int(patch[plus+1:] or 1) |
|
977 | off = int(patch[plus + 1:] or 1) | |
973 | except(ValueError, OverflowError): |
|
978 | except (ValueError, OverflowError): | |
974 | pass |
|
979 | pass | |
975 | else: |
|
980 | else: | |
976 | if i + off < len(self.series): |
|
981 | if i + off < len(self.series): | |
@@ -1106,7 +1111,7 b' class queue(object):' | |||||
1106 |
|
1111 | |||
1107 | if not update: |
|
1112 | if not update: | |
1108 | parents = repo.dirstate.parents() |
|
1113 | parents = repo.dirstate.parents() | |
1109 | rr = [ |
|
1114 | rr = [bin(x.rev) for x in self.applied] | |
1110 | for p in parents: |
|
1115 | for p in parents: | |
1111 | if p in rr: |
|
1116 | if p in rr: | |
1112 | self.ui.warn(_("qpop: forcing dirstate update\n")) |
|
1117 | self.ui.warn(_("qpop: forcing dirstate update\n")) | |
@@ -1306,7 +1311,8 b' class queue(object):' | |||||
1306 | f = repo.file(dst) |
|
1311 | f = repo.file(dst) | |
1307 | src = f.renamed(man[dst]) |
|
1312 | src = f.renamed(man[dst]) | |
1308 | if src: |
|
1313 | if src: | |
1309 | copies.setdefault(src[0], []).extend( |
|
1314 | copies.setdefault(src[0], []).extend( | |
|
1315 | copies.get(dst, [])) | |||
1310 | if dst in a: |
|
1316 | if dst in a: | |
1311 | copies[src[0]].append(dst) |
|
1317 | copies[src[0]].append(dst) | |
1312 | # we can't copy a file created by the patch itself |
|
1318 | # we can't copy a file created by the patch itself | |
@@ -1440,8 +1446,8 b' class queue(object):' | |||||
1440 | length = len(self.series) - start |
|
1446 | length = len(self.series) - start | |
1441 | if not missing: |
|
1447 | if not missing: | |
1442 | if self.ui.verbose: |
|
1448 | if self.ui.verbose: | |
1443 | idxwidth = len(str(start+length - 1)) |
|
1449 | idxwidth = len(str(start + length - 1)) | |
1444 | for i in xrange(start, start+length): |
|
1450 | for i in xrange(start, start + length): | |
1445 | patch = self.series[i] |
|
1451 | patch = self.series[i] | |
1446 | if patch in applied: |
|
1452 | if patch in applied: | |
1447 | stat = 'A' |
|
1453 | stat = 'A' | |
@@ -1493,7 +1499,7 b' class queue(object):' | |||||
1493 | elif line.startswith('Dirstate:'): |
|
1499 | elif line.startswith('Dirstate:'): | |
1494 | l = line.rstrip() |
|
1500 | l = line.rstrip() | |
1495 | l = l[10:].split(' ') |
|
1501 | l = l[10:].split(' ') | |
1496 | qpp = [ |
|
1502 | qpp = [bin(x) for x in l] | |
1497 | elif datastart != None: |
|
1503 | elif datastart != None: | |
1498 | l = line.rstrip() |
|
1504 | l = line.rstrip() | |
1499 | se = statusentry(l) |
|
1505 | se = statusentry(l) | |
@@ -1542,7 +1548,7 b' class queue(object):' | |||||
1542 | self.ui.warn(_("status is already saved\n")) |
|
1548 | self.ui.warn(_("status is already saved\n")) | |
1543 | return 1 |
|
1549 | return 1 | |
1544 |
|
1550 | |||
1545 | ar = [ |
|
1551 | ar = [':' + x for x in self.full_series] | |
1546 | if not msg: |
|
1552 | if not msg: | |
1547 | msg = _("hg patches saved state") |
|
1553 | msg = _("hg patches saved state") | |
1548 | else: |
|
1554 | else: | |
@@ -1694,7 +1700,8 b' class queue(object):' | |||||
1694 | try: |
|
1700 | try: | |
1695 | if filename == '-': |
|
1701 | if filename == '-': | |
1696 | if not patchname: |
|
1702 | if not patchname: | |
1697 | raise util.Abort( |
|
1703 | raise util.Abort( | |
|
1704 | _('need --name to import a patch from -')) | |||
1698 | text = sys.stdin.read() |
|
1705 | text = sys.stdin.read() | |
1699 | else: |
|
1706 | else: | |
1700 | text = url.open(self.ui, filename).read() |
|
1707 | text = url.open(self.ui, filename).read() | |
@@ -1912,7 +1919,8 b' def commit(ui, repo, *pats, **opts):' | |||||
1912 | """commit changes in the queue repository""" |
|
1919 | """commit changes in the queue repository""" | |
1913 | q = repo.mq |
|
1920 | q = repo.mq | |
1914 | r = q.qrepo() |
|
1921 | r = q.qrepo() | |
1915 | if not r: raise util.Abort('no queue repository') |
|
1922 | if not r: | |
|
1923 | raise util.Abort('no queue repository') | |||
1916 | commands.commit(r.ui, r, *pats, **opts) |
|
1924 | commands.commit(r.ui, r, *pats, **opts) | |
1917 |
|
1925 | |||
1918 | def series(ui, repo, **opts): |
|
1926 | def series(ui, repo, **opts): | |
@@ -1925,7 +1933,7 b' def top(ui, repo, **opts):' | |||||
1925 | q = repo.mq |
|
1933 | q = repo.mq | |
1926 | t = q.applied and q.series_end(True) or 0 |
|
1934 | t = q.applied and q.series_end(True) or 0 | |
1927 | if t: |
|
1935 | if t: | |
1928 | return q.qseries(repo, start=t-1, length=1, status='A', |
|
1936 | return q.qseries(repo, start=t - 1, length=1, status='A', | |
1929 | summary=opts.get('summary')) |
|
1937 | summary=opts.get('summary')) | |
1930 | else: |
|
1938 | else: | |
1931 | ui.write(_("no patches applied\n")) |
|
1939 | ui.write(_("no patches applied\n")) | |
@@ -1950,7 +1958,7 b' def prev(ui, repo, **opts):' | |||||
1950 | if not l: |
|
1958 | if not l: | |
1951 | ui.write(_("no patches applied\n")) |
|
1959 | ui.write(_("no patches applied\n")) | |
1952 | return 1 |
|
1960 | return 1 | |
1953 | return q.qseries(repo, start=l-2, length=1, status='A', |
|
1961 | return q.qseries(repo, start=l - 2, length=1, status='A', | |
1954 | summary=opts.get('summary')) |
|
1962 | summary=opts.get('summary')) | |
1955 |
|
1963 | |||
1956 | def setupheaderopts(ui, opts): |
|
1964 | def setupheaderopts(ui, opts): | |
@@ -1984,7 +1992,8 b' def new(ui, repo, patch, *args, **opts):' | |||||
1984 | information. |
|
1992 | information. | |
1985 | """ |
|
1993 | """ | |
1986 | msg = cmdutil.logmessage(opts) |
|
1994 | msg = cmdutil.logmessage(opts) | |
1987 | def getmsg(): return ui.edit(msg, ui.username()) |
|
1995 | def getmsg(): | |
|
1996 | return ui.edit(msg, ui.username()) | |||
1988 | q = repo.mq |
|
1997 | q = repo.mq | |
1989 | opts['msg'] = msg |
|
1998 | opts['msg'] = msg | |
1990 | if opts.get('edit'): |
|
1999 | if opts.get('edit'): | |
@@ -2272,7 +2281,8 b' def rename(ui, repo, patch, name=None, *' | |||||
2272 | raise util.Abort(_('%s already exists') % absdest) |
|
2281 | raise util.Abort(_('%s already exists') % absdest) | |
2273 |
|
2282 | |||
2274 | if name in q.series: |
|
2283 | if name in q.series: | |
2275 | raise util.Abort(_('A patch named %s already exists in the series file') % name) |
|
2284 | raise util.Abort( | |
|
2285 | _('A patch named %s already exists in the series file') % name) | |||
2276 |
|
2286 | |||
2277 | if ui.verbose: |
|
2287 | if ui.verbose: | |
2278 | ui.write('renaming %s to %s\n' % (patch, name)) |
|
2288 | ui.write('renaming %s to %s\n' % (patch, name)) | |
@@ -2464,7 +2474,7 b' def select(ui, repo, *args, **opts):' | |||||
2464 | if i == 0: |
|
2474 | if i == 0: | |
2465 | q.pop(repo, all=True) |
|
2475 | q.pop(repo, all=True) | |
2466 | else: |
|
2476 | else: | |
2467 | q.pop(repo, i-1) |
|
2477 | q.pop(repo, i - 1) | |
2468 | break |
|
2478 | break | |
2469 | if popped: |
|
2479 | if popped: | |
2470 | try: |
|
2480 | try: | |
@@ -2575,8 +2585,8 b' def reposetup(ui, repo):' | |||||
2575 | start = lrev + 1 |
|
2585 | start = lrev + 1 | |
2576 | if start < qbase: |
|
2586 | if start < qbase: | |
2577 | # update the cache (excluding the patches) and save it |
|
2587 | # update the cache (excluding the patches) and save it | |
2578 | self._updatebranchcache(partial, lrev+1, qbase) |
|
2588 | self._updatebranchcache(partial, lrev + 1, qbase) | |
2579 | self._writebranchcache(partial, cl.node(qbase-1), qbase-1) |
|
2589 | self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1) | |
2580 | start = qbase |
|
2590 | start = qbase | |
2581 | # if start = qbase, the cache is as updated as it should be. |
|
2591 | # if start = qbase, the cache is as updated as it should be. | |
2582 | # if start > qbase, the cache includes (part of) the patches. |
|
2592 | # if start > qbase, the cache includes (part of) the patches. | |
@@ -2591,7 +2601,8 b' def reposetup(ui, repo):' | |||||
2591 | repo.__class__ = mqrepo |
|
2601 | repo.__class__ = mqrepo | |
2592 |
|
2602 | |||
2593 | def mqimport(orig, ui, repo, *args, **kwargs): |
|
2603 | def mqimport(orig, ui, repo, *args, **kwargs): | |
2594 | if hasattr(repo, 'abort_if_wdir_patched') |
|
2604 | if (hasattr(repo, 'abort_if_wdir_patched') | |
|
2605 | and not kwargs.get('no_commit', False)): | |||
2595 | repo.abort_if_wdir_patched(_('cannot import over an applied patch'), |
|
2606 | repo.abort_if_wdir_patched(_('cannot import over an applied patch'), | |
2596 | kwargs.get('force')) |
|
2607 | kwargs.get('force')) | |
2597 | return orig(ui, repo, *args, **kwargs) |
|
2608 | return orig(ui, repo, *args, **kwargs) | |
@@ -2688,11 +2699,16 b' cmdtable = {' | |||||
2688 | (refresh, |
|
2699 | (refresh, | |
2689 | [('e', 'edit', None, _('edit commit message')), |
|
2700 | [('e', 'edit', None, _('edit commit message')), | |
2690 | ('g', 'git', None, _('use git extended diff format')), |
|
2701 | ('g', 'git', None, _('use git extended diff format')), | |
2691 | ('s', 'short', None, _('refresh only files already in the patch and specified files')), |
|
2702 | ('s', 'short', None, | |
2692 | ('U', 'currentuser', None, _('add/update author field in patch with current user')), |
|
2703 | _('refresh only files already in the patch and specified files')), | |
2693 | ('u', 'user', '', _('add/update author field in patch with given user')), |
|
2704 | ('U', 'currentuser', None, | |
2694 |
|
|
2705 | _('add/update author field in patch with current user')), | |
2695 | ('d', 'date', '', _('add/update date field in patch with given date')) |
|
2706 | ('u', 'user', '', | |
|
2707 | _('add/update author field in patch with given user')), | |||
|
2708 | ('D', 'currentdate', None, | |||
|
2709 | _('add/update date field in patch with current date')), | |||
|
2710 | ('d', 'date', '', | |||
|
2711 | _('add/update date field in patch with given date')) | |||
2696 | ] + commands.walkopts + commands.commitopts, |
|
2712 | ] + commands.walkopts + commands.commitopts, | |
2697 | _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')), |
|
2713 | _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')), | |
2698 | 'qrename|qmv': |
|
2714 | 'qrename|qmv': |
@@ -134,7 +134,7 b' class notifier(object):' | |||||
134 | c = path.find('/') |
|
134 | c = path.find('/') | |
135 | if c == -1: |
|
135 | if c == -1: | |
136 | break |
|
136 | break | |
137 | path = path[c+1:] |
|
137 | path = path[c + 1:] | |
138 | count -= 1 |
|
138 | count -= 1 | |
139 | return path |
|
139 | return path | |
140 |
|
140 | |||
@@ -216,7 +216,7 b' class notifier(object):' | |||||
216 | subject = '%s: %s' % (self.root, s) |
|
216 | subject = '%s: %s' % (self.root, s) | |
217 | maxsubject = int(self.ui.config('notify', 'maxsubject', 67)) |
|
217 | maxsubject = int(self.ui.config('notify', 'maxsubject', 67)) | |
218 | if maxsubject and len(subject) > maxsubject: |
|
218 | if maxsubject and len(subject) > maxsubject: | |
219 | subject = subject[:maxsubject-3] + '...' |
|
219 | subject = subject[:maxsubject - 3] + '...' | |
220 | msg['Subject'] = mail.headencode(self.ui, subject, |
|
220 | msg['Subject'] = mail.headencode(self.ui, subject, | |
221 | self.charsets, self.test) |
|
221 | self.charsets, self.test) | |
222 |
|
222 |
@@ -67,7 +67,7 b' def reposetup(ui, repo):' | |||||
67 | p = cl.parentrevs(rev) |
|
67 | p = cl.parentrevs(rev) | |
68 | if j < len(suffix) and suffix[j].isdigit(): |
|
68 | if j < len(suffix) and suffix[j].isdigit(): | |
69 | j += 1 |
|
69 | j += 1 | |
70 | n = int(suffix[i+1:j]) |
|
70 | n = int(suffix[i + 1:j]) | |
71 | if n > 2 or n == 2 and p[1] == -1: |
|
71 | if n > 2 or n == 2 and p[1] == -1: | |
72 | raise |
|
72 | raise | |
73 | else: |
|
73 | else: | |
@@ -85,7 +85,7 b' def reposetup(ui, repo):' | |||||
85 | j += 1 |
|
85 | j += 1 | |
86 | if j == i + 1: |
|
86 | if j == i + 1: | |
87 | raise |
|
87 | raise | |
88 | n = int(suffix[i+1:j]) |
|
88 | n = int(suffix[i + 1:j]) | |
89 | for k in xrange(n): |
|
89 | for k in xrange(n): | |
90 | rev = cl.parentrevs(rev)[0] |
|
90 | rev = cl.parentrevs(rev)[0] | |
91 | i = j |
|
91 | i = j |
@@ -80,7 +80,7 b' def prompt(ui, prompt, default=None, res' | |||||
80 | if not ui.interactive(): |
|
80 | if not ui.interactive(): | |
81 | if default is not None: |
|
81 | if default is not None: | |
82 | return default |
|
82 | return default | |
83 | raise util.Abort(_("%s Please enter a valid value" % (prompt+rest))) |
|
83 | raise util.Abort(_("%s Please enter a valid value" % (prompt + rest))) | |
84 | if default: |
|
84 | if default: | |
85 | prompt += ' [%s]' % default |
|
85 | prompt += ' [%s]' % default | |
86 | prompt += rest |
|
86 | prompt += rest |
@@ -297,7 +297,8 b' def filterpatch(ui, chunks):' | |||||
297 | doc = gettext(record.__doc__) |
|
297 | doc = gettext(record.__doc__) | |
298 | c = doc.find(_('y - record this change')) |
|
298 | c = doc.find(_('y - record this change')) | |
299 | for l in doc[c:].splitlines(): |
|
299 | for l in doc[c:].splitlines(): | |
300 | if l: |
|
300 | if l: | |
|
301 | ui.write(l.strip(), '\n') | |||
301 | continue |
|
302 | continue | |
302 | elif r == 0: # yes |
|
303 | elif r == 0: # yes | |
303 | ret = True |
|
304 | ret = True | |
@@ -437,8 +438,10 b' def dorecord(ui, repo, committer, *pats,' | |||||
437 |
|
438 | |||
438 | contenders = set() |
|
439 | contenders = set() | |
439 | for h in chunks: |
|
440 | for h in chunks: | |
440 | try: contenders.update(set(h.files())) |
|
441 | try: | |
441 | except AttributeError: pass |
|
442 | contenders.update(set(h.files())) | |
|
443 | except AttributeError: | |||
|
444 | pass | |||
442 |
|
445 | |||
443 | changed = changes[0] + changes[1] + changes[2] |
|
446 | changed = changes[0] + changes[1] + changes[2] | |
444 | newfiles = [f for f in changed if f in contenders] |
|
447 | newfiles = [f for f in changed if f in contenders] |
@@ -64,7 +64,7 b' class ShortRepository(object):' | |||||
64 | parts = parts[:-1] |
|
64 | parts = parts[:-1] | |
65 | else: |
|
65 | else: | |
66 | tail = '' |
|
66 | tail = '' | |
67 | context = dict((str(i+1), v) for i, v in enumerate(parts)) |
|
67 | context = dict((str(i + 1), v) for i, v in enumerate(parts)) | |
68 | url = ''.join(self.templater.process(self.url, context)) + tail |
|
68 | url = ''.join(self.templater.process(self.url, context)) + tail | |
69 | return hg._lookup(url).instance(ui, url, create) |
|
69 | return hg._lookup(url).instance(ui, url, create) | |
70 |
|
70 |
@@ -489,7 +489,7 b' def transplant(ui, repo, *revs, **opts):' | |||||
489 |
|
489 | |||
490 | def incwalk(repo, incoming, branches, match=util.always): |
|
490 | def incwalk(repo, incoming, branches, match=util.always): | |
491 | if not branches: |
|
491 | if not branches: | |
492 | branches=None |
|
492 | branches = None | |
493 | for node in repo.changelog.nodesbetween(incoming, branches)[0]: |
|
493 | for node in repo.changelog.nodesbetween(incoming, branches)[0]: | |
494 | if match(node): |
|
494 | if match(node): | |
495 | yield node |
|
495 | yield node | |
@@ -506,7 +506,7 b' def transplant(ui, repo, *revs, **opts):' | |||||
506 |
|
506 | |||
507 | def checkopts(opts, revs): |
|
507 | def checkopts(opts, revs): | |
508 | if opts.get('continue'): |
|
508 | if opts.get('continue'): | |
509 | if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')): |
|
509 | if opts.get('branch') or opts.get('all') or opts.get('merge'): | |
510 | raise util.Abort(_('--continue is incompatible with ' |
|
510 | raise util.Abort(_('--continue is incompatible with ' | |
511 | 'branch, all or merge')) |
|
511 | 'branch, all or merge')) | |
512 | return |
|
512 | return |
@@ -37,7 +37,7 b' import mercurial.hgweb.wsgicgi as wsgicg' | |||||
37 | # [collections] |
|
37 | # [collections] | |
38 | # /prefix/to/strip/off = /root/of/tree/full/of/repos |
|
38 | # /prefix/to/strip/off = /root/of/tree/full/of/repos | |
39 | # |
|
39 | # | |
40 | # paths example: |
|
40 | # paths example: | |
41 | # |
|
41 | # | |
42 | # * First two lines mount one repository into one virtual path, like |
|
42 | # * First two lines mount one repository into one virtual path, like | |
43 | # '/real/path1' into 'virtual/path1'. |
|
43 | # '/real/path1' into 'virtual/path1'. |
@@ -205,7 +205,8 b' def archive(repo, dest, node, kind, deco' | |||||
205 | prefix is name of path to put before every archive member.''' |
|
205 | prefix is name of path to put before every archive member.''' | |
206 |
|
206 | |||
207 | def write(name, mode, islink, getdata): |
|
207 | def write(name, mode, islink, getdata): | |
208 | if matchfn and not matchfn(name): |
|
208 | if matchfn and not matchfn(name): | |
|
209 | return | |||
209 | data = getdata() |
|
210 | data = getdata() | |
210 | if decode: |
|
211 | if decode: | |
211 | data = repo.wwritedata(name, data) |
|
212 | data = repo.wwritedata(name, data) |
@@ -105,18 +105,24 b' b85decode(PyObject *self, PyObject *args' | |||||
105 | { |
|
105 | { | |
106 | c = b85dec[(int)*text++] - 1; |
|
106 | c = b85dec[(int)*text++] - 1; | |
107 | if (c < 0) |
|
107 | if (c < 0) | |
108 | return PyErr_Format(PyExc_ValueError, "Bad base85 character at position %d", i); |
|
108 | return PyErr_Format( | |
|
109 | PyExc_ValueError, | |||
|
110 | "Bad base85 character at position %d", i); | |||
109 | acc = acc * 85 + c; |
|
111 | acc = acc * 85 + c; | |
110 | } |
|
112 | } | |
111 | if (i++ < len) |
|
113 | if (i++ < len) | |
112 | { |
|
114 | { | |
113 | c = b85dec[(int)*text++] - 1; |
|
115 | c = b85dec[(int)*text++] - 1; | |
114 | if (c < 0) |
|
116 | if (c < 0) | |
115 | return PyErr_Format(PyExc_ValueError, "Bad base85 character at position %d", i); |
|
117 | return PyErr_Format( | |
|
118 | PyExc_ValueError, | |||
|
119 | "Bad base85 character at position %d", i); | |||
116 | /* overflow detection: 0xffffffff == "|NsC0", |
|
120 | /* overflow detection: 0xffffffff == "|NsC0", | |
117 | * "|NsC" == 0x03030303 */ |
|
121 | * "|NsC" == 0x03030303 */ | |
118 | if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c) |
|
122 | if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c) | |
119 | return PyErr_Format(PyExc_ValueError, "Bad base85 sequence at position %d", i); |
|
123 | return PyErr_Format( | |
|
124 | PyExc_ValueError, | |||
|
125 | "Bad base85 sequence at position %d", i); | |||
120 | acc += c; |
|
126 | acc += c; | |
121 | } |
|
127 | } | |
122 |
|
128 |
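
A note on the overflow guard in the hunk above: a five-character base85 group encodes one 32-bit word, and 0x03030303 * 85 is exactly 0xffffffff, which is why "|NsC" corresponds to 0x03030303 and "|NsC0" to 0xffffffff. A rough Python sketch of the same check, assuming the alphabet used by base85.c:

    b85chars = ("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~")
    b85dec = dict((c, i) for i, c in enumerate(b85chars))

    def decode_group(group):
        # decode one 5-character group into a 32-bit value, refusing overflow
        acc = 0
        for pos, ch in enumerate(group):
            d = b85dec[ch]                    # KeyError means a bad character
            if acc > 0x03030303 or acc * 85 > 0xffffffff - d:
                raise ValueError("bad base85 sequence at position %d" % pos)
            acc = acc * 85 + d
        return acc

    assert decode_group("|NsC0") == 0xffffffff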
@@ -15,11 +15,11 b'' | |||||
15 | #include <limits.h> |
|
15 | #include <limits.h> | |
16 |
|
16 | |||
17 | #if defined __hpux || defined __SUNPRO_C || defined _AIX |
|
17 | #if defined __hpux || defined __SUNPRO_C || defined _AIX | |
18 | # define inline |
|
18 | #define inline | |
19 | #endif |
|
19 | #endif | |
20 |
|
20 | |||
21 | #ifdef __linux |
|
21 | #ifdef __linux | |
22 | # define inline __inline |
|
22 | #define inline __inline | |
23 | #endif |
|
23 | #endif | |
24 |
|
24 | |||
25 | #ifdef _WIN32 |
|
25 | #ifdef _WIN32 | |
@@ -267,19 +267,21 b' static struct hunklist diff(struct line ' | |||||
267 |
|
267 | |||
268 | /* normalize the hunk list, try to push each hunk towards the end */ |
|
268 | /* normalize the hunk list, try to push each hunk towards the end */ | |
269 | for (curr = l.base; curr != l.head; curr++) { |
|
269 | for (curr = l.base; curr != l.head; curr++) { | |
270 | struct hunk *next = curr+1; |
|
270 | struct hunk *next = curr + 1; | |
271 | int shift = 0; |
|
271 | int shift = 0; | |
272 |
|
272 | |||
273 | if (next == l.head) |
|
273 | if (next == l.head) | |
274 | break; |
|
274 | break; | |
275 |
|
275 | |||
276 | if (curr->a2 == next->a1) |
|
276 | if (curr->a2 == next->a1) | |
277 | while (curr->a2+shift < an && curr->b2+shift < bn |
|
277 | while (curr->a2 + shift < an && curr->b2 + shift < bn | |
278 | && !cmp(a+curr->a2+shift, b+curr->b2+shift)) |
|
278 | && !cmp(a + curr->a2 + shift, | |
|
279 | b + curr->b2 + shift)) | |||
279 | shift++; |
|
280 | shift++; | |
280 | else if (curr->b2 == next->b1) |
|
281 | else if (curr->b2 == next->b1) | |
281 | while (curr->b2+shift < bn && curr->a2+shift < an |
|
282 | while (curr->b2 + shift < bn && curr->a2 + shift < an | |
282 | && !cmp(b+curr->b2+shift, a+curr->a2+shift)) |
|
283 | && !cmp(b + curr->b2 + shift, | |
|
284 | a + curr->a2 + shift)) | |||
283 | shift++; |
|
285 | shift++; | |
284 | if (!shift) |
|
286 | if (!shift) | |
285 | continue; |
|
287 | continue; |
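
The normalization loop above pushes the end of a changed region towards the end of the file while the next line still matches on both sides. A minimal Python rendering of that shift count (the helper name is illustrative, not from bdiff.c):

    def slide_forward(a, b, a2, b2):
        """Count how far a changed region ending at index a2 in a and b2 in b
        can be pushed down: advance while the next line matches on both sides."""
        shift = 0
        while (a2 + shift < len(a) and b2 + shift < len(b)
               and a[a2 + shift] == b[b2 + shift]):
            shift += 1
        return shift

    # one matching line follows the region, so the boundary can move down by 1
    assert slide_forward(["x", "a", "a"], ["x", "p", "a", "a"], 2, 3) == 1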
@@ -73,7 +73,8 b' class bundlerevlog(revlog.revlog):' | |||||
73 | if rev < 0: |
|
73 | if rev < 0: | |
74 | return False |
|
74 | return False | |
75 | return rev in self.basemap |
|
75 | return rev in self.basemap | |
76 | def bundlebase(self, rev): return self.basemap[rev] |
|
76 | def bundlebase(self, rev): | |
|
77 | return self.basemap[rev] | |||
77 | def _chunk(self, rev): |
|
78 | def _chunk(self, rev): | |
78 | # Warning: in case of bundle, the diff is against bundlebase, |
|
79 | # Warning: in case of bundle, the diff is against bundlebase, | |
79 | # not against rev - 1 |
|
80 | # not against rev - 1 | |
@@ -98,7 +99,8 b' class bundlerevlog(revlog.revlog):' | |||||
98 |
|
99 | |||
99 | def revision(self, node): |
|
100 | def revision(self, node): | |
100 | """return an uncompressed revision of a given""" |
|
101 | """return an uncompressed revision of a given""" | |
101 | if node == nullid: return "" |
|
102 | if node == nullid: | |
|
103 | return "" | |||
102 |
|
104 | |||
103 | text = None |
|
105 | text = None | |
104 | chain = [] |
|
106 | chain = [] |
@@ -310,7 +310,8 b' class FTPRangeHandler(urllib2.FTPHandler' | |||||
310 | (fb, lb) = range_tup |
|
310 | (fb, lb) = range_tup | |
311 | if lb == '': |
|
311 | if lb == '': | |
312 | if retrlen is None or retrlen == 0: |
|
312 | if retrlen is None or retrlen == 0: | |
313 | raise RangeError('Requested Range Not Satisfiable due to unobtainable file length.') |
|
313 | raise RangeError('Requested Range Not Satisfiable due' | |
|
314 | ' to unobtainable file length.') | |||
314 | lb = retrlen |
|
315 | lb = retrlen | |
315 | retrlen = lb - fb |
|
316 | retrlen = lb - fb | |
316 | if retrlen < 0: |
|
317 | if retrlen < 0: |
@@ -78,7 +78,7 b' class appender(object):' | |||||
78 | doff = self.offset - self.size |
|
78 | doff = self.offset - self.size | |
79 | self.data.insert(0, "".join(self.data)) |
|
79 | self.data.insert(0, "".join(self.data)) | |
80 | del self.data[1:] |
|
80 | del self.data[1:] | |
81 | s = self.data[0][doff:doff+count] |
|
81 | s = self.data[0][doff:doff + count] | |
82 | self.offset += len(s) |
|
82 | self.offset += len(s) | |
83 | ret += s |
|
83 | ret += s | |
84 | return ret |
|
84 | return ret |
@@ -93,7 +93,8 b' def loglimit(opts):' | |||||
93 | limit = int(limit) |
|
93 | limit = int(limit) | |
94 | except ValueError: |
|
94 | except ValueError: | |
95 | raise util.Abort(_('limit must be a positive integer')) |
|
95 | raise util.Abort(_('limit must be a positive integer')) | |
96 | if limit <= 0: raise util.Abort(_('limit must be positive')) |
|
96 | if limit <= 0: | |
|
97 | raise util.Abort(_('limit must be positive')) | |||
97 | else: |
|
98 | else: | |
98 | limit = None |
|
99 | limit = None | |
99 | return limit |
|
100 | return limit | |
@@ -166,7 +167,7 b' def revrange(repo, revs):' | |||||
166 | start = revfix(repo, start, 0) |
|
167 | start = revfix(repo, start, 0) | |
167 | end = revfix(repo, end, len(repo) - 1) |
|
168 | end = revfix(repo, end, len(repo) - 1) | |
168 | step = start > end and -1 or 1 |
|
169 | step = start > end and -1 or 1 | |
169 | for rev in xrange(start, end+step, step): |
|
170 | for rev in xrange(start, end + step, step): | |
170 | if rev in seen: |
|
171 | if rev in seen: | |
171 | continue |
|
172 | continue | |
172 | seen.add(rev) |
|
173 | seen.add(rev) | |
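
The "start > end and -1 or 1" pattern above is the pre-Python-2.5 spelling of a conditional expression: it evaluates to -1 for a reverse walk and to 1 otherwise, and is safe here only because both -1 and 1 are truthy. A quick standalone check:

    start, end = 9, 3
    step = start > end and -1 or 1      # same as: -1 if start > end else 1
    revs = list(range(start, end + step, step))
    assert revs == [9, 8, 7, 6, 5, 4, 3]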
@@ -569,21 +570,21 b' def service(opts, parentfn=None, initfn=' | |||||
569 | if opts['daemon'] and not opts['daemon_pipefds']: |
|
570 | if opts['daemon'] and not opts['daemon_pipefds']: | |
570 | # Signal child process startup with file removal |
|
571 | # Signal child process startup with file removal | |
571 | lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-') |
|
572 | lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-') | |
572 | os.close(lockfd) |
|
573 | os.close(lockfd) | |
573 | try: |
|
574 | try: | |
574 | if not runargs: |
|
575 | if not runargs: | |
575 | runargs = util.hgcmd() + sys.argv[1:] |
|
576 | runargs = util.hgcmd() + sys.argv[1:] | |
576 | runargs.append('--daemon-pipefds=%s' % lockpath) |
|
577 | runargs.append('--daemon-pipefds=%s' % lockpath) | |
577 | # Don't pass --cwd to the child process, because we've already |
|
578 | # Don't pass --cwd to the child process, because we've already | |
578 | # changed directory. |
|
579 | # changed directory. | |
579 | for i in xrange(1,len(runargs)): |
|
580 | for i in xrange(1, len(runargs)): | |
580 | if runargs[i].startswith('--cwd='): |
|
581 | if runargs[i].startswith('--cwd='): | |
581 | del runargs[i] |
|
582 | del runargs[i] | |
582 | break |
|
583 | break | |
583 | elif runargs[i].startswith('--cwd'): |
|
584 | elif runargs[i].startswith('--cwd'): | |
584 | del runargs[i:i+2] |
|
585 | del runargs[i:i + 2] | |
585 | break |
|
586 | break | |
586 | pid = util.spawndetached(runargs) |
|
587 | pid = util.spawndetached(runargs) | |
587 | while os.path.exists(lockpath): |
|
588 | while os.path.exists(lockpath): | |
588 | time.sleep(0.1) |
|
589 | time.sleep(0.1) | |
589 | finally: |
|
590 | finally: | |
@@ -903,13 +904,15 b' def show_changeset(ui, repo, opts, buffe' | |||||
903 | if not os.path.split(mapfile)[0]: |
|
904 | if not os.path.split(mapfile)[0]: | |
904 | mapname = (templater.templatepath('map-cmdline.' + mapfile) |
|
905 | mapname = (templater.templatepath('map-cmdline.' + mapfile) | |
905 | or templater.templatepath(mapfile)) |
|
906 | or templater.templatepath(mapfile)) | |
906 | if mapname: mapfile = mapname |
|
907 | if mapname: | |
|
908 | mapfile = mapname | |||
907 |
|
909 | |||
908 | try: |
|
910 | try: | |
909 | t = changeset_templater(ui, repo, patch, opts, mapfile, buffered) |
|
911 | t = changeset_templater(ui, repo, patch, opts, mapfile, buffered) | |
910 | except SyntaxError, inst: |
|
912 | except SyntaxError, inst: | |
911 | raise util.Abort(inst.args[0]) |
|
913 | raise util.Abort(inst.args[0]) | |
912 | if tmpl: t.use_template(tmpl) |
|
914 | if tmpl: | |
|
915 | t.use_template(tmpl) | |||
913 | return t |
|
916 | return t | |
914 |
|
917 | |||
915 | def finddate(ui, repo, date): |
|
918 | def finddate(ui, repo, date): | |
@@ -951,13 +954,13 b' def walkchangerevs(repo, match, opts, pr' | |||||
951 | def increasing_windows(start, end, windowsize=8, sizelimit=512): |
|
954 | def increasing_windows(start, end, windowsize=8, sizelimit=512): | |
952 | if start < end: |
|
955 | if start < end: | |
953 | while start < end: |
|
956 | while start < end: | |
954 | yield start, min(windowsize, end-start) |
|
957 | yield start, min(windowsize, end - start) | |
955 | start += windowsize |
|
958 | start += windowsize | |
956 | if windowsize < sizelimit: |
|
959 | if windowsize < sizelimit: | |
957 | windowsize *= 2 |
|
960 | windowsize *= 2 | |
958 | else: |
|
961 | else: | |
959 | while start > end: |
|
962 | while start > end: | |
960 | yield start, min(windowsize, start-end-1) |
|
963 | yield start, min(windowsize, start - end - 1) | |
961 | start -= windowsize |
|
964 | start -= windowsize | |
962 | if windowsize < sizelimit: |
|
965 | if windowsize < sizelimit: | |
963 | windowsize *= 2 |
|
966 | windowsize *= 2 | |
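
Pulled out on its own, the forward branch of the window generator above yields (start, length) pairs whose length doubles until it reaches the size limit, so long revision ranges are scanned in progressively larger slices:

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        while start < end:
            yield start, min(windowsize, end - start)
            start += windowsize
            if windowsize < sizelimit:
                windowsize *= 2

    assert list(increasing_windows(0, 30)) == [(0, 8), (8, 16), (24, 6)]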
@@ -1014,7 +1017,8 b' def walkchangerevs(repo, match, opts, pr' | |||||
1014 | # A zero count may be a directory or deleted file, so |
|
1017 | # A zero count may be a directory or deleted file, so | |
1015 | # try to find matching entries on the slow path. |
|
1018 | # try to find matching entries on the slow path. | |
1016 | if follow: |
|
1019 | if follow: | |
1017 | raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_) |
|
1020 | raise util.Abort( | |
|
1021 | _('cannot follow nonexistent file: "%s"') % file_) | |||
1018 | slowpath = True |
|
1022 | slowpath = True | |
1019 | break |
|
1023 | break | |
1020 | else: |
|
1024 | else: | |
@@ -1088,7 +1092,7 b' def walkchangerevs(repo, match, opts, pr' | |||||
1088 | rev = repo.changelog.rev(repo.lookup(rev)) |
|
1092 | rev = repo.changelog.rev(repo.lookup(rev)) | |
1089 | ff = followfilter() |
|
1093 | ff = followfilter() | |
1090 | stop = min(revs[0], revs[-1]) |
|
1094 | stop = min(revs[0], revs[-1]) | |
1091 | for x in xrange(rev, stop-1, -1): |
|
1095 | for x in xrange(rev, stop - 1, -1): | |
1092 | if ff.match(x): |
|
1096 | if ff.match(x): | |
1093 | wanted.discard(x) |
|
1097 | wanted.discard(x) | |
1094 |
|
1098 | |||
@@ -1103,7 +1107,7 b' def walkchangerevs(repo, match, opts, pr' | |||||
1103 |
|
1107 | |||
1104 | for i, window in increasing_windows(0, len(revs)): |
|
1108 | for i, window in increasing_windows(0, len(revs)): | |
1105 | change = util.cachefunc(repo.changectx) |
|
1109 | change = util.cachefunc(repo.changectx) | |
1106 | nrevs = [rev for rev in revs[i:i+window] if want(rev)] |
|
1110 | nrevs = [rev for rev in revs[i:i + window] if want(rev)] | |
1107 | for rev in sorted(nrevs): |
|
1111 | for rev in sorted(nrevs): | |
1108 | fns = fncache.get(rev) |
|
1112 | fns = fncache.get(rev) | |
1109 | ctx = change(rev) |
|
1113 | ctx = change(rev) |
@@ -34,7 +34,7 b' def add(ui, repo, *pats, **opts):' | |||||
34 | names = [] |
|
34 | names = [] | |
35 | m = cmdutil.match(repo, pats, opts) |
|
35 | m = cmdutil.match(repo, pats, opts) | |
36 | oldbad = m.bad |
|
36 | oldbad = m.bad | |
37 | m.bad = lambda x,y: bad.append(x) or oldbad(x,y) |
|
37 | m.bad = lambda x, y: bad.append(x) or oldbad(x, y) | |
38 |
|
38 | |||
39 | for f in repo.walk(m): |
|
39 | for f in repo.walk(m): | |
40 | exact = m.exact(f) |
|
40 | exact = m.exact(f) | |
@@ -69,7 +69,7 b' def addremove(ui, repo, *pats, **opts):' | |||||
69 | raise util.Abort(_('similarity must be a number')) |
|
69 | raise util.Abort(_('similarity must be a number')) | |
70 | if sim < 0 or sim > 100: |
|
70 | if sim < 0 or sim > 100: | |
71 | raise util.Abort(_('similarity must be between 0 and 100')) |
|
71 | raise util.Abort(_('similarity must be between 0 and 100')) | |
72 | return cmdutil.addremove(repo, pats, opts, similarity=sim/100.) |
|
72 | return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0) | |
73 |
|
73 | |||
74 | def annotate(ui, repo, *pats, **opts): |
|
74 | def annotate(ui, repo, *pats, **opts): | |
75 | """show changeset information by line for each file |
|
75 | """show changeset information by line for each file | |
@@ -98,8 +98,8 b' def annotate(ui, repo, *pats, **opts):' | |||||
98 | ('follow', lambda x: x[0].path()), |
|
98 | ('follow', lambda x: x[0].path()), | |
99 | ] |
|
99 | ] | |
100 |
|
100 | |||
101 | if (not opts.get('user') and not opts.get('changeset') |
|
101 | if (not opts.get('user') and not opts.get('changeset') | |
102 | and not opts.get('follow')): |
|
102 | and not opts.get('date') and not opts.get('follow')): | |
103 | opts['number'] = 1 |
|
103 | opts['number'] = 1 | |
104 |
|
104 | |||
105 | linenumber = opts.get('line_number') is not None |
|
105 | linenumber = opts.get('line_number') is not None | |
@@ -173,7 +173,8 b' def archive(ui, repo, dest, **opts):' | |||||
173 | if kind == 'files': |
|
173 | if kind == 'files': | |
174 | raise util.Abort(_('cannot archive plain files to stdout')) |
|
174 | raise util.Abort(_('cannot archive plain files to stdout')) | |
175 | dest = sys.stdout |
|
175 | dest = sys.stdout | |
176 | if not prefix: prefix = os.path.basename(repo.root) + '-%h' |
|
176 | if not prefix: | |
|
177 | prefix = os.path.basename(repo.root) + '-%h' | |||
177 | prefix = cmdutil.make_filename(repo, prefix, node) |
|
178 | prefix = cmdutil.make_filename(repo, prefix, node) | |
178 | archival.archive(repo, dest, node, kind, not opts.get('no_decode'), |
|
179 | archival.archive(repo, dest, node, kind, not opts.get('no_decode'), | |
179 | matchfn, prefix) |
|
180 | matchfn, prefix) | |
@@ -257,7 +258,8 b' def backout(ui, repo, node=None, rev=Non' | |||||
257 | if op1 != node: |
|
258 | if op1 != node: | |
258 | hg.clean(repo, op1, show_stats=False) |
|
259 | hg.clean(repo, op1, show_stats=False) | |
259 | if opts.get('merge'): |
|
260 | if opts.get('merge'): | |
260 | ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip())) |
|
261 | ui.status(_('merging with changeset %s\n') | |
|
262 | % nice(repo.changelog.tip())) | |||
261 | hg.merge(repo, hex(repo.changelog.tip())) |
|
263 | hg.merge(repo, hex(repo.changelog.tip())) | |
262 | else: |
|
264 | else: | |
263 | ui.status(_('the backout changeset is a new head - ' |
|
265 | ui.status(_('the backout changeset is a new head - ' | |
@@ -868,11 +870,14 b' def debugstate(ui, repo, nodates=None):' | |||||
868 | if showdate: |
|
870 | if showdate: | |
869 | if ent[3] == -1: |
|
871 | if ent[3] == -1: | |
870 | # Pad or slice to locale representation |
|
872 | # Pad or slice to locale representation | |
871 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0))) |
|
873 | locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", | |
|
874 | time.localtime(0))) | |||
872 | timestr = 'unset' |
|
875 | timestr = 'unset' | |
873 | timestr = timestr[:locale_len] + ' ' * (locale_len - len(timestr)) |
|
876 | timestr = (timestr[:locale_len] + | |
|
877 | ' ' * (locale_len - len(timestr))) | |||
874 | else: |
|
878 | else: | |
875 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])) |
|
879 | timestr = time.strftime("%Y-%m-%d %H:%M:%S ", | |
|
880 | time.localtime(ent[3])) | |||
876 | if ent[1] & 020000: |
|
881 | if ent[1] & 020000: | |
877 | mode = 'lnk' |
|
882 | mode = 'lnk' | |
878 | else: |
|
883 | else: | |
@@ -884,7 +889,7 b' def debugstate(ui, repo, nodates=None):' | |||||
884 | def debugsub(ui, repo, rev=None): |
|
889 | def debugsub(ui, repo, rev=None): | |
885 | if rev == '': |
|
890 | if rev == '': | |
886 | rev = None |
|
891 | rev = None | |
887 | for k,v in sorted(repo[rev].substate.items()): |
|
892 | for k, v in sorted(repo[rev].substate.items()): | |
888 | ui.write('path %s\n' % k) |
|
893 | ui.write('path %s\n' % k) | |
889 | ui.write(' source %s\n' % v[0]) |
|
894 | ui.write(' source %s\n' % v[0]) | |
890 | ui.write(' revision %s\n' % v[1]) |
|
895 | ui.write(' revision %s\n' % v[1]) | |
@@ -1428,7 +1433,8 b' def heads(ui, repo, *branchrevs, **opts)' | |||||
1428 | bheads = repo.branchheads(branch, start, closed=closed) |
|
1433 | bheads = repo.branchheads(branch, start, closed=closed) | |
1429 | if not bheads: |
|
1434 | if not bheads: | |
1430 | if not opts.get('rev'): |
|
1435 | if not opts.get('rev'): | |
1431 | ui.warn(_("no open branch heads on branch %s\n") % encodedbranch) |
|
1436 | ui.warn(_("no open branch heads on branch %s\n") | |
|
1437 | % encodedbranch) | |||
1432 | elif branch != branchrev: |
|
1438 | elif branch != branchrev: | |
1433 | ui.warn(_("no changes on branch %s containing %s are " |
|
1439 | ui.warn(_("no changes on branch %s containing %s are " | |
1434 | "reachable from %s\n") |
|
1440 | "reachable from %s\n") | |
@@ -1640,7 +1646,8 b' def help_(ui, name=None, with_version=Fa' | |||||
1640 | for title, options in option_lists: |
|
1646 | for title, options in option_lists: | |
1641 | opt_output.append(("\n%s" % title, None)) |
|
1647 | opt_output.append(("\n%s" % title, None)) | |
1642 | for shortopt, longopt, default, desc in options: |
|
1648 | for shortopt, longopt, default, desc in options: | |
1643 | if _("DEPRECATED") in desc and not ui.verbose: continue |
|
1649 | if _("DEPRECATED") in desc and not ui.verbose: | |
|
1650 | continue | |||
1644 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, |
|
1651 | opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt, | |
1645 | longopt and " --%s" % longopt), |
|
1652 | longopt and " --%s" % longopt), | |
1646 | "%s%s" % (desc, |
|
1653 | "%s%s" % (desc, | |
@@ -1851,7 +1858,8 b' def import_(ui, repo, patch1, *patches, ' | |||||
1851 | patch.patch(tmpname, ui, strip=strip, cwd=repo.root, |
|
1858 | patch.patch(tmpname, ui, strip=strip, cwd=repo.root, | |
1852 | files=files, eolmode=None) |
|
1859 | files=files, eolmode=None) | |
1853 | finally: |
|
1860 | finally: | |
1854 | files = patch.updatedir(ui, repo, files, similarity=sim/100.) |
|
1861 | files = patch.updatedir(ui, repo, files, | |
|
1862 | similarity=sim / 100.0) | |||
1855 | if not opts.get('no_commit'): |
|
1863 | if not opts.get('no_commit'): | |
1856 | m = cmdutil.matchfiles(repo, files or []) |
|
1864 | m = cmdutil.matchfiles(repo, files or []) | |
1857 | n = repo.commit(message, opts.get('user') or user, |
|
1865 | n = repo.commit(message, opts.get('user') or user, | |
@@ -1976,7 +1984,7 b' def locate(ui, repo, *pats, **opts):' | |||||
1976 |
|
1984 | |||
1977 | ret = 1 |
|
1985 | ret = 1 | |
1978 | m = cmdutil.match(repo, pats, opts, default='relglob') |
|
1986 | m = cmdutil.match(repo, pats, opts, default='relglob') | |
1979 | m.bad = lambda x,y: False |
|
1987 | m.bad = lambda x, y: False | |
1980 | for abs in repo[rev].walk(m): |
|
1988 | for abs in repo[rev].walk(m): | |
1981 | if not rev and abs not in repo.dirstate: |
|
1989 | if not rev and abs not in repo.dirstate: | |
1982 | continue |
|
1990 | continue | |
@@ -2168,7 +2176,8 b' def outgoing(ui, repo, dest=None, **opts' | |||||
2168 | """ |
|
2176 | """ | |
2169 | limit = cmdutil.loglimit(opts) |
|
2177 | limit = cmdutil.loglimit(opts) | |
2170 | dest, revs, checkout = hg.parseurl( |
|
2178 | dest, revs, checkout = hg.parseurl( | |
2171 | ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev')) |
|
2179 | ui.expandpath(dest or 'default-push', dest or 'default'), | |
|
2180 | opts.get('rev')) | |||
2172 | if revs: |
|
2181 | if revs: | |
2173 | revs = [repo.lookup(rev) for rev in revs] |
|
2182 | revs = [repo.lookup(rev) for rev in revs] | |
2174 |
|
2183 | |||
@@ -2324,7 +2333,8 b' def push(ui, repo, dest=None, **opts):' | |||||
2324 | URLs. If DESTINATION is omitted, a default path will be used. |
|
2333 | URLs. If DESTINATION is omitted, a default path will be used. | |
2325 | """ |
|
2334 | """ | |
2326 | dest, revs, checkout = hg.parseurl( |
|
2335 | dest, revs, checkout = hg.parseurl( | |
2327 | ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev')) |
|
2336 | ui.expandpath(dest or 'default-push', dest or 'default'), | |
|
2337 | opts.get('rev')) | |||
2328 | other = hg.repository(cmdutil.remoteui(repo, opts), dest) |
|
2338 | other = hg.repository(cmdutil.remoteui(repo, opts), dest) | |
2329 | ui.status(_('pushing to %s\n') % url.hidepassword(dest)) |
|
2339 | ui.status(_('pushing to %s\n') % url.hidepassword(dest)) | |
2330 | if revs: |
|
2340 | if revs: | |
@@ -2561,7 +2571,7 b' def revert(ui, repo, *pats, **opts):' | |||||
2561 | # walk dirstate. |
|
2571 | # walk dirstate. | |
2562 |
|
2572 | |||
2563 | m = cmdutil.match(repo, pats, opts) |
|
2573 | m = cmdutil.match(repo, pats, opts) | |
2564 | m.bad = lambda x,y: False |
|
2574 | m.bad = lambda x, y: False | |
2565 | for abs in repo.walk(m): |
|
2575 | for abs in repo.walk(m): | |
2566 | names[abs] = m.rel(abs), m.exact(abs) |
|
2576 | names[abs] = m.rel(abs), m.exact(abs) | |
2567 |
|
2577 | |||
@@ -2634,7 +2644,8 b' def revert(ui, repo, *pats, **opts):' | |||||
2634 | msg = msg(abs) |
|
2644 | msg = msg(abs) | |
2635 | ui.status(msg % rel) |
|
2645 | ui.status(msg % rel) | |
2636 | for table, hitlist, misslist, backuphit, backupmiss in disptable: |
|
2646 | for table, hitlist, misslist, backuphit, backupmiss in disptable: | |
2637 | if abs not in table: continue |
|
2647 | if abs not in table: | |
|
2648 | continue | |||
2638 | # file has changed in dirstate |
|
2649 | # file has changed in dirstate | |
2639 | if mfentry: |
|
2650 | if mfentry: | |
2640 | handle(hitlist, backuphit) |
|
2651 | handle(hitlist, backuphit) | |
@@ -2650,7 +2661,8 b' def revert(ui, repo, *pats, **opts):' | |||||
2650 | continue |
|
2661 | continue | |
2651 | # file has not changed in dirstate |
|
2662 | # file has not changed in dirstate | |
2652 | if node == parent: |
|
2663 | if node == parent: | |
2653 | if exact: ui.warn(_('no changes needed to %s\n') % rel) |
|
2664 | if exact: | |
|
2665 | ui.warn(_('no changes needed to %s\n') % rel) | |||
2654 | continue |
|
2666 | continue | |
2655 | if pmf is None: |
|
2667 | if pmf is None: | |
2656 | # only need parent manifest in this unlikely case, |
|
2668 | # only need parent manifest in this unlikely case, | |
@@ -2782,7 +2794,8 b' def serve(ui, repo, **opts):' | |||||
2782 | util.set_signal_handler() |
|
2794 | util.set_signal_handler() | |
2783 | self.httpd = server.create_server(baseui, repo) |
|
2795 | self.httpd = server.create_server(baseui, repo) | |
2784 |
|
2796 | |||
2785 | if not ui.verbose: return |
|
2797 | if not ui.verbose: | |
|
2798 | return | |||
2786 |
|
2799 | |||
2787 | if self.httpd.prefix: |
|
2800 | if self.httpd.prefix: | |
2788 | prefix = self.httpd.prefix.strip('/') + '/' |
|
2801 | prefix = self.httpd.prefix.strip('/') + '/' | |
@@ -2939,7 +2952,7 b' def summary(ui, repo, **opts):' | |||||
2939 | _('%d deleted'), _('%d unknown'), _('%d ignored'), |
|
2952 | _('%d deleted'), _('%d unknown'), _('%d ignored'), | |
2940 | _('%d unresolved')] |
|
2953 | _('%d unresolved')] | |
2941 | t = [] |
|
2954 | t = [] | |
2942 | for s,l in zip(st, labels): |
|
2955 | for s, l in zip(st, labels): | |
2943 | if s: |
|
2956 | if s: | |
2944 | t.append(l % len(s)) |
|
2957 | t.append(l % len(s)) | |
2945 |
|
2958 | |||
@@ -3463,7 +3476,8 b' table = {' | |||||
3463 | [('0', 'print0', None, _('end fields with NUL')), |
|
3476 | [('0', 'print0', None, _('end fields with NUL')), | |
3464 | ('', 'all', None, _('print all revisions that match')), |
|
3477 | ('', 'all', None, _('print all revisions that match')), | |
3465 | ('f', 'follow', None, |
|
3478 | ('f', 'follow', None, | |
3466 | _('follow changeset history, or file history across copies and renames')), |
|
3479 | _('follow changeset history,' | |
|
3480 | ' or file history across copies and renames')), | |||
3467 | ('i', 'ignore-case', None, _('ignore case when matching')), |
|
3481 | ('i', 'ignore-case', None, _('ignore case when matching')), | |
3468 | ('l', 'files-with-matches', None, |
|
3482 | ('l', 'files-with-matches', None, | |
3469 | _('print only filenames and revisions that match')), |
|
3483 | _('print only filenames and revisions that match')), | |
@@ -3499,7 +3513,8 b' table = {' | |||||
3499 | ('b', 'base', '', _('base path')), |
|
3513 | ('b', 'base', '', _('base path')), | |
3500 | ('f', 'force', None, |
|
3514 | ('f', 'force', None, | |
3501 | _('skip check for outstanding uncommitted changes')), |
|
3515 | _('skip check for outstanding uncommitted changes')), | |
3502 | ('', 'no-commit', None, _("don't commit, just update the working directory")), |
|
3516 | ('', 'no-commit', None, | |
|
3517 | _("don't commit, just update the working directory")), | |||
3503 | ('', 'exact', None, |
|
3518 | ('', 'exact', None, | |
3504 | _('apply patch to the nodes from which it was generated')), |
|
3519 | _('apply patch to the nodes from which it was generated')), | |
3505 | ('', 'import-branch', None, |
|
3520 | ('', 'import-branch', None, | |
@@ -3533,7 +3548,8 b' table = {' | |||||
3533 | "^log|history": |
|
3548 | "^log|history": | |
3534 | (log, |
|
3549 | (log, | |
3535 | [('f', 'follow', None, |
|
3550 | [('f', 'follow', None, | |
3536 | _('follow changeset history, or file history across copies and renames')), |
|
3551 | _('follow changeset history,' | |
|
3552 | ' or file history across copies and renames')), | |||
3537 | ('', 'follow-first', None, |
|
3553 | ('', 'follow-first', None, | |
3538 | _('only follow the first parent of merge changesets')), |
|
3554 | _('only follow the first parent of merge changesets')), | |
3539 | ('d', 'date', '', _('show revisions matching date spec')), |
|
3555 | ('d', 'date', '', _('show revisions matching date spec')), | |
@@ -3545,7 +3561,8 b' table = {' | |||||
3545 | ('u', 'user', [], _('revisions committed by user')), |
|
3561 | ('u', 'user', [], _('revisions committed by user')), | |
3546 | ('b', 'only-branch', [], |
|
3562 | ('b', 'only-branch', [], | |
3547 | _('show only changesets within the given named branch')), |
|
3563 | _('show only changesets within the given named branch')), | |
3548 | ('P', 'prune', [], _('do not display revision or any of its ancestors')), |
|
3564 | ('P', 'prune', [], | |
|
3565 | _('do not display revision or any of its ancestors')), | |||
3549 | ] + logopts + walkopts, |
|
3566 | ] + logopts + walkopts, | |
3550 | _('[OPTION]... [FILE]')), |
|
3567 | _('[OPTION]... [FILE]')), | |
3551 | "manifest": |
|
3568 | "manifest": | |
@@ -3632,8 +3649,10 b' table = {' | |||||
3632 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), |
|
3649 | ('', 'daemon-pipefds', '', _('used internally by daemon mode')), | |
3633 | ('E', 'errorlog', '', _('name of error log file to write to')), |
|
3650 | ('E', 'errorlog', '', _('name of error log file to write to')), | |
3634 | ('p', 'port', 0, _('port to listen on (default: 8000)')), |
|
3651 | ('p', 'port', 0, _('port to listen on (default: 8000)')), | |
3635 | ('a', 'address', '', _('address to listen on (default: all interfaces)')), |
|
3652 | ('a', 'address', '', | |
3636 | ('', 'prefix', '', _('prefix path to serve from (default: server root)')), |
|
3653 | _('address to listen on (default: all interfaces)')), | |
|
3654 | ('', 'prefix', '', | |||
|
3655 | _('prefix path to serve from (default: server root)')), | |||
3637 | ('n', 'name', '', |
|
3656 | ('n', 'name', '', | |
3638 | _('name to show in web pages (default: working directory)')), |
|
3657 | _('name to show in web pages (default: working directory)')), | |
3639 | ('', 'webdir-conf', '', _('name of the webdir config file' |
|
3658 | ('', 'webdir-conf', '', _('name of the webdir config file' |
@@ -87,20 +87,33 b' class changectx(object):' | |||||
87 | for f in sorted(self._manifest): |
|
87 | for f in sorted(self._manifest): | |
88 | yield f |
|
88 | yield f | |
89 |
|
89 | |||
90 | def changeset(self): return self._changeset |
|
90 | def changeset(self): | |
91 | def manifest(self): return self._manifest |
|
91 | return self._changeset | |
92 | def manifestnode(self): return self._changeset[0] |
|
92 | def manifest(self): | |
|
93 | return self._manifest | |||
|
94 | def manifestnode(self): | |||
|
95 | return self._changeset[0] | |||
93 |
|
96 | |||
94 | def rev(self): return self._rev |
|
97 | def rev(self): | |
95 | def node(self): return self._node |
|
98 | return self._rev | |
96 | def hex(self): return hex(self._node) |
|
99 | def node(self): | |
97 | def user(self): return self._changeset[1] |
|
100 | return self._node | |
98 | def date(self): return self._changeset[2] |
|
101 | def hex(self): | |
99 | def files(self): return self._changeset[3] |
|
102 | return hex(self._node) | |
100 | def description(self): return self._changeset[4] |
|
103 | def user(self): | |
101 | def branch(self): return self._changeset[5].get("branch") |
|
104 | return self._changeset[1] | |
102 | def extra(self): return self._changeset[5] |
|
105 | def date(self): | |
103 | def tags(self): return self._repo.nodetags(self._node) |
|
106 | return self._changeset[2] | |
|
107 | def files(self): | |||
|
108 | return self._changeset[3] | |||
|
109 | def description(self): | |||
|
110 | return self._changeset[4] | |||
|
111 | def branch(self): | |||
|
112 | return self._changeset[5].get("branch") | |||
|
113 | def extra(self): | |||
|
114 | return self._changeset[5] | |||
|
115 | def tags(self): | |||
|
116 | return self._repo.nodetags(self._node) | |||
104 |
|
117 | |||
105 | def parents(self): |
|
118 | def parents(self): | |
106 | """return contexts for each parent changeset""" |
|
119 | """return contexts for each parent changeset""" | |
@@ -283,10 +296,14 b' class filectx(object):' | |||||
283 | return filectx(self._repo, self._path, fileid=fileid, |
|
296 | return filectx(self._repo, self._path, fileid=fileid, | |
284 | filelog=self._filelog) |
|
297 | filelog=self._filelog) | |
285 |
|
298 | |||
286 | def filerev(self): return self._filerev |
|
299 | def filerev(self): | |
287 | def filenode(self): return self._filenode |
|
300 | return self._filerev | |
288 | def flags(self): return self._changectx.flags(self._path) |
|
301 | def filenode(self): | |
289 | def filelog(self): return self._filelog |
|
302 | return self._filenode | |
|
303 | def flags(self): | |||
|
304 | return self._changectx.flags(self._path) | |||
|
305 | def filelog(self): | |||
|
306 | return self._filelog | |||
290 |
|
307 | |||
291 | def rev(self): |
|
308 | def rev(self): | |
292 | if '_changectx' in self.__dict__: |
|
309 | if '_changectx' in self.__dict__: | |
@@ -295,23 +312,38 b' class filectx(object):' | |||||
295 | return self._changectx.rev() |
|
312 | return self._changectx.rev() | |
296 | return self._filelog.linkrev(self._filerev) |
|
313 | return self._filelog.linkrev(self._filerev) | |
297 |
|
314 | |||
298 | def linkrev(self): return self._filelog.linkrev(self._filerev) |
|
315 | def linkrev(self): | |
299 | def node(self): return self._changectx.node() |
|
316 | return self._filelog.linkrev(self._filerev) | |
300 | def hex(self): return hex(self.node()) |
|
317 | def node(self): | |
301 | def user(self): return self._changectx.user() |
|
318 | return self._changectx.node() | |
302 | def date(self): return self._changectx.date() |
|
319 | def hex(self): | |
303 | def files(self): return self._changectx.files() |
|
320 | return hex(self.node()) | |
304 | def description(self): return self._changectx.description() |
|
321 | def user(self): | |
305 | def branch(self): return self._changectx.branch() |
|
322 | return self._changectx.user() | |
306 | def extra(self): return self._changectx.extra() |
|
323 | def date(self): | |
307 | def manifest(self): return self._changectx.manifest() |
|
324 | return self._changectx.date() | |
308 | def changectx(self): return self._changectx |
|
325 | def files(self): | |
|
326 | return self._changectx.files() | |||
|
327 | def description(self): | |||
|
328 | return self._changectx.description() | |||
|
329 | def branch(self): | |||
|
330 | return self._changectx.branch() | |||
|
331 | def extra(self): | |||
|
332 | return self._changectx.extra() | |||
|
333 | def manifest(self): | |||
|
334 | return self._changectx.manifest() | |||
|
335 | def changectx(self): | |||
|
336 | return self._changectx | |||
309 |
|
337 | |||
310 | def data(self): return self._filelog.read(self._filenode) |
|
338 | def data(self): | |
311 | def path(self): return self._path |
|
339 | return self._filelog.read(self._filenode) | |
312 | def size(self): return self._filelog.size(self._filerev) |
|
340 | def path(self): | |
|
341 | return self._path | |||
|
342 | def size(self): | |||
|
343 | return self._filelog.size(self._filerev) | |||
313 |
|
344 | |||
314 | def cmp(self, text): return self._filelog.cmp(self._filenode, text) |
|
345 | def cmp(self, text): | |
|
346 | return self._filelog.cmp(self._filenode, text) | |||
315 |
|
347 | |||
316 | def renamed(self): |
|
348 | def renamed(self): | |
317 | """check if file was actually renamed in this changeset revision |
|
349 | """check if file was actually renamed in this changeset revision | |
@@ -348,7 +380,7 b' class filectx(object):' | |||||
348 | pl[0] = (r[0], r[1], None) |
|
380 | pl[0] = (r[0], r[1], None) | |
349 |
|
381 | |||
350 | return [filectx(self._repo, p, fileid=n, filelog=l) |
|
382 | return [filectx(self._repo, p, fileid=n, filelog=l) | |
351 | for p,n,l in pl if n != nullid] |
|
383 | for p, n, l in pl if n != nullid] | |
352 |
|
384 | |||
353 | def children(self): |
|
385 | def children(self): | |
354 | # hard for renames |
|
386 | # hard for renames | |
@@ -594,22 +626,33 b' class workingctx(changectx):' | |||||
594 | self._parents = [changectx(self._repo, x) for x in p] |
|
626 | self._parents = [changectx(self._repo, x) for x in p] | |
595 | return self._parents |
|
627 | return self._parents | |
596 |
|
628 | |||
597 | def manifest(self): return self._manifest |
|
629 | def manifest(self): | |
598 |
|
630 | return self._manifest | ||
599 | def user(self): return self._user or self._repo.ui.username() |
|
631 | def user(self): | |
600 | def date(self): return self._date |
|
632 | return self._user or self._repo.ui.username() | |
601 | def description(self): return self._text |
|
633 | def date(self): | |
|
634 | return self._date | |||
|
635 | def description(self): | |||
|
636 | return self._text | |||
602 | def files(self): |
|
637 | def files(self): | |
603 | return sorted(self._status[0] + self._status[1] + self._status[2]) |
|
638 | return sorted(self._status[0] + self._status[1] + self._status[2]) | |
604 |
|
639 | |||
605 | def modified(self): return self._status[0] |
|
640 | def modified(self): | |
606 | def added(self): return self._status[1] |
|
641 | return self._status[0] | |
607 | def removed(self): return self._status[2] |
|
642 | def added(self): | |
608 | def deleted(self): return self._status[3] |
|
643 | return self._status[1] | |
609 | def unknown(self): return self._status[4] |
|
644 | def removed(self): | |
610 | def clean(self): return self._status[5] |
|
645 | return self._status[2] | |
611 | def branch(self): return self._extra['branch'] |
|
646 | def deleted(self): | |
612 | def extra(self): return self._extra |
|
647 | return self._status[3] | |
|
648 | def unknown(self): | |||
|
649 | return self._status[4] | |||
|
650 | def clean(self): | |||
|
651 | return self._status[5] | |||
|
652 | def branch(self): | |||
|
653 | return self._extra['branch'] | |||
|
654 | def extra(self): | |||
|
655 | return self._extra | |||
613 |
|
656 | |||
614 | def tags(self): |
|
657 | def tags(self): | |
615 | t = [] |
|
658 | t = [] | |
@@ -685,7 +728,8 b' class workingfilectx(filectx):' | |||||
685 | def __str__(self): |
|
728 | def __str__(self): | |
686 | return "%s@%s" % (self.path(), self._changectx) |
|
729 | return "%s@%s" % (self.path(), self._changectx) | |
687 |
|
730 | |||
688 | def data(self): return self._repo.wread(self._path) |
|
731 | def data(self): | |
|
732 | return self._repo.wread(self._path) | |||
689 | def renamed(self): |
|
733 | def renamed(self): | |
690 | rp = self._repo.dirstate.copied(self._path) |
|
734 | rp = self._repo.dirstate.copied(self._path) | |
691 | if not rp: |
|
735 | if not rp: | |
@@ -711,21 +755,24 b' class workingfilectx(filectx):' | |||||
711 | pl.append((path, filenode(pc, path), fl)) |
|
755 | pl.append((path, filenode(pc, path), fl)) | |
712 |
|
756 | |||
713 | return [filectx(self._repo, p, fileid=n, filelog=l) |
|
757 | return [filectx(self._repo, p, fileid=n, filelog=l) | |
714 | for p,n,l in pl if n != nullid] |
|
758 | for p, n, l in pl if n != nullid] | |
715 |
|
759 | |||
716 | def children(self): |
|
760 | def children(self): | |
717 | return [] |
|
761 | return [] | |
718 |
|
762 | |||
719 | def size(self): return os.stat(self._repo.wjoin(self._path)).st_size |
|
763 | def size(self): | |
|
764 | return os.stat(self._repo.wjoin(self._path)).st_size | |||
720 | def date(self): |
|
765 | def date(self): | |
721 | t, tz = self._changectx.date() |
|
766 | t, tz = self._changectx.date() | |
722 | try: |
|
767 | try: | |
723 | return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz) |
|
768 | return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz) | |
724 | except OSError, err: |
|
769 | except OSError, err: | |
725 | if err.errno != errno.ENOENT: raise |
|
770 | if err.errno != errno.ENOENT: | |
|
771 | raise | |||
726 | return (t, tz) |
|
772 | return (t, tz) | |
727 |
|
773 | |||
728 | def cmp(self, text): return self._repo.wread(self._path) == text |
|
774 | def cmp(self, text): | |
|
775 | return self._repo.wread(self._path) == text | |||
729 |
|
776 | |||
730 | class memctx(object): |
|
777 | class memctx(object): | |
731 | """Use memctx to perform in-memory commits via localrepo.commitctx(). |
|
778 | """Use memctx to perform in-memory commits via localrepo.commitctx(). | |
@@ -787,22 +834,37 b' class memctx(object):' | |||||
787 | def __getitem__(self, key): |
|
834 | def __getitem__(self, key): | |
788 | return self.filectx(key) |
|
835 | return self.filectx(key) | |
789 |
|
836 | |||
790 | def p1(self): return self._parents[0] |
|
837 | def p1(self): | |
791 | def p2(self): return self._parents[1] |
|
838 | return self._parents[0] | |
|
839 | def p2(self): | |||
|
840 | return self._parents[1] | |||
792 |
|
841 | |||
793 | def user(self): return self._user or self._repo.ui.username() |
|
842 | def user(self): | |
794 | def date(self): return self._date |
|
843 | return self._user or self._repo.ui.username() | |
795 | def description(self): return self._text |
|
844 | def date(self): | |
796 | def files(self): return self.modified() |
|
845 | return self._date | |
797 | def modified(self): return self._status[0] |
|
846 | def description(self): | |
798 | def added(self): return self._status[1] |
|
847 | return self._text | |
799 | def removed(self): return self._status[2] |
|
848 | def files(self): | |
800 | def deleted(self): return self._status[3] |
|
849 | return self.modified() | |
801 | def unknown(self): return self._status[4] |
|
850 | def modified(self): | |
802 | def clean(self): return self._status[5] |
|
851 | return self._status[0] | |
803 | def branch(self): return self._extra['branch'] |
|
852 | def added(self): | |
804 | def extra(self): return self._extra |
|
853 | return self._status[1] | |
805 | def flags(self, f): return self[f].flags() |
|
854 | def removed(self): | |
|
855 | return self._status[2] | |||
|
856 | def deleted(self): | |||
|
857 | return self._status[3] | |||
|
858 | def unknown(self): | |||
|
859 | return self._status[4] | |||
|
860 | def clean(self): | |||
|
861 | return self._status[5] | |||
|
862 | def branch(self): | |||
|
863 | return self._extra['branch'] | |||
|
864 | def extra(self): | |||
|
865 | return self._extra | |||
|
866 | def flags(self, f): | |||
|
867 | return self[f].flags() | |||
806 |
|
868 | |||
807 | def parents(self): |
|
869 | def parents(self): | |
808 | """return contexts for each parent changeset""" |
|
870 | """return contexts for each parent changeset""" | |
@@ -832,11 +894,19 b' class memfilectx(object):' | |||||
832 | if copied: |
|
894 | if copied: | |
833 | self._copied = (copied, nullid) |
|
895 | self._copied = (copied, nullid) | |
834 |
|
896 | |||
835 | def __nonzero__(self): return True |
|
897 | def __nonzero__(self): | |
836 | def __str__(self): return "%s@%s" % (self.path(), self._changectx) |
|
898 | return True | |
837 | def path(self): return self._path |
|
899 | def __str__(self): | |
838 | def data(self): return self._data |
|
900 | return "%s@%s" % (self.path(), self._changectx) | |
839 | def flags(self): return self._flags |
|
901 | def path(self): | |
840 | def isexec(self): return 'x' in self._flags |
|
902 | return self._path | |
841 | def islink(self): return 'l' in self._flags |
|
903 | def data(self): | |
842 | def renamed(self): return self._copied |
|
904 | return self._data | |
|
905 | def flags(self): | |||
|
906 | return self._flags | |||
|
907 | def isexec(self): | |||
|
908 | return 'x' in self._flags | |||
|
909 | def islink(self): | |||
|
910 | return 'l' in self._flags | |||
|
911 | def renamed(self): | |||
|
912 | return self._copied |
@@ -190,8 +190,10 b' def copies(repo, c1, c2, ca, checkdirs=F' | |||||
190 | repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n") |
|
190 | repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n") | |
191 | for f in fullcopy: |
|
191 | for f in fullcopy: | |
192 | note = "" |
|
192 | note = "" | |
193 | if f in copy: note += "*" |
|
193 | if f in copy: | |
194 | if f in diverge2: note += "!" |
|
194 | note += "*" | |
|
195 | if f in diverge2: | |||
|
196 | note += "!" | |||
195 | repo.ui.debug(" %s -> %s %s\n" % (f, fullcopy[f], note)) |
|
197 | repo.ui.debug(" %s -> %s %s\n" % (f, fullcopy[f], note)) | |
196 | del diverge2 |
|
198 | del diverge2 | |
197 |
|
199 |
@@ -33,7 +33,7 b' static void _fix_newline(PyObject *hunk,' | |||||
33 | hline = PyString_FromStringAndSize(l, sz-1); |
|
33 | hline = PyString_FromStringAndSize(l, sz-1); | |
34 |
|
34 | |||
35 | if (c == ' ' || c == '+') { |
|
35 | if (c == ' ' || c == '+') { | |
36 | PyObject *rline = PyString_FromStringAndSize(l+1, sz-2); |
|
36 | PyObject *rline = PyString_FromStringAndSize(l + 1, sz - 2); | |
37 | PyList_SetItem(b, blen-1, rline); |
|
37 | PyList_SetItem(b, blen-1, rline); | |
38 | } |
|
38 | } | |
39 | if (c == ' ' || c == '-') { |
|
39 | if (c == ' ' || c == '-') { | |
@@ -74,13 +74,13 b' addlines(PyObject *self, PyObject *args)' | |||||
74 | if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b)) |
|
74 | if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b)) | |
75 | return NULL; |
|
75 | return NULL; | |
76 |
|
76 | |||
77 | while(1) { |
|
77 | while (1) { | |
78 | todoa = lena - PyList_Size(a); |
|
78 | todoa = lena - PyList_Size(a); | |
79 | todob = lenb - PyList_Size(b); |
|
79 | todob = lenb - PyList_Size(b); | |
80 | num = todoa > todob ? todoa : todob; |
|
80 | num = todoa > todob ? todoa : todob; | |
81 | if (num == 0) |
|
81 | if (num == 0) | |
82 | break; |
|
82 | break; | |
83 | for (i = 0 ; i < num ; i++) { |
|
83 | for (i = 0; i < num; i++) { | |
84 | x = PyFile_GetLine(fp, 0); |
|
84 | x = PyFile_GetLine(fp, 0); | |
85 | s = PyString_AS_STRING(x); |
|
85 | s = PyString_AS_STRING(x); | |
86 | c = *s; |
|
86 | c = *s; | |
@@ -135,10 +135,10 b' testhunk(PyObject *self, PyObject *args)' | |||||
135 | if (alen > blen - bstart) { |
|
135 | if (alen > blen - bstart) { | |
136 | return Py_BuildValue("l", -1); |
|
136 | return Py_BuildValue("l", -1); | |
137 | } |
|
137 | } | |
138 | for (i = 0 ; i < alen ; i++) { |
|
138 | for (i = 0; i < alen; i++) { | |
139 | sa = PyString_AS_STRING(PyList_GET_ITEM(a, i)); |
|
139 | sa = PyString_AS_STRING(PyList_GET_ITEM(a, i)); | |
140 | sb = PyString_AS_STRING(PyList_GET_ITEM(b, i + bstart)); |
|
140 | sb = PyString_AS_STRING(PyList_GET_ITEM(b, i + bstart)); | |
141 | if (strcmp(sa+1, sb) != 0) |
|
141 | if (strcmp(sa + 1, sb) != 0) | |
142 | return Py_BuildValue("l", -1); |
|
142 | return Py_BuildValue("l", -1); | |
143 | } |
|
143 | } | |
144 | return Py_BuildValue("l", 0); |
|
144 | return Py_BuildValue("l", 0); |
@@ -87,13 +87,14 b' class dirstate(object):' | |||||
87 | elif l > 0 and l < 40: |
|
87 | elif l > 0 and l < 40: | |
88 | raise util.Abort(_('working directory state appears damaged!')) |
|
88 | raise util.Abort(_('working directory state appears damaged!')) | |
89 | except IOError, err: |
|
89 | except IOError, err: | |
90 | if err.errno != errno.ENOENT: raise |
|
90 | if err.errno != errno.ENOENT: | |
|
91 | raise | |||
91 | return [nullid, nullid] |
|
92 | return [nullid, nullid] | |
92 |
|
93 | |||
93 | @propertycache |
|
94 | @propertycache | |
94 | def _dirs(self): |
|
95 | def _dirs(self): | |
95 | dirs = {} |
|
96 | dirs = {} | |
96 | for f,s in self._map.iteritems(): |
|
97 | for f, s in self._map.iteritems(): | |
97 | if s[0] != 'r': |
|
98 | if s[0] != 'r': | |
98 | _incdirs(dirs, f) |
|
99 | _incdirs(dirs, f) | |
99 | return dirs |
|
100 | return dirs | |
@@ -157,7 +158,8 b' class dirstate(object):' | |||||
157 |
|
158 | |||
158 | def getcwd(self): |
|
159 | def getcwd(self): | |
159 | cwd = os.getcwd() |
|
160 | cwd = os.getcwd() | |
160 | if cwd == self._root: return '' |
|
161 | if cwd == self._root: | |
|
162 | return '' | |||
161 | # self._root ends with a path separator if self._root is '/' or 'C:\' |
|
163 | # self._root ends with a path separator if self._root is '/' or 'C:\' | |
162 | rootsep = self._root |
|
164 | rootsep = self._root | |
163 | if not util.endswithsep(rootsep): |
|
165 | if not util.endswithsep(rootsep): | |
@@ -215,7 +217,8 b' class dirstate(object):' | |||||
215 | try: |
|
217 | try: | |
216 | st = self._opener("dirstate").read() |
|
218 | st = self._opener("dirstate").read() | |
217 | except IOError, err: |
|
219 | except IOError, err: | |
218 | if err.errno != errno.ENOENT: raise |
|
220 | if err.errno != errno.ENOENT: | |
|
221 | raise | |||
219 | return |
|
222 | return | |
220 | if not st: |
|
223 | if not st: | |
221 | return |
|
224 | return | |
@@ -440,11 +443,16 b' class dirstate(object):' | |||||
440 |
|
443 | |||
441 | def badtype(mode): |
|
444 | def badtype(mode): | |
442 | kind = _('unknown') |
|
445 | kind = _('unknown') | |
443 | if stat.S_ISCHR(mode): kind = _('character device') |
|
446 | if stat.S_ISCHR(mode): | |
444 | elif stat.S_ISBLK(mode): kind = _('block device') |
|
447 | kind = _('character device') | |
445 | elif stat.S_ISFIFO(mode): kind = _('fifo') |
|
448 | elif stat.S_ISBLK(mode): | |
446 | elif stat.S_ISSOCK(mode): kind = _('socket') |
|
449 | kind = _('block device') | |
447 | elif stat.S_ISDIR(mode): kind = _('directory') |
|
450 | elif stat.S_ISFIFO(mode): | |
|
451 | kind = _('fifo') | |||
|
452 | elif stat.S_ISSOCK(mode): | |||
|
453 | kind = _('socket') | |||
|
454 | elif stat.S_ISDIR(mode): | |||
|
455 | kind = _('directory') | |||
448 | return _('unsupported file type (type is %s)') % kind |
|
456 | return _('unsupported file type (type is %s)') % kind | |
449 |
|
457 | |||
450 | ignore = self._ignore |
|
458 | ignore = self._ignore |
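
The classification above only needs the mode predicates from the standard stat module; stripped of the gettext wrapper it reads as follows (same logic as the hunk, shown standalone):

    import stat

    def badtype(mode):
        kind = 'unknown'
        if stat.S_ISCHR(mode):
            kind = 'character device'
        elif stat.S_ISBLK(mode):
            kind = 'block device'
        elif stat.S_ISFIFO(mode):
            kind = 'fifo'
        elif stat.S_ISSOCK(mode):
            kind = 'socket'
        elif stat.S_ISDIR(mode):
            kind = 'directory'
        return 'unsupported file type (type is %s)' % kind

    # e.g. badtype(os.stat('.').st_mode) reports a directory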
@@ -35,7 +35,8 b' def _runcatch(ui, args):' | |||||
35 |
|
35 | |||
36 | for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': |
|
36 | for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': | |
37 | num = getattr(signal, name, None) |
|
37 | num = getattr(signal, name, None) | |
38 | if num: signal.signal(num, catchterm) |
|
38 | if num: | |
|
39 | signal.signal(num, catchterm) | |||
39 |
|
40 | |||
40 | try: |
|
41 | try: | |
41 | try: |
|
42 | try: |
@@ -62,7 +62,7 b' def fromlocal(s):' | |||||
62 | try: |
|
62 | try: | |
63 | return s.decode(encoding, encodingmode).encode("utf-8") |
|
63 | return s.decode(encoding, encodingmode).encode("utf-8") | |
64 | except UnicodeDecodeError, inst: |
|
64 | except UnicodeDecodeError, inst: | |
65 | sub = s[max(0, inst.start-10):inst.start+10] |
|
65 | sub = s[max(0, inst.start - 10):inst.start + 10] | |
66 | raise error.Abort("decoding near '%s': %s!" % (sub, inst)) |
|
66 | raise error.Abort("decoding near '%s': %s!" % (sub, inst)) | |
67 | except LookupError, k: |
|
67 | except LookupError, k: | |
68 | raise error.Abort("%s, please check your locale settings" % k) |
|
68 | raise error.Abort("%s, please check your locale settings" % k) |
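
The slice in the except branch above trims the error context to roughly ten bytes on either side of the byte that failed to decode, keeping the message short for large inputs. A minimal sketch of the same idea, assuming a byte string and a known source codec:

    def fromlocal_sketch(s, enc='ascii'):
        try:
            return s.decode(enc).encode('utf-8')
        except UnicodeDecodeError as inst:
            sub = s[max(0, inst.start - 10):inst.start + 10]
            raise ValueError("decoding near %r: %s" % (sub, inst))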
@@ -16,7 +16,7 b' def gnugetopt(args, options, longoptions' | |||||
16 | extraargs = [] |
|
16 | extraargs = [] | |
17 | if '--' in args: |
|
17 | if '--' in args: | |
18 | stopindex = args.index('--') |
|
18 | stopindex = args.index('--') | |
19 | extraargs = args[stopindex+1:] |
|
19 | extraargs = args[stopindex + 1:] | |
20 | args = args[:stopindex] |
|
20 | args = args[:stopindex] | |
21 | opts, parseargs = getopt.getopt(args, options, longoptions) |
|
21 | opts, parseargs = getopt.getopt(args, options, longoptions) | |
22 | args = [] |
|
22 | args = [] | |
@@ -77,8 +77,10 b' def fancyopts(args, options, state, gnu=' | |||||
77 |
|
77 | |||
78 | # does it take a parameter? |
|
78 | # does it take a parameter? | |
79 | if not (default is None or default is True or default is False): |
|
79 | if not (default is None or default is True or default is False): | |
80 | if short: short += ':' |
|
80 | if short: | |
81 | if oname: oname += '=' |
|
81 | short += ':' | |
|
82 | if oname: | |||
|
83 | oname += '=' | |||
82 | if short: |
|
84 | if short: | |
83 | shortlist += short |
|
85 | shortlist += short | |
84 | if name: |
|
86 | if name: |
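
The ':' and '=' suffixes added above follow the getopt convention for "this option takes a value". A self-contained sketch of building such a spec (the option table is invented for the example; this is not the real fancyopts):

    import getopt

    options = [('R', 'repository', ''), ('v', 'verbose', False)]
    shortlist, namelist = '', []
    for short, name, default in options:
        takes_value = not (default is None or default is True or default is False)
        if takes_value:
            if short:
                short += ':'
            if name:
                name += '='
        if short:
            shortlist += short
        if name:
            namelist.append(name)

    opts, args = getopt.getopt(['-R', '/tmp/repo', '-v', 'file.txt'],
                               shortlist, namelist)
    # opts == [('-R', '/tmp/repo'), ('-v', '')], args == ['file.txt']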
@@ -17,7 +17,7 b' class filelog(revlog.revlog):' | |||||
17 | if not t.startswith('\1\n'): |
|
17 | if not t.startswith('\1\n'): | |
18 | return t |
|
18 | return t | |
19 | s = t.index('\1\n', 2) |
|
19 | s = t.index('\1\n', 2) | |
20 | return t[s+2:] |
|
20 | return t[s + 2:] | |
21 |
|
21 | |||
22 | def _readmeta(self, node): |
|
22 | def _readmeta(self, node): | |
23 | t = self.revision(node) |
|
23 | t = self.revision(node) |
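
The '\1\n' marker handled above delimits an optional metadata block (for example copy/rename information) stored in front of the file data; the real contents start after the second marker. A standalone sketch of the unwrapping:

    def strip_filelog_meta(t):
        # assumed layout: '\1\n', "key: value" lines, '\1\n', then the data
        if not t.startswith('\x01\n'):
            return t
        s = t.index('\x01\n', 2)
        return t[s + 2:]

    assert strip_filelog_meta('\x01\ncopy: foo\n\x01\nreal data') == 'real data'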
@@ -65,19 +65,19 b' def _picktool(repo, ui, path, binary, sy' | |||||
65 |
|
65 | |||
66 | # then merge tools |
|
66 | # then merge tools | |
67 | tools = {} |
|
67 | tools = {} | |
68 | for k,v in ui.configitems("merge-tools"): |
|
68 | for k, v in ui.configitems("merge-tools"): | |
69 | t = k.split('.')[0] |
|
69 | t = k.split('.')[0] | |
70 | if t not in tools: |
|
70 | if t not in tools: | |
71 | tools[t] = int(_toolstr(ui, t, "priority", "0")) |
|
71 | tools[t] = int(_toolstr(ui, t, "priority", "0")) | |
72 | names = tools.keys() |
|
72 | names = tools.keys() | |
73 | tools = sorted([(-p,t) for t,p in tools.items()]) |
|
73 | tools = sorted([(-p, t) for t, p in tools.items()]) | |
74 | uimerge = ui.config("ui", "merge") |
|
74 | uimerge = ui.config("ui", "merge") | |
75 | if uimerge: |
|
75 | if uimerge: | |
76 | if uimerge not in names: |
|
76 | if uimerge not in names: | |
77 | return (uimerge, uimerge) |
|
77 | return (uimerge, uimerge) | |
78 | tools.insert(0, (None, uimerge)) # highest priority |
|
78 | tools.insert(0, (None, uimerge)) # highest priority | |
79 | tools.append((None, "hgmerge")) # the old default, if found |
|
79 | tools.append((None, "hgmerge")) # the old default, if found | |
80 | for p,t in tools: |
|
80 | for p, t in tools: | |
81 | if check(t, None, symlink, binary): |
|
81 | if check(t, None, symlink, binary): | |
82 | toolpath = _findtool(ui, t) |
|
82 | toolpath = _findtool(ui, t) | |
83 | return (t, '"' + toolpath + '"') |
|
83 | return (t, '"' + toolpath + '"') |
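
The sort above orders merge tools by descending priority by negating the priority in the key, so a plain ascending tuple sort puts the highest-priority tool first. With invented priorities:

    tools = {'kdiff3': 7, 'meld': 5, 'internal:merge': 0}
    ordered = sorted([(-p, t) for t, p in tools.items()])
    assert [t for p, t in ordered] == ['kdiff3', 'meld', 'internal:merge']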
@@ -13,17 +13,19 b' import extensions' | |||||
13 | def moduledoc(file): |
|
13 | def moduledoc(file): | |
14 | '''return the top-level python documentation for the given file |
|
14 | '''return the top-level python documentation for the given file | |
15 |
|
15 | |||
16 | Loosely inspired by pydoc.source_synopsis(), but rewritten to |
|
16 | Loosely inspired by pydoc.source_synopsis(), but rewritten to | |
17 | handle triple quotes and to return the whole text instead of just the synopsis''' |
|
17 | handle triple quotes and to return the whole text instead of just | |
|
18 | the synopsis''' | |||
18 | result = [] |
|
19 | result = [] | |
19 |
|
20 | |||
20 | line = file.readline() |
|
21 | line = file.readline() | |
21 | while line[:1] == '#' or not line.strip(): |
|
22 | while line[:1] == '#' or not line.strip(): | |
22 | line = file.readline() |
|
23 | line = file.readline() | |
23 | if not line: break |
|
24 | if not line: | |
|
25 | break | |||
24 |
|
26 | |||
25 | start = line[:3] |
|
27 | start = line[:3] | |
26 | if start == '"""' or start == "'''": |
|
28 | if start == '\"\"\"' or start == "\'\'\'": | |
27 | line = line[3:] |
|
29 | line = line[3:] | |
28 | while line: |
|
30 | while line: | |
29 | if line.rstrip().endswith(start): |
|
31 | if line.rstrip().endswith(start): | |
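
Read on its own, the scanner above skips leading comments and blank lines, then collects a triple-quoted module docstring. A rough standalone version (a reconstruction under those assumptions, not the exact helper):

    def module_doc(path):
        result = []
        fp = open(path)
        try:
            line = fp.readline()
            while line[:1] == '#' or not line.strip():
                line = fp.readline()
                if not line:
                    return None
            start = line[:3]
            if start not in ('"""', "'''"):
                return None
            line = line[3:]
            while line:
                if line.rstrip().endswith(start):
                    result.append(line.rstrip()[:-3])
                    return ''.join(result)
                result.append(line)
                line = fp.readline()
        finally:
            fp.close()
        return None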
@@ -83,11 +85,15 b' helptable = (' | |||||
83 | (["config"], _("Configuration Files"), loaddoc('config')), |
|
85 | (["config"], _("Configuration Files"), loaddoc('config')), | |
84 | (["dates"], _("Date Formats"), loaddoc('dates')), |
|
86 | (["dates"], _("Date Formats"), loaddoc('dates')), | |
85 | (["patterns"], _("File Name Patterns"), loaddoc('patterns')), |
|
87 | (["patterns"], _("File Name Patterns"), loaddoc('patterns')), | |
86 | (['environment', 'env'], _('Environment Variables'), loaddoc('environment')), |
|
88 | (['environment', 'env'], _('Environment Variables'), | |
87 | (['revs', 'revisions'], _('Specifying Single Revisions'), loaddoc('revisions')), |
|
89 | loaddoc('environment')), | |
88 | (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'), loaddoc('multirevs')), |
|
90 | (['revs', 'revisions'], _('Specifying Single Revisions'), | |
|
91 | loaddoc('revisions')), | |||
|
92 | (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'), | |||
|
93 | loaddoc('multirevs')), | |||
89 | (['diffs'], _('Diff Formats'), loaddoc('diffs')), |
|
94 | (['diffs'], _('Diff Formats'), loaddoc('diffs')), | |
90 | (['templating', 'templates'], _('Template Usage'), loaddoc('templates')), |
|
95 | (['templating', 'templates'], _('Template Usage'), | |
|
96 | loaddoc('templates')), | |||
91 | (['urls'], _('URL Paths'), loaddoc('urls')), |
|
97 | (['urls'], _('URL Paths'), loaddoc('urls')), | |
92 | (["extensions"], _("Using additional features"), extshelp), |
|
98 | (["extensions"], _("Using additional features"), extshelp), | |
93 | ) |
|
99 | ) |
@@ -348,7 +348,8 b' def update(repo, node):' | |||||
348 | def clean(repo, node, show_stats=True): |
|
348 | def clean(repo, node, show_stats=True): | |
349 | """forcibly switch the working directory to node, clobbering changes""" |
|
349 | """forcibly switch the working directory to node, clobbering changes""" | |
350 | stats = _merge.update(repo, node, False, True, None) |
|
350 | stats = _merge.update(repo, node, False, True, None) | |
351 | if show_stats: _showstats(repo, stats) |
|
351 | if show_stats: | |
|
352 | _showstats(repo, stats) | |||
352 | return stats[3] > 0 |
|
353 | return stats[3] > 0 | |
353 |
|
354 | |||
354 | def merge(repo, node, force=None, remind=True): |
|
355 | def merge(repo, node, force=None, remind=True): |
@@ -136,7 +136,7 b' class hgweb(object):' | |||||
136 | style = cmd.rfind('-') |
|
136 | style = cmd.rfind('-') | |
137 | if style != -1: |
|
137 | if style != -1: | |
138 | req.form['style'] = [cmd[:style]] |
|
138 | req.form['style'] = [cmd[:style]] | |
139 | cmd = cmd[style+1:] |
|
139 | cmd = cmd[style + 1:] | |
140 |
|
140 | |||
141 | # avoid accepting e.g. style parameter as command |
|
141 | # avoid accepting e.g. style parameter as command | |
142 | if hasattr(webcommands, cmd): |
|
142 | if hasattr(webcommands, cmd): |
@@ -324,7 +324,7 b' class hgwebdir(object):' | |||||
324 | style, mapfile = templater.stylemap(styles) |
|
324 | style, mapfile = templater.stylemap(styles) | |
325 | if style == styles[0]: |
|
325 | if style == styles[0]: | |
326 | vars['style'] = style |
|
326 | vars['style'] = style | |
327 |
|
327 | |||
328 | start = url[-1] == '?' and '&' or '?' |
|
328 | start = url[-1] == '?' and '&' or '?' | |
329 | sessionvars = webutil.sessionvars(vars, start) |
|
329 | sessionvars = webutil.sessionvars(vars, start) | |
330 | staticurl = config('web', 'staticurl') or url + 'static/' |
|
330 | staticurl = config('web', 'staticurl') or url + 'static/' |
@@ -228,7 +228,7 b' def changelog(web, req, tmpl, shortlog=F' | |||||
228 | start = max(0, pos - revcount + 1) |
|
228 | start = max(0, pos - revcount + 1) | |
229 | end = min(count, start + revcount) |
|
229 | end = min(count, start + revcount) | |
230 | pos = end - 1 |
|
230 | pos = end - 1 | |
231 | parity = paritygen(web.stripecount, offset=start-end) |
|
231 | parity = paritygen(web.stripecount, offset=start - end) | |
232 |
|
232 | |||
233 | changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx) |
|
233 | changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx) | |
234 |
|
234 | |||
@@ -334,7 +334,7 b' def manifest(web, req, tmpl):' | |||||
334 | emptydirs = [] |
|
334 | emptydirs = [] | |
335 | h = dirs[d] |
|
335 | h = dirs[d] | |
336 | while isinstance(h, dict) and len(h) == 1: |
|
336 | while isinstance(h, dict) and len(h) == 1: | |
337 | k,v = h.items()[0] |
|
337 | k, v = h.items()[0] | |
338 | if v: |
|
338 | if v: | |
339 | emptydirs.append(k) |
|
339 | emptydirs.append(k) | |
340 | h = v |
|
340 | h = v | |
@@ -378,9 +378,9 b' def tags(web, req, tmpl):' | |||||
378 |
|
378 | |||
379 | return tmpl("tags", |
|
379 | return tmpl("tags", | |
380 | node=hex(web.repo.changelog.tip()), |
|
380 | node=hex(web.repo.changelog.tip()), | |
381 | entries=lambda **x: entries(False,0, **x), |
|
381 | entries=lambda **x: entries(False, 0, **x), | |
382 | entriesnotip=lambda **x: entries(True,0, **x), |
|
382 | entriesnotip=lambda **x: entries(True, 0, **x), | |
383 | latestentry=lambda **x: entries(True,1, **x)) |
|
383 | latestentry=lambda **x: entries(True, 1, **x)) | |
384 |
|
384 | |||
385 | def branches(web, req, tmpl): |
|
385 | def branches(web, req, tmpl): | |
386 | b = web.repo.branchtags() |
|
386 | b = web.repo.branchtags() | |
@@ -437,14 +437,14 b' def summary(web, req, tmpl):' | |||||
437 |
|
437 | |||
438 | b = web.repo.branchtags() |
|
438 | b = web.repo.branchtags() | |
439 | l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()] |
|
439 | l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()] | |
440 | for r,n,t in sorted(l): |
|
440 | for r, n, t in sorted(l): | |
441 | yield {'parity': parity.next(), |
|
441 | yield {'parity': parity.next(), | |
442 | 'branch': t, |
|
442 | 'branch': t, | |
443 | 'node': hex(n), |
|
443 | 'node': hex(n), | |
444 | 'date': web.repo[n].date()} |
|
444 | 'date': web.repo[n].date()} | |
445 |
|
445 | |||
446 | def changelist(**map): |
|
446 | def changelist(**map): | |
447 | parity = paritygen(web.stripecount, offset=start-end) |
|
447 | parity = paritygen(web.stripecount, offset=start - end) | |
448 | l = [] # build a list in forward order for efficiency |
|
448 | l = [] # build a list in forward order for efficiency | |
449 | for i in xrange(start, end): |
|
449 | for i in xrange(start, end): | |
450 | ctx = web.repo[i] |
|
450 | ctx = web.repo[i] | |
@@ -600,7 +600,7 b' def filelog(web, req, tmpl):' | |||||
600 | count = fctx.filerev() + 1 |
|
600 | count = fctx.filerev() + 1 | |
601 | start = max(0, fctx.filerev() - revcount + 1) # first rev on this page |
|
601 | start = max(0, fctx.filerev() - revcount + 1) # first rev on this page | |
602 | end = min(count, start + revcount) # last rev on this page |
|
602 | end = min(count, start + revcount) # last rev on this page | |
603 | parity = paritygen(web.stripecount, offset=start-end) |
|
603 | parity = paritygen(web.stripecount, offset=start - end) | |
604 |
|
604 | |||
605 | def entries(limit=0, **map): |
|
605 | def entries(limit=0, **map): | |
606 | l = [] |
|
606 | l = [] |
@@ -59,7 +59,7 b' def revnavgen(pos, pagelen, limit, nodef' | |||||
59 | yield {"label": label, "node": node} |
|
59 | yield {"label": label, "node": node} | |
60 | return f |
|
60 | return f | |
61 |
|
61 | |||
62 | return (dict(before=gen(navbefore), after=gen(navafter)), |
|
62 | return (dict(before=gen(navbefore), after=gen(navafter)),) | |
63 |
|
63 | |||
64 | def _siblings(siblings=[], hiderev=None): |
|
64 | def _siblings(siblings=[], hiderev=None): | |
65 | siblings = [s for s in siblings if s.node() != nullid] |
|
65 | siblings = [s for s in siblings if s.node() != nullid] |
@@ -107,9 +107,10 b' class httprepository(repo.repository):' | |||||
107 | proto.startswith('text/plain') or |
|
107 | proto.startswith('text/plain') or | |
108 | proto.startswith('application/hg-changegroup')): |
|
108 | proto.startswith('application/hg-changegroup')): | |
109 | self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu)) |
|
109 | self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu)) | |
110 | raise error.RepoError(_("'%s' does not appear to be an hg repository:\n" |
|
110 | raise error.RepoError( | |
111 | "---%%<--- (%s)\n%s\n---%%<---\n") |
|
111 | _("'%s' does not appear to be an hg repository:\n" | |
112 | % (safeurl, proto, resp.read())) |
|
112 | "---%%<--- (%s)\n%s\n---%%<---\n") | |
|
113 | % (safeurl, proto, resp.read())) | |||
113 |
|
114 | |||
114 | if proto.startswith('application/mercurial-'): |
|
115 | if proto.startswith('application/mercurial-'): | |
115 | try: |
|
116 | try: | |
@@ -171,7 +172,7 b' class httprepository(repo.repository):' | |||||
171 | n = " ".join(map(hex, nodes)) |
|
172 | n = " ".join(map(hex, nodes)) | |
172 | d = self.do_read("branches", nodes=n) |
|
173 | d = self.do_read("branches", nodes=n) | |
173 | try: |
|
174 | try: | |
174 | br = [ |
|
175 | br = [tuple(map(bin, b.split(" "))) for b in d.splitlines()] | |
175 | return br |
|
176 | return br | |
176 | except: |
|
177 | except: | |
177 | raise error.ResponseError(_("unexpected response:"), d) |
|
178 | raise error.ResponseError(_("unexpected response:"), d) | |
@@ -183,7 +184,8 b' class httprepository(repo.repository):' | |||||
183 | n = " ".join(["-".join(map(hex, p)) for p in pairs[i:i + batch]]) |
|
184 | n = " ".join(["-".join(map(hex, p)) for p in pairs[i:i + batch]]) | |
184 | d = self.do_read("between", pairs=n) |
|
185 | d = self.do_read("between", pairs=n) | |
185 | try: |
|
186 | try: | |
186 | r += [ |
|
187 | r += [l and map(bin, l.split(" ")) or [] | |
|
188 | for l in d.splitlines()] | |||
187 | except: |
|
189 | except: | |
188 | raise error.ResponseError(_("unexpected response:"), d) |
|
190 | raise error.ResponseError(_("unexpected response:"), d) | |
189 | return r |
|
191 | return r |
@@ -120,7 +120,8 b' import urllib2' | |||||
120 | DEBUG = None |
|
120 | DEBUG = None | |
121 |
|
121 | |||
122 | import sys |
|
122 | import sys | |
123 | if sys.version_info < (2, 4): |
|
123 | if sys.version_info < (2, 4): | |
|
124 | HANDLE_ERRORS = 1 | |||
124 | else: HANDLE_ERRORS = 0 |
|
125 | else: HANDLE_ERRORS = 0 | |
125 |
|
126 | |||
126 | class ConnectionManager: |
|
127 | class ConnectionManager: | |
@@ -137,7 +138,8 b' class ConnectionManager:' | |||||
137 | def add(self, host, connection, ready): |
|
138 | def add(self, host, connection, ready): | |
138 | self._lock.acquire() |
|
139 | self._lock.acquire() | |
139 | try: |
|
140 | try: | |
140 | if not host in self._hostmap: |
|
141 | if not host in self._hostmap: | |
|
142 | self._hostmap[host] = [] | |||
141 | self._hostmap[host].append(connection) |
|
143 | self._hostmap[host].append(connection) | |
142 | self._connmap[connection] = host |
|
144 | self._connmap[connection] = host | |
143 | self._readymap[connection] = ready |
|
145 | self._readymap[connection] = ready | |
@@ -160,8 +162,10 b' class ConnectionManager:' | |||||
160 | self._lock.release() |
|
162 | self._lock.release() | |
161 |
|
163 | |||
162 | def set_ready(self, connection, ready): |
|
164 | def set_ready(self, connection, ready): | |
163 | try: self._readymap[connection] = ready |
|
165 | try: | |
164 | except KeyError: pass |
|
166 | self._readymap[connection] = ready | |
|
167 | except KeyError: | |||
|
168 | pass | |||
165 |
|
169 | |||
166 | def get_ready_conn(self, host): |
|
170 | def get_ready_conn(self, host): | |
167 | conn = None |
|
171 | conn = None | |
@@ -214,7 +218,8 b' class KeepAliveHandler:' | |||||
214 | self._cm.set_ready(connection, 1) |
|
218 | self._cm.set_ready(connection, 1) | |
215 |
|
219 | |||
216 | def _remove_connection(self, host, connection, close=0): |
|
220 | def _remove_connection(self, host, connection, close=0): | |
217 | if close: |
|
221 | if close: | |
|
222 | connection.close() | |||
218 | self._cm.remove(connection) |
|
223 | self._cm.remove(connection) | |
219 |
|
224 | |||
220 | #### Transaction Execution |
|
225 | #### Transaction Execution | |
@@ -233,7 +238,8 b' class KeepAliveHandler:' | |||||
233 |
|
238 | |||
234 | # if this response is non-None, then it worked and we're |
|
239 | # if this response is non-None, then it worked and we're | |
235 | # done. Break out, skipping the else block. |
|
240 | # done. Break out, skipping the else block. | |
236 | if r: |
|
241 | if r: | |
|
242 | break | |||
237 |
|
243 | |||
238 | # connection is bad - possibly closed by server |
|
244 | # connection is bad - possibly closed by server | |
239 | # discard it and ask for the next free connection |
|
245 | # discard it and ask for the next free connection | |
@@ -243,8 +249,9 b' class KeepAliveHandler:' | |||||
243 | else: |
|
249 | else: | |
244 | # no (working) free connections were found. Create a new one. |
|
250 | # no (working) free connections were found. Create a new one. | |
245 | h = http_class(host) |
|
251 | h = http_class(host) | |
246 | if DEBUG: DEBUG.info("creating new connection to %s (%d)", |
|
252 | if DEBUG: | |
247 | host, id(h)) |
|
253 | DEBUG.info("creating new connection to %s (%d)", | |
|
254 | host, id(h)) | |||
248 | self._cm.add(host, h, 0) |
|
255 | self._cm.add(host, h, 0) | |
249 | self._start_transaction(h, req) |
|
256 | self._start_transaction(h, req) | |
250 | r = h.getresponse() |
|
257 | r = h.getresponse() | |
@@ -252,9 +259,11 b' class KeepAliveHandler:' | |||||
252 | raise urllib2.URLError(err) |
|
259 | raise urllib2.URLError(err) | |
253 |
|
260 | |||
254 | # if not a persistent connection, don't try to reuse it |
|
261 | # if not a persistent connection, don't try to reuse it | |
255 | if r.will_close: |
|
262 | if r.will_close: | |
|
263 | self._cm.remove(h) | |||
256 |
|
264 | |||
257 | if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason) |
|
265 | if DEBUG: | |
|
266 | DEBUG.info("STATUS: %s, %s", r.status, r.reason) | |||
258 | r._handler = self |
|
267 | r._handler = self | |
259 | r._host = host |
|
268 | r._host = host | |
260 | r._url = req.get_full_url() |
|
269 | r._url = req.get_full_url() | |
@@ -293,8 +302,9 b' class KeepAliveHandler:' | |||||
293 | # same exception was raised, etc. The tradeoff is |
|
302 | # same exception was raised, etc. The tradeoff is | |
294 | # that it's now possible this call will raise |
|
303 | # that it's now possible this call will raise | |
295 | # a DIFFERENT exception |
|
304 | # a DIFFERENT exception | |
296 | if DEBUG: DEBUG.error("unexpected exception - closing " + \ |
|
305 | if DEBUG: | |
297 | "connection to %s (%d)", host, id(h)) |
|
306 | DEBUG.error("unexpected exception - closing " | |
|
307 | "connection to %s (%d)", host, id(h)) | |||
298 | self._cm.remove(h) |
|
308 | self._cm.remove(h) | |
299 | h.close() |
|
309 | h.close() | |
300 | raise |
|
310 | raise | |
@@ -304,11 +314,13 b' class KeepAliveHandler:' | |||||
304 | # bad header back. This is most likely to happen if |
|
314 | # bad header back. This is most likely to happen if | |
305 | # the socket has been closed by the server since we |
|
315 | # the socket has been closed by the server since we | |
306 | # last used the connection. |
|
316 | # last used the connection. | |
307 | if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)", |
|
317 | if DEBUG: | |
308 | host, id(h)) |
|
318 | DEBUG.info("failed to re-use connection to %s (%d)", | |
|
319 | host, id(h)) | |||
309 | r = None |
|
320 | r = None | |
310 | else: |
|
321 | else: | |
311 | if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h)) |
|
322 | if DEBUG: | |
|
323 | DEBUG.info("re-using connection to %s (%d)", host, id(h)) | |||
312 |
|
324 | |||
313 | return r |
|
325 | return r | |
314 |
|
326 | |||
@@ -319,7 +331,7 b' class KeepAliveHandler:' | |||||
319 | if sys.version_info >= (2, 4): |
|
331 | if sys.version_info >= (2, 4): | |
320 | headers.update(req.unredirected_hdrs) |
|
332 | headers.update(req.unredirected_hdrs) | |
321 | headers.update(self.parent.addheaders) |
|
333 | headers.update(self.parent.addheaders) | |
322 | headers = dict((n.lower(), v) for n,v in headers.items()) |
|
334 | headers = dict((n.lower(), v) for n, v in headers.items()) | |
323 | skipheaders = {} |
|
335 | skipheaders = {} | |
324 | for n in ('host', 'accept-encoding'): |
|
336 | for n in ('host', 'accept-encoding'): | |
325 | if n in headers: |
|
337 | if n in headers: | |
@@ -477,13 +489,18 b' class HTTPResponse(httplib.HTTPResponse)' | |||||
477 | i = self._rbuf.find('\n') |
|
489 | i = self._rbuf.find('\n') | |
478 | while i < 0 and not (0 < limit <= len(self._rbuf)): |
|
490 | while i < 0 and not (0 < limit <= len(self._rbuf)): | |
479 | new = self._raw_read(self._rbufsize) |
|
491 | new = self._raw_read(self._rbufsize) | |
480 | if not new: |
|
492 | if not new: | |
|
493 | break | |||
481 | i = new.find('\n') |
|
494 | i = new.find('\n') | |
482 | if i >= 0: |
|
495 | if i >= 0: | |
|
496 | i = i + len(self._rbuf) | |||
483 | self._rbuf = self._rbuf + new |
|
497 | self._rbuf = self._rbuf + new | |
484 | if i < 0: |
|
498 | if i < 0: | |
485 | else: i = i+1 |
|
499 | i = len(self._rbuf) | |
486 | if 0 <= limit < len(self._rbuf): i = limit |
|
500 | else: | |
|
501 | i = i + 1 | |||
|
502 | if 0 <= limit < len(self._rbuf): | |||
|
503 | i = limit | |||
487 | data, self._rbuf = self._rbuf[:i], self._rbuf[i:] |
|
504 | data, self._rbuf = self._rbuf[:i], self._rbuf[i:] | |
488 | return data |
|
505 | return data | |
489 |
|
506 | |||
@@ -492,7 +509,8 b' class HTTPResponse(httplib.HTTPResponse)' | |||||
492 | list = [] |
|
509 | list = [] | |
493 | while 1: |
|
510 | while 1: | |
494 | line = self.readline() |
|
511 | line = self.readline() | |
495 | if not line: |
|
512 | if not line: | |
|
513 | break | |||
496 | list.append(line) |
|
514 | list.append(line) | |
497 | total += len(line) |
|
515 | total += len(line) | |
498 | if sizehint and total >= sizehint: |
|
516 | if sizehint and total >= sizehint: | |
@@ -528,9 +546,10 b' def safesend(self, str):' | |||||
528 | if self.debuglevel > 0: |
|
546 | if self.debuglevel > 0: | |
529 | print "send:", repr(str) |
|
547 | print "send:", repr(str) | |
530 | try: |
|
548 | try: | |
531 | blocksize=8192 |
|
549 | blocksize = 8192 | |
532 | if hasattr(str,'read') : |
|
550 | if hasattr(str,'read') : | |
533 | if self.debuglevel > 0: |
|
551 | if self.debuglevel > 0: | |
|
552 | print "sendIng a read()able" | |||
534 | data=str.read(blocksize) |
|
553 | data=str.read(blocksize) | |
535 | while data: |
|
554 | while data: | |
536 | self.sock.sendall(data) |
|
555 | self.sock.sendall(data) | |
@@ -588,8 +607,10 b' def error_handler(url):' | |||||
588 | fo = urllib2.urlopen(url) |
|
607 | fo = urllib2.urlopen(url) | |
589 | fo.read() |
|
608 | fo.read() | |
590 | fo.close() |
|
609 | fo.close() | |
591 | try: status, reason = fo.status, fo.reason |
|
610 | try: | |
592 | except AttributeError: status, reason = None, None |
|
611 | status, reason = fo.status, fo.reason | |
|
612 | except AttributeError: | |||
|
613 | status, reason = None, None | |||
593 | except IOError, e: |
|
614 | except IOError, e: | |
594 | print " EXCEPTION: %s" % e |
|
615 | print " EXCEPTION: %s" % e | |
595 | raise |
|
616 | raise | |
@@ -635,7 +656,8 b' def continuity(url):' | |||||
635 | foo = '' |
|
656 | foo = '' | |
636 | while 1: |
|
657 | while 1: | |
637 | f = fo.readline() |
|
658 | f = fo.readline() | |
638 | if f: |
|
659 | if f: | |
|
660 | foo = foo + f | |||
639 | else: break |
|
661 | else: break | |
640 | fo.close() |
|
662 | fo.close() | |
641 | m = md5.new(foo) |
|
663 | m = md5.new(foo) | |
@@ -657,14 +679,15 b' def comp(N, url):' | |||||
657 | urllib2.install_opener(opener) |
|
679 | urllib2.install_opener(opener) | |
658 | t2 = fetch(N, url) |
|
680 | t2 = fetch(N, url) | |
659 | print ' TIME: %.3f s' % t2 |
|
681 | print ' TIME: %.3f s' % t2 | |
660 | print ' improvement factor: %.2f' % (t1/t2 |
|
682 | print ' improvement factor: %.2f' % (t1 / t2) | |
661 |
|
683 | |||
662 | def fetch(N, url, delay=0): |
|
684 | def fetch(N, url, delay=0): | |
663 | import time |
|
685 | import time | |
664 | lens = [] |
|
686 | lens = [] | |
665 | starttime = time.time() |
|
687 | starttime = time.time() | |
666 | for i in range(N): |
|
688 | for i in range(N): | |
667 | if delay and i > 0: |
|
689 | if delay and i > 0: | |
|
690 | time.sleep(delay) | |||
668 | fo = urllib2.urlopen(url) |
|
691 | fo = urllib2.urlopen(url) | |
669 | foo = fo.read() |
|
692 | foo = fo.read() | |
670 | fo.close() |
|
693 | fo.close() | |
@@ -683,7 +706,8 b' def test_timeout(url):' | |||||
683 | global DEBUG |
|
706 | global DEBUG | |
684 | dbbackup = DEBUG |
|
707 | dbbackup = DEBUG | |
685 | class FakeLogger: |
|
708 | class FakeLogger: | |
686 | def debug(self, msg, *args): |
|
709 | def debug(self, msg, *args): | |
|
710 | print msg % args | |||
687 | info = warning = error = debug |
|
711 | info = warning = error = debug | |
688 | DEBUG = FakeLogger() |
|
712 | DEBUG = FakeLogger() | |
689 | print " fetching the file to establish a connection" |
|
713 | print " fetching the file to establish a connection" |
@@ -319,7 +319,7 b' class localrepository(repo.repository):' | |||||
319 | # TODO: rename this function? |
|
319 | # TODO: rename this function? | |
320 | tiprev = len(self) - 1 |
|
320 | tiprev = len(self) - 1 | |
321 | if lrev != tiprev: |
|
321 | if lrev != tiprev: | |
322 | self._updatebranchcache(partial, lrev+1, tiprev+1) |
|
322 | self._updatebranchcache(partial, lrev + 1, tiprev + 1) | |
323 | self._writebranchcache(partial, self.changelog.tip(), tiprev) |
|
323 | self._writebranchcache(partial, self.changelog.tip(), tiprev) | |
324 |
|
324 | |||
325 | return partial |
|
325 | return partial | |
@@ -377,7 +377,8 b' class localrepository(repo.repository):' | |||||
377 | # invalidate the cache |
|
377 | # invalidate the cache | |
378 | raise ValueError('invalidating branch cache (tip differs)') |
|
378 | raise ValueError('invalidating branch cache (tip differs)') | |
379 | for l in lines: |
|
379 | for l in lines: | |
380 | if not l: |
|
380 | if not l: | |
|
381 | continue | |||
381 | node, label = l.split(" ", 1) |
|
382 | node, label = l.split(" ", 1) | |
382 | partial.setdefault(label.strip(), []).append(bin(node)) |
|
383 | partial.setdefault(label.strip(), []).append(bin(node)) | |
383 | except KeyboardInterrupt: |
|
384 | except KeyboardInterrupt: | |
@@ -562,7 +563,8 b' class localrepository(repo.repository):' | |||||
562 |
|
563 | |||
563 | # abort here if the journal already exists |
|
564 | # abort here if the journal already exists | |
564 | if os.path.exists(self.sjoin("journal")): |
|
565 | if os.path.exists(self.sjoin("journal")): | |
565 | raise error.RepoError(_("abandoned transaction found - run hg recover")) |
|
566 | raise error.RepoError( | |
|
567 | _("abandoned transaction found - run hg recover")) | |||
566 |
|
568 | |||
567 | # save dirstate for rollback |
|
569 | # save dirstate for rollback | |
568 | try: |
|
570 | try: | |
@@ -587,7 +589,8 b' class localrepository(repo.repository):' | |||||
587 | try: |
|
589 | try: | |
588 | if os.path.exists(self.sjoin("journal")): |
|
590 | if os.path.exists(self.sjoin("journal")): | |
589 | self.ui.status(_("rolling back interrupted transaction\n")) |
|
591 | self.ui.status(_("rolling back interrupted transaction\n")) | |
590 | transaction.rollback(self.sopener, self.sjoin("journal"), |
|
592 | transaction.rollback(self.sopener, self.sjoin("journal"), | |
|
593 | self.ui.warn) | |||
591 | self.invalidate() |
|
594 | self.invalidate() | |
592 | return True |
|
595 | return True | |
593 | else: |
|
596 | else: | |
@@ -603,7 +606,8 b' class localrepository(repo.repository):' | |||||
603 | lock = self.lock() |
|
606 | lock = self.lock() | |
604 | if os.path.exists(self.sjoin("undo")): |
|
607 | if os.path.exists(self.sjoin("undo")): | |
605 | self.ui.status(_("rolling back last transaction\n")) |
|
608 | self.ui.status(_("rolling back last transaction\n")) | |
606 | transaction.rollback(self.sopener, self.sjoin("undo"), |
|
609 | transaction.rollback(self.sopener, self.sjoin("undo"), | |
|
610 | self.ui.warn) | |||
607 | util.rename(self.join("undo.dirstate"), self.join("dirstate")) |
|
611 | util.rename(self.join("undo.dirstate"), self.join("dirstate")) | |
608 | try: |
|
612 | try: | |
609 | branch = self.opener("undo.branch").read() |
|
613 | branch = self.opener("undo.branch").read() | |
@@ -1339,7 +1343,7 b' class localrepository(repo.repository):' | |||||
1339 | self.ui.debug("request %d: %s\n" % |
|
1343 | self.ui.debug("request %d: %s\n" % | |
1340 | (reqcnt, " ".join(map(short, r)))) |
|
1344 | (reqcnt, " ".join(map(short, r)))) | |
1341 | for p in xrange(0, len(r), 10): |
|
1345 | for p in xrange(0, len(r), 10): | |
1342 | for b in remote.branches(r[p:p+10]): |
|
1346 | for b in remote.branches(r[p:p + 10]): | |
1343 | self.ui.debug("received %s:%s\n" % |
|
1347 | self.ui.debug("received %s:%s\n" % | |
1344 | (short(b[0]), short(b[1]))) |
|
1348 | (short(b[0]), short(b[1]))) | |
1345 | unknown.append(b) |
|
1349 | unknown.append(b) | |
@@ -1609,7 +1613,8 b' class localrepository(repo.repository):' | |||||
1609 | ret = self.prepush(remote, force, revs) |
|
1613 | ret = self.prepush(remote, force, revs) | |
1610 | if ret[0] is not None: |
|
1614 | if ret[0] is not None: | |
1611 | cg, remote_heads = ret |
|
1615 | cg, remote_heads = ret | |
1612 | if force: |
|
1616 | if force: | |
|
1617 | remote_heads = ['force'] | |||
1613 | return remote.unbundle(cg, remote_heads, 'push') |
|
1618 | return remote.unbundle(cg, remote_heads, 'push') | |
1614 | return ret[1] |
|
1619 | return ret[1] | |
1615 |
|
1620 |
@@ -11,8 +11,10 b' import array, struct' | |||||
11 |
|
11 | |||
12 | class manifestdict(dict): |
|
12 | class manifestdict(dict): | |
13 | def __init__(self, mapping=None, flags=None): |
|
13 | def __init__(self, mapping=None, flags=None): | |
14 | if mapping is None: |
|
14 | if mapping is None: | |
15 | if flags is None: flags = {} |
|
15 | mapping = {} | |
|
16 | if flags is None: | |||
|
17 | flags = {} | |||
16 | dict.__init__(self, mapping) |
|
18 | dict.__init__(self, mapping) | |
17 | self._flags = flags |
|
19 | self._flags = flags | |
18 | def flags(self, f): |
|
20 | def flags(self, f): | |
@@ -70,7 +72,7 b' class manifest(revlog.revlog):' | |||||
70 | while lo < hi: |
|
72 | while lo < hi: | |
71 | mid = (lo + hi) // 2 |
|
73 | mid = (lo + hi) // 2 | |
72 | start = mid |
|
74 | start = mid | |
73 | while start > 0 and m[start-1] != '\n': |
|
75 | while start > 0 and m[start - 1] != '\n': | |
74 | start -= 1 |
|
76 | start -= 1 | |
75 | end = advance(start, '\0') |
|
77 | end = advance(start, '\0') | |
76 | if m[start:end] < s: |
|
78 | if m[start:end] < s: | |
@@ -85,7 +87,7 b' class manifest(revlog.revlog):' | |||||
85 | if cmp(s, found) == 0: |
|
87 | if cmp(s, found) == 0: | |
86 | # we know that after the null there are 40 bytes of sha1 |
|
88 | # we know that after the null there are 40 bytes of sha1 | |
87 | end = advance(end + 40, '\n') |
|
89 | end = advance(end + 40, '\n') | |
88 | return (lo, end+1) |
|
90 | return (lo, end + 1) | |
89 | else: |
|
91 | else: | |
90 | return (lo, lo) |
|
92 | return (lo, lo) | |
91 |
|
93 |
@@ -125,10 +125,11 b' def _globre(pat):' | |||||
125 | res = '' |
|
125 | res = '' | |
126 | group = 0 |
|
126 | group = 0 | |
127 | escape = re.escape |
|
127 | escape = re.escape | |
128 | def peek(): return i < n and pat[i] |
|
128 | def peek(): | |
|
129 | return i < n and pat[i] | |||
129 | while i < n: |
|
130 | while i < n: | |
130 | c = pat[i] |
|
131 | c = pat[i] | |
131 | i = |
|
132 | i += 1 | |
132 | if c not in '*?[{},\\': |
|
133 | if c not in '*?[{},\\': | |
133 | res += escape(c) |
|
134 | res += escape(c) | |
134 | elif c == '*': |
|
135 | elif c == '*': |
@@ -96,7 +96,8 b' def unidiff(a, ad, b, bd, fn1, fn2, r=No' | |||||
96 | return '\t\n' |
|
96 | return '\t\n' | |
97 | return '\n' |
|
97 | return '\n' | |
98 |
|
98 | |||
99 | if not a and not b: |
|
99 | if not a and not b: | |
|
100 | return "" | |||
100 | epoch = util.datestr((0, 0)) |
|
101 | epoch = util.datestr((0, 0)) | |
101 |
|
102 | |||
102 | if not opts.text and (util.binary(a) or util.binary(b)): |
|
103 | if not opts.text and (util.binary(a) or util.binary(b)): | |
@@ -125,7 +126,8 b' def unidiff(a, ad, b, bd, fn1, fn2, r=No' | |||||
125 | al = splitnewlines(a) |
|
126 | al = splitnewlines(a) | |
126 | bl = splitnewlines(b) |
|
127 | bl = splitnewlines(b) | |
127 | l = list(bunidiff(a, b, al, bl, "a/" + fn1, "b/" + fn2, opts=opts)) |
|
128 | l = list(bunidiff(a, b, al, bl, "a/" + fn1, "b/" + fn2, opts=opts)) | |
128 | if not l: |
|
129 | if not l: | |
|
130 | return "" | |||
129 | # difflib uses a space, rather than a tab |
|
131 | # difflib uses a space, rather than a tab | |
130 | l[0] = "%s%s" % (l[0][:-2], datetag(ad)) |
|
132 | l[0] = "%s%s" % (l[0][:-2], datetag(ad)) | |
131 | l[1] = "%s%s" % (l[1][:-2], datetag(bd)) |
|
133 | l[1] = "%s%s" % (l[1][:-2], datetag(bd)) | |
@@ -182,7 +184,7 b' def bunidiff(t1, t2, l1, l2, header1, he' | |||||
182 | for x in xrange(a2, aend): |
|
184 | for x in xrange(a2, aend): | |
183 | yield ' ' + l1[x] |
|
185 | yield ' ' + l1[x] | |
184 |
|
186 | |||
185 | header = [ |
|
187 | header = ["--- %s\t\n" % header1, "+++ %s\t\n" % header2] | |
186 |
|
188 | |||
187 | if opts.showfunc: |
|
189 | if opts.showfunc: | |
188 | funcre = re.compile('\w') |
|
190 | funcre = re.compile('\w') | |
@@ -203,7 +205,7 b' def bunidiff(t1, t2, l1, l2, header1, he' | |||||
203 | # in the file. If it starts later, old and new below will both be |
|
205 | # in the file. If it starts later, old and new below will both be | |
204 | # empty and we'll continue to the next match. |
|
206 | # empty and we'll continue to the next match. | |
205 | if i > 0: |
|
207 | if i > 0: | |
206 | s = diff[i-1] |
|
208 | s = diff[i - 1] | |
207 | else: |
|
209 | else: | |
208 | s = [0, 0, 0, 0] |
|
210 | s = [0, 0, 0, 0] | |
209 | delta = [] |
|
211 | delta = [] | |
@@ -246,11 +248,11 b' def bunidiff(t1, t2, l1, l2, header1, he' | |||||
246 | delta = hunk[4] |
|
248 | delta = hunk[4] | |
247 | else: |
|
249 | else: | |
248 | # create a new hunk |
|
250 | # create a new hunk | |
249 | hunk = [ |
|
251 | hunk = [astart, a2, bstart, b2, delta] | |
250 |
|
252 | |||
251 | delta[len(delta):] = [ |
|
253 | delta[len(delta):] = [' ' + x for x in l1[astart:a1]] | |
252 | delta[len(delta):] = [ |
|
254 | delta[len(delta):] = ['-' + x for x in old] | |
253 | delta[len(delta):] = [ |
|
255 | delta[len(delta):] = ['+' + x for x in new] | |
254 |
|
256 | |||
255 | if hunk: |
|
257 | if hunk: | |
256 | for x in yieldhunk(hunk, header): |
|
258 | for x in yieldhunk(hunk, header): |
@@ -135,8 +135,10 b' def manifestmerge(repo, p1, p2, pa, over' | |||||
135 | _(" conflicting flags for %s\n" |
|
135 | _(" conflicting flags for %s\n" | |
136 | "(n)one, e(x)ec or sym(l)ink?") % f, |
|
136 | "(n)one, e(x)ec or sym(l)ink?") % f, | |
137 | (_("&None"), _("E&xec"), _("Sym&link")), 0) |
|
137 | (_("&None"), _("E&xec"), _("Sym&link")), 0) | |
138 | if r == 1: |
|
138 | if r == 1: | |
139 |
|
|
139 | return "x" # Exec | |
|
140 | if r == 2: | |||
|
141 | return "l" # Symlink | |||
140 | return "" |
|
142 | return "" | |
141 | if m and m != a: # changed from a to m |
|
143 | if m and m != a: # changed from a to m | |
142 | return m |
|
144 | return m |
@@ -78,9 +78,9 b' def findliteralblocks(blocks):' | |||||
78 | # | indented literal block | |
|
78 | # | indented literal block | | |
79 | # +---------------------------+ |
|
79 | # +---------------------------+ | |
80 | blocks[i]['type'] = 'paragraph' |
|
80 | blocks[i]['type'] = 'paragraph' | |
81 | if blocks[i]['lines'][-1].endswith('::') and i+1 < len(blocks): |
|
81 | if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks): | |
82 | indent = blocks[i]['indent'] |
|
82 | indent = blocks[i]['indent'] | |
83 | adjustment = blocks[i+1]['indent'] - indent |
|
83 | adjustment = blocks[i + 1]['indent'] - indent | |
84 |
|
84 | |||
85 | if blocks[i]['lines'] == ['::']: |
|
85 | if blocks[i]['lines'] == ['::']: | |
86 | # Expanded form: remove block |
|
86 | # Expanded form: remove block | |
@@ -104,9 +104,9 b' def findliteralblocks(blocks):' | |||||
104 | adjustment -= m.end() |
|
104 | adjustment -= m.end() | |
105 |
|
105 | |||
106 | # Mark the following indented blocks. |
|
106 | # Mark the following indented blocks. | |
107 | while i+1 < len(blocks) and blocks[i+1]['indent'] > indent: |
|
107 | while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent: | |
108 | blocks[i+1]['type'] = 'literal' |
|
108 | blocks[i + 1]['type'] = 'literal' | |
109 | blocks[i+1]['indent'] -= adjustment |
|
109 | blocks[i + 1]['indent'] -= adjustment | |
110 | i += 1 |
|
110 | i += 1 | |
111 | i += 1 |
|
111 | i += 1 | |
112 | return blocks |
|
112 | return blocks | |
@@ -133,7 +133,7 b' def splitparagraphs(blocks):' | |||||
133 | item (but only if singleline is True). |
|
133 | item (but only if singleline is True). | |
134 | """ |
|
134 | """ | |
135 | line1 = lines[i] |
|
135 | line1 = lines[i] | |
136 | line2 = i+1 < len(lines) and lines[i+1] or '' |
|
136 | line2 = i + 1 < len(lines) and lines[i + 1] or '' | |
137 | if not itemre.match(line1): |
|
137 | if not itemre.match(line1): | |
138 | return False |
|
138 | return False | |
139 | if singleline: |
|
139 | if singleline: | |
@@ -153,7 +153,7 b' def splitparagraphs(blocks):' | |||||
153 | items.append(dict(type=type, lines=[], |
|
153 | items.append(dict(type=type, lines=[], | |
154 | indent=blocks[i]['indent'])) |
|
154 | indent=blocks[i]['indent'])) | |
155 | items[-1]['lines'].append(line) |
|
155 | items[-1]['lines'].append(line) | |
156 | blocks[i:i+1] = items |
|
156 | blocks[i:i + 1] = items | |
157 | break |
|
157 | break | |
158 | i += 1 |
|
158 | i += 1 | |
159 | return blocks |
|
159 | return blocks | |
@@ -221,7 +221,7 b' def addmargins(blocks):' | |||||
221 | """ |
|
221 | """ | |
222 | i = 1 |
|
222 | i = 1 | |
223 | while i < len(blocks): |
|
223 | while i < len(blocks): | |
224 | if (blocks[i]['type'] == blocks[i-1]['type'] and |
|
224 | if (blocks[i]['type'] == blocks[i - 1]['type'] and | |
225 | blocks[i]['type'] in ('bullet', 'option', 'field', 'definition')): |
|
225 | blocks[i]['type'] in ('bullet', 'option', 'field', 'definition')): | |
226 | i += 1 |
|
226 | i += 1 | |
227 | else: |
|
227 | else: |
@@ -35,13 +35,13 b' typedef int Py_ssize_t;' | |||||
35 | #endif |
|
35 | #endif | |
36 |
|
36 | |||
37 | #ifdef _WIN32 |
|
37 | #ifdef _WIN32 | |
38 | # |
|
38 | #ifdef _MSC_VER | |
39 | /* msvc 6.0 has problems */ |
|
39 | /* msvc 6.0 has problems */ | |
40 | # |
|
40 | #define inline __inline | |
41 | typedef unsigned long uint32_t; |
|
41 | typedef unsigned long uint32_t; | |
42 | # |
|
42 | #else | |
43 | # |
|
43 | #include <stdint.h> | |
44 | # |
|
44 | #endif | |
45 | static uint32_t ntohl(uint32_t x) |
|
45 | static uint32_t ntohl(uint32_t x) | |
46 | { |
|
46 | { | |
47 | return ((x & 0x000000ffUL) << 24) | |
|
47 | return ((x & 0x000000ffUL) << 24) | | |
@@ -51,13 +51,13 b' static uint32_t ntohl(uint32_t x)' | |||||
51 | } |
|
51 | } | |
52 | #else |
|
52 | #else | |
53 | /* not windows */ |
|
53 | /* not windows */ | |
54 | # |
|
54 | #include <sys/types.h> | |
55 | # |
|
55 | #if defined __BEOS__ && !defined __HAIKU__ | |
56 | # |
|
56 | #include <ByteOrder.h> | |
57 | # |
|
57 | #else | |
58 | # |
|
58 | #include <arpa/inet.h> | |
59 | # |
|
59 | #endif | |
60 | # |
|
60 | #include <inttypes.h> | |
61 | #endif |
|
61 | #endif | |
62 |
|
62 | |||
63 | static char mpatch_doc[] = "Efficient binary patching."; |
|
63 | static char mpatch_doc[] = "Efficient binary patching."; |
@@ -14,13 +14,13 b'' | |||||
14 | #include <string.h> |
|
14 | #include <string.h> | |
15 |
|
15 | |||
16 | #ifdef _WIN32 |
|
16 | #ifdef _WIN32 | |
17 | # |
|
17 | #include <windows.h> | |
18 | # |
|
18 | #include <io.h> | |
19 | #else |
|
19 | #else | |
20 | # |
|
20 | #include <dirent.h> | |
21 | # |
|
21 | #include <sys/stat.h> | |
22 | # |
|
22 | #include <sys/types.h> | |
23 | # |
|
23 | #include <unistd.h> | |
24 | #endif |
|
24 | #endif | |
25 |
|
25 | |||
26 | /* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */ |
|
26 | /* some platforms lack the PATH_MAX definition (eg. GNU/Hurd) */ | |
@@ -174,7 +174,7 b' static PyObject *make_item(const WIN32_F' | |||||
174 | stp->st_mtime = to_python_time(&fd->ftLastWriteTime); |
|
174 | stp->st_mtime = to_python_time(&fd->ftLastWriteTime); | |
175 | stp->st_ctime = to_python_time(&fd->ftCreationTime); |
|
175 | stp->st_ctime = to_python_time(&fd->ftCreationTime); | |
176 | if (kind == _S_IFREG) |
|
176 | if (kind == _S_IFREG) | |
177 | stp->st_size = |
|
177 | stp->st_size = ((__int64)fd->nFileSizeHigh << 32) | |
178 | + fd->nFileSizeLow; |
|
178 | + fd->nFileSizeLow; | |
179 | return Py_BuildValue("siN", fd->cFileName, |
|
179 | return Py_BuildValue("siN", fd->cFileName, | |
180 | kind, py_st); |
|
180 | kind, py_st); | |
@@ -189,7 +189,7 b' static PyObject *_listdir(char *path, in' | |||||
189 | char *pattern; |
|
189 | char *pattern; | |
190 |
|
190 | |||
191 | /* build the path + \* pattern string */ |
|
191 | /* build the path + \* pattern string */ | |
192 | pattern = malloc(plen+3); /* path + \* + \0 */ |
|
192 | pattern = malloc(plen + 3); /* path + \* + \0 */ | |
193 | if (!pattern) { |
|
193 | if (!pattern) { | |
194 | PyErr_NoMemory(); |
|
194 | PyErr_NoMemory(); | |
195 | goto error_nomem; |
|
195 | goto error_nomem; | |
@@ -485,7 +485,7 b' static PyObject *posixfile(PyObject *sel' | |||||
485 | goto bail; |
|
485 | goto bail; | |
486 | } |
|
486 | } | |
487 |
|
487 | |||
488 | fd = _open_osfhandle((intptr_t) |
|
488 | fd = _open_osfhandle((intptr_t)handle, flags); | |
489 | if (fd == -1) { |
|
489 | if (fd == -1) { | |
490 | CloseHandle(handle); |
|
490 | CloseHandle(handle); | |
491 | PyErr_SetFromErrnoWithFilename(PyExc_IOError, name); |
|
491 | PyErr_SetFromErrnoWithFilename(PyExc_IOError, name); |
@@ -131,14 +131,14 b' quit:' | |||||
131 | } |
|
131 | } | |
132 |
|
132 | |||
133 | #ifdef _WIN32 |
|
133 | #ifdef _WIN32 | |
134 | # |
|
134 | #ifdef _MSC_VER | |
135 | /* msvc 6.0 has problems */ |
|
135 | /* msvc 6.0 has problems */ | |
136 | # |
|
136 | #define inline __inline | |
137 | typedef unsigned long uint32_t; |
|
137 | typedef unsigned long uint32_t; | |
138 | typedef unsigned __int64 uint64_t; |
|
138 | typedef unsigned __int64 uint64_t; | |
139 | # |
|
139 | #else | |
140 | # |
|
140 | #include <stdint.h> | |
141 | # |
|
141 | #endif | |
142 | static uint32_t ntohl(uint32_t x) |
|
142 | static uint32_t ntohl(uint32_t x) | |
143 | { |
|
143 | { | |
144 | return ((x & 0x000000ffUL) << 24) | |
|
144 | return ((x & 0x000000ffUL) << 24) | | |
@@ -148,13 +148,13 b' static uint32_t ntohl(uint32_t x)' | |||||
148 | } |
|
148 | } | |
149 | #else |
|
149 | #else | |
150 | /* not windows */ |
|
150 | /* not windows */ | |
151 | # |
|
151 | #include <sys/types.h> | |
152 | # |
|
152 | #if defined __BEOS__ && !defined __HAIKU__ | |
153 | # |
|
153 | #include <ByteOrder.h> | |
154 | # |
|
154 | #else | |
155 | # |
|
155 | #include <arpa/inet.h> | |
156 | # |
|
156 | #endif | |
157 | # |
|
157 | #include <inttypes.h> | |
158 | #endif |
|
158 | #endif | |
159 |
|
159 | |||
160 | static PyObject *parse_dirstate(PyObject *self, PyObject *args) |
|
160 | static PyObject *parse_dirstate(PyObject *self, PyObject *args) | |
@@ -303,16 +303,16 b' static int _parse_index_ng (const char *' | |||||
303 | if (n == 0) /* mask out version number for the first entry */ |
|
303 | if (n == 0) /* mask out version number for the first entry */ | |
304 | offset_flags &= 0xFFFF; |
|
304 | offset_flags &= 0xFFFF; | |
305 | else { |
|
305 | else { | |
306 | uint32_t offset_high = ntohl(*((uint32_t *) |
|
306 | uint32_t offset_high = ntohl(*((uint32_t *)decode)); | |
307 | offset_flags |= ((uint64_t) |
|
307 | offset_flags |= ((uint64_t)offset_high) << 32; | |
308 | } |
|
308 | } | |
309 |
|
309 | |||
310 | comp_len = ntohl(*((uint32_t *) |
|
310 | comp_len = ntohl(*((uint32_t *)(decode + 8))); | |
311 | uncomp_len = ntohl(*((uint32_t *) |
|
311 | uncomp_len = ntohl(*((uint32_t *)(decode + 12))); | |
312 | base_rev = ntohl(*((uint32_t *) |
|
312 | base_rev = ntohl(*((uint32_t *)(decode + 16))); | |
313 | link_rev = ntohl(*((uint32_t *) |
|
313 | link_rev = ntohl(*((uint32_t *)(decode + 20))); | |
314 | parent_1 = ntohl(*((uint32_t *) |
|
314 | parent_1 = ntohl(*((uint32_t *)(decode + 24))); | |
315 | parent_2 = ntohl(*((uint32_t *) |
|
315 | parent_2 = ntohl(*((uint32_t *)(decode + 28))); | |
316 | c_node_id = decode + 32; |
|
316 | c_node_id = decode + 32; | |
317 |
|
317 | |||
318 | entry = _build_idx_entry(nodemap, n, offset_flags, |
|
318 | entry = _build_idx_entry(nodemap, n, offset_flags, |
@@ -78,7 +78,7 b' def extract(ui, fileobj):' | |||||
78 | if subject.startswith('[PATCH'): |
|
78 | if subject.startswith('[PATCH'): | |
79 | pend = subject.find(']') |
|
79 | pend = subject.find(']') | |
80 | if pend >= 0: |
|
80 | if pend >= 0: | |
81 | subject = subject[pend+1:].lstrip() |
|
81 | subject = subject[pend + 1:].lstrip() | |
82 | subject = subject.replace('\n\t', ' ') |
|
82 | subject = subject.replace('\n\t', ' ') | |
83 | ui.debug('Subject: %s\n' % subject) |
|
83 | ui.debug('Subject: %s\n' % subject) | |
84 | if user: |
|
84 | if user: | |
@@ -472,7 +472,7 b' class patchfile(object):' | |||||
472 | search_start = orig_start + self.skew |
|
472 | search_start = orig_start + self.skew | |
473 |
|
473 | |||
474 | for fuzzlen in xrange(3): |
|
474 | for fuzzlen in xrange(3): | |
475 | for toponly in [ |
|
475 | for toponly in [True, False]: | |
476 | old = h.old(fuzzlen, toponly) |
|
476 | old = h.old(fuzzlen, toponly) | |
477 |
|
477 | |||
478 | cand = self.findlines(old[0][1:], search_start) |
|
478 | cand = self.findlines(old[0][1:], search_start) | |
@@ -497,7 +497,7 b' class patchfile(object):' | |||||
497 | else: |
|
497 | else: | |
498 | msg = _("Hunk #%d succeeded at %d %s" |
|
498 | msg = _("Hunk #%d succeeded at %d %s" | |
499 | "(offset %d lines).\n") |
|
499 | "(offset %d lines).\n") | |
500 | f(msg % (h.number, l+1, fuzzstr, offset)) |
|
500 | f(msg % (h.number, l + 1, fuzzstr, offset)) | |
501 | return fuzzlen |
|
501 | return fuzzlen | |
502 | self.printfile(True) |
|
502 | self.printfile(True) | |
503 | self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start)) |
|
503 | self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start)) | |
@@ -508,7 +508,7 b' class hunk(object):' | |||||
508 | def __init__(self, desc, num, lr, context, create=False, remove=False): |
|
508 | def __init__(self, desc, num, lr, context, create=False, remove=False): | |
509 | self.number = num |
|
509 | self.number = num | |
510 | self.desc = desc |
|
510 | self.desc = desc | |
511 | self.hunk = [ |
|
511 | self.hunk = [desc] | |
512 | self.a = [] |
|
512 | self.a = [] | |
513 | self.b = [] |
|
513 | self.b = [] | |
514 | self.starta = self.lena = None |
|
514 | self.starta = self.lena = None | |
@@ -621,7 +621,7 b' class hunk(object):' | |||||
621 | if l.startswith('\ '): |
|
621 | if l.startswith('\ '): | |
622 | s = self.b[-1][:-1] |
|
622 | s = self.b[-1][:-1] | |
623 | self.b[-1] = s |
|
623 | self.b[-1] = s | |
624 | self.hunk[hunki-1] = s |
|
624 | self.hunk[hunki - 1] = s | |
625 | continue |
|
625 | continue | |
626 | if not l: |
|
626 | if not l: | |
627 | lr.push(l) |
|
627 | lr.push(l) | |
@@ -650,7 +650,7 b' class hunk(object):' | |||||
650 | elif h.startswith('-'): |
|
650 | elif h.startswith('-'): | |
651 | continue |
|
651 | continue | |
652 | else: |
|
652 | else: | |
653 | self.hunk.insert(hunki-1, u) |
|
653 | self.hunk.insert(hunki - 1, u) | |
654 | break |
|
654 | break | |
655 |
|
655 | |||
656 | if not self.a: |
|
656 | if not self.a: | |
@@ -689,15 +689,15 b' class hunk(object):' | |||||
689 | top = 0 |
|
689 | top = 0 | |
690 | bot = 0 |
|
690 | bot = 0 | |
691 | hlen = len(self.hunk) |
|
691 | hlen = len(self.hunk) | |
692 | for x in xrange(hlen-1): |
|
692 | for x in xrange(hlen - 1): | |
693 | # the hunk starts with the @@ line, so use x+1 |
|
693 | # the hunk starts with the @@ line, so use x+1 | |
694 | if self.hunk[x+1][0] == ' ': |
|
694 | if self.hunk[x + 1][0] == ' ': | |
695 | top += 1 |
|
695 | top += 1 | |
696 | else: |
|
696 | else: | |
697 | break |
|
697 | break | |
698 | if not toponly: |
|
698 | if not toponly: | |
699 | for x in xrange(hlen-1): |
|
699 | for x in xrange(hlen - 1): | |
700 | if self.hunk[hlen-bot-1][0] == ' ': |
|
700 | if self.hunk[hlen - bot - 1][0] == ' ': | |
701 | bot += 1 |
|
701 | bot += 1 | |
702 | else: |
|
702 | else: | |
703 | break |
|
703 | break | |
@@ -1025,11 +1025,13 b' def applydiff(ui, fp, changed, strip=1, ' | |||||
1025 | afile, bfile, first_hunk = values |
|
1025 | afile, bfile, first_hunk = values | |
1026 | try: |
|
1026 | try: | |
1027 | if sourcefile: |
|
1027 | if sourcefile: | |
1028 | current_file = patchfile(ui, sourcefile, opener, |
|
1028 | current_file = patchfile(ui, sourcefile, opener, | |
|
1029 | eolmode=eolmode) | |||
1029 | else: |
|
1030 | else: | |
1030 | current_file, missing = selectfile(afile, bfile, |
|
1031 | current_file, missing = selectfile(afile, bfile, | |
1031 | strip) |
|
1032 | first_hunk, strip) | |
1032 | current_file = patchfile(ui, current_file, opener, |
|
1033 | current_file = patchfile(ui, current_file, opener, | |
|
1034 | missing, eolmode) | |||
1033 | except PatchError, err: |
|
1035 | except PatchError, err: | |
1034 | ui.warn(str(err) + '\n') |
|
1036 | ui.warn(str(err) + '\n') | |
1035 | current_file, current_hunk = None, None |
|
1037 | current_file, current_hunk = None, None | |
@@ -1196,7 +1198,8 b' def patch(patchname, ui, strip=1, cwd=No' | |||||
1196 | try: |
|
1198 | try: | |
1197 | return internalpatch(patchname, ui, strip, cwd, files, eolmode) |
|
1199 | return internalpatch(patchname, ui, strip, cwd, files, eolmode) | |
1198 | except NoHunks: |
|
1200 | except NoHunks: | |
1199 | patcher = util.find_exe('gpatch') or util.find_exe('patch') |
|
1201 | patcher = (util.find_exe('gpatch') or util.find_exe('patch') | |
|
1202 | or 'patch') | |||
1200 | ui.debug('no valid hunks found; trying with %r instead\n' % |
|
1203 | ui.debug('no valid hunks found; trying with %r instead\n' % | |
1201 | patcher) |
|
1204 | patcher) | |
1202 | if util.needbinarypatch(): |
|
1205 | if util.needbinarypatch(): | |
@@ -1232,7 +1235,7 b' def b85diff(to, tn):' | |||||
1232 | l = len(text) |
|
1235 | l = len(text) | |
1233 | i = 0 |
|
1236 | i = 0 | |
1234 | while i < l: |
|
1237 | while i < l: | |
1235 | yield text[i:i+csize] |
|
1238 | yield text[i:i + csize] | |
1236 | i += csize |
|
1239 | i += csize | |
1237 |
|
1240 | |||
1238 | tohash = gitindex(to) |
|
1241 | tohash = gitindex(to) | |
@@ -1460,7 +1463,7 b" def export(repo, revs, template='hg-%h.p" | |||||
1460 | fp.write(chunk) |
|
1463 | fp.write(chunk) | |
1461 |
|
1464 | |||
1462 | for seqno, rev in enumerate(revs): |
|
1465 | for seqno, rev in enumerate(revs): | |
1463 | single(rev, seqno+1, fp) |
|
1466 | single(rev, seqno + 1, fp) | |
1464 |
|
1467 | |||
1465 | def diffstatdata(lines): |
|
1468 | def diffstatdata(lines): | |
1466 | filename, adds, removes = None, 0, 0 |
|
1469 | filename, adds, removes = None, 0, 0 | |
@@ -1495,7 +1498,7 b' def diffstat(lines, width=80, git=False)' | |||||
1495 | totaladds += adds |
|
1498 | totaladds += adds | |
1496 | totalremoves += removes |
|
1499 | totalremoves += removes | |
1497 | maxname = max(maxname, len(filename)) |
|
1500 | maxname = max(maxname, len(filename)) | |
1498 | maxtotal = max(maxtotal, adds+removes) |
|
1501 | maxtotal = max(maxtotal, adds + removes) | |
1499 | if isbinary: |
|
1502 | if isbinary: | |
1500 | hasbinary = True |
|
1503 | hasbinary = True | |
1501 |
|
1504 |
@@ -48,7 +48,7 b' def b85decode(text):' | |||||
48 | l = len(text) |
|
48 | l = len(text) | |
49 | out = [] |
|
49 | out = [] | |
50 | for i in range(0, len(text), 5): |
|
50 | for i in range(0, len(text), 5): | |
51 | chunk = text[i:i+5] |
|
51 | chunk = text[i:i + 5] | |
52 | acc = 0 |
|
52 | acc = 0 | |
53 | for j, c in enumerate(chunk): |
|
53 | for j, c in enumerate(chunk): | |
54 | try: |
|
54 | try: |
@@ -33,13 +33,15 b' def _normalizeblocks(a, b, blocks):' | |||||
33 | a2end = a2 + l2 |
|
33 | a2end = a2 + l2 | |
34 | b2end = b2 + l2 |
|
34 | b2end = b2 + l2 | |
35 | if a1end == a2: |
|
35 | if a1end == a2: | |
36 | while a1end+shift < a2end and |
|
36 | while (a1end + shift < a2end and | |
|
37 | a[a1end + shift] == b[b1end + shift]): | |||
37 | shift += 1 |
|
38 | shift += 1 | |
38 | elif b1end == b2: |
|
39 | elif b1end == b2: | |
39 | while b1end+shift < b2end and |
|
40 | while (b1end + shift < b2end and | |
|
41 | a[a1end + shift] == b[b1end + shift]): | |||
40 | shift += 1 |
|
42 | shift += 1 | |
41 | yield a1, b1, l1+shift |
|
43 | yield a1, b1, l1 + shift | |
42 | prev = a2+shift, b2+shift, l2-shift |
|
44 | prev = a2 + shift, b2 + shift, l2 - shift | |
43 | yield prev |
|
45 | yield prev | |
44 |
|
46 | |||
45 | def bdiff(a, b): |
|
47 | def bdiff(a, b): |
@@ -22,7 +22,8 b' except ImportError:' | |||||
22 | # temporary string buffers. |
|
22 | # temporary string buffers. | |
23 |
|
23 | |||
24 | def patches(a, bins): |
|
24 | def patches(a, bins): | |
25 | if not bins: |
|
25 | if not bins: | |
|
26 | return a | |||
26 |
|
27 | |||
27 | plens = [len(x) for x in bins] |
|
28 | plens = [len(x) for x in bins] | |
28 | pl = sum(plens) |
|
29 | pl = sum(plens) | |
@@ -30,7 +31,8 b' def patches(a, bins):' | |||||
30 | tl = bl + bl + pl # enough for the patches and two working texts |
|
31 | tl = bl + bl + pl # enough for the patches and two working texts | |
31 | b1, b2 = 0, bl |
|
32 | b1, b2 = 0, bl | |
32 |
|
33 | |||
33 | if not tl: |
|
34 | if not tl: | |
|
35 | return a | |||
34 |
|
36 | |||
35 | m = StringIO() |
|
37 | m = StringIO() | |
36 | def move(dest, src, count): |
|
38 | def move(dest, src, count): |
@@ -11,13 +11,20 b' import stat as _stat' | |||||
11 | posixfile = open |
|
11 | posixfile = open | |
12 |
|
12 | |||
13 | def _mode_to_kind(mode): |
|
13 | def _mode_to_kind(mode): | |
14 | if _stat.S_ISREG(mode): |
|
14 | if _stat.S_ISREG(mode): | |
15 |
|
|
15 | return _stat.S_IFREG | |
16 | if _stat.S_IS |
|
16 | if _stat.S_ISDIR(mode): | |
17 |
|
|
17 | return _stat.S_IFDIR | |
18 | if _stat.S_IS |
|
18 | if _stat.S_ISLNK(mode): | |
19 | if _stat.S_ISFIFO(mode): return _stat.S_IFIFO |
|
19 | return _stat.S_IFLNK | |
20 | if _stat.S_IS |
|
20 | if _stat.S_ISBLK(mode): | |
|
21 | return _stat.S_IFBLK | |||
|
22 | if _stat.S_ISCHR(mode): | |||
|
23 | return _stat.S_IFCHR | |||
|
24 | if _stat.S_ISFIFO(mode): | |||
|
25 | return _stat.S_IFIFO | |||
|
26 | if _stat.S_ISSOCK(mode): | |||
|
27 | return _stat.S_IFSOCK | |||
21 | return mode |
|
28 | return mode | |
22 |
|
29 | |||
23 | def listdir(path, stat=False, skip=None): |
|
30 | def listdir(path, stat=False, skip=None): |
@@ -127,7 +127,7 b' class lazyparser(object):' | |||||
127 | self.dataf = dataf |
|
127 | self.dataf = dataf | |
128 | self.s = struct.calcsize(indexformatng) |
|
128 | self.s = struct.calcsize(indexformatng) | |
129 | self.datasize = size |
|
129 | self.datasize = size | |
130 | self.l = size/self.s |
|
130 | self.l = size / self.s | |
131 | self.index = [None] * self.l |
|
131 | self.index = [None] * self.l | |
132 | self.map = {nullid: nullrev} |
|
132 | self.map = {nullid: nullrev} | |
133 | self.allmap = 0 |
|
133 | self.allmap = 0 | |
@@ -902,7 +902,7 b' class revlog(object):' | |||||
902 | try: |
|
902 | try: | |
903 | # hex(node)[:...] |
|
903 | # hex(node)[:...] | |
904 | l = len(id) // 2 # grab an even number of digits |
|
904 | l = len(id) // 2 # grab an even number of digits | |
905 | bin_id = bin(id[:l*2]) |
|
905 | bin_id = bin(id[:l * 2]) | |
906 | nl = [n for n in self.nodemap if n[:l] == bin_id] |
|
906 | nl = [n for n in self.nodemap if n[:l] == bin_id] | |
907 | nl = [n for n in nl if hex(n).startswith(id)] |
|
907 | nl = [n for n in nl if hex(n).startswith(id)] | |
908 | if len(nl) > 0: |
|
908 | if len(nl) > 0: | |
@@ -1403,7 +1403,7 b' class revlog(object):' | |||||
1403 | return (dd, di) |
|
1403 | return (dd, di) | |
1404 |
|
1404 | |||
1405 | def files(self): |
|
1405 | def files(self): | |
1406 | res = [ |
|
1406 | res = [self.indexfile] | |
1407 | if not self._inline: |
|
1407 | if not self._inline: | |
1408 | res.append(self.datafile) |
|
1408 | res.append(self.datafile) | |
1409 | return res |
|
1409 | return res |
@@ -50,7 +50,7 b' def intersect(ra, rb):' | |||||
50 | def compare_range(a, astart, aend, b, bstart, bend): |
|
50 | def compare_range(a, astart, aend, b, bstart, bend): | |
51 | """Compare a[astart:aend] == b[bstart:bend], without slicing. |
|
51 | """Compare a[astart:aend] == b[bstart:bend], without slicing. | |
52 | """ |
|
52 | """ | |
53 | if (aend-astart) != (bend-bstart): |
|
53 | if (aend - astart) != (bend - bstart): | |
54 | return False |
|
54 | return False | |
55 | for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): |
|
55 | for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): | |
56 | if a[ia] != b[ib]: |
|
56 | if a[ia] != b[ib]: | |
@@ -303,7 +303,7 b' class Merge3Text(object):' | |||||
303 | region_ib) |
|
303 | region_ib) | |
304 | if reg is not None: |
|
304 | if reg is not None: | |
305 | yield reg |
|
305 | yield reg | |
306 | yield 'same', region_ia, region_len+region_ia |
|
306 | yield 'same', region_ia, region_len + region_ia | |
307 | next_a = region_ia + region_len |
|
307 | next_a = region_ia + region_len | |
308 | next_b = region_ib + region_len |
|
308 | next_b = region_ib + region_len | |
309 | reg = self.mismatch_region(next_a, amatch, next_b, bmatch) |
|
309 | reg = self.mismatch_region(next_a, amatch, next_b, bmatch) | |
@@ -336,7 +336,7 b' class Merge3Text(object):' | |||||
336 |
|
336 | |||
337 | # there is an unconflicted block at i; how long does it |
|
337 | # there is an unconflicted block at i; how long does it | |
338 | # extend? until whichever one ends earlier. |
|
338 | # extend? until whichever one ends earlier. | |
339 | i = intersect((abase, abase+alen), (bbase, bbase+blen)) |
|
339 | i = intersect((abase, abase + alen), (bbase, bbase + blen)) | |
340 | if i: |
|
340 | if i: | |
341 | intbase = i[0] |
|
341 | intbase = i[0] | |
342 | intend = i[1] |
|
342 | intend = i[1] |
@@ -90,9 +90,11 b' class sshrepository(repo.repository):' | |||||
90 | def readerr(self): |
|
90 | def readerr(self): | |
91 | while 1: |
|
91 | while 1: | |
92 | size = util.fstat(self.pipee).st_size |
|
92 | size = util.fstat(self.pipee).st_size | |
93 | if size == 0: |
|
93 | if size == 0: | |
|
94 | break | |||
94 | l = self.pipee.readline() |
|
95 | l = self.pipee.readline() | |
95 | if not l: |
|
96 | if not l: | |
|
97 | break | |||
96 | self.ui.status(_("remote: "), l) |
|
98 | self.ui.status(_("remote: "), l) | |
97 |
|
99 | |||
98 | def abort(self, exception): |
|
100 | def abort(self, exception): | |
@@ -190,7 +192,7 b' class sshrepository(repo.repository):' | |||||
190 | n = " ".join(map(hex, nodes)) |
|
192 | n = " ".join(map(hex, nodes)) | |
191 | d = self.call("branches", nodes=n) |
|
193 | d = self.call("branches", nodes=n) | |
192 | try: |
|
194 | try: | |
193 | br = [ |
|
195 | br = [tuple(map(bin, b.split(" "))) for b in d.splitlines()] | |
194 | return br |
|
196 | return br | |
195 | except: |
|
197 | except: | |
196 | self.abort(error.ResponseError(_("unexpected response:"), d)) |
|
198 | self.abort(error.ResponseError(_("unexpected response:"), d)) | |
@@ -199,7 +201,7 b' class sshrepository(repo.repository):' | |||||
199 | n = " ".join(["-".join(map(hex, p)) for p in pairs]) |
|
201 | n = " ".join(["-".join(map(hex, p)) for p in pairs]) | |
200 | d = self.call("between", pairs=n) |
|
202 | d = self.call("between", pairs=n) | |
201 | try: |
|
203 | try: | |
202 | p = [ |
|
204 | p = [l and map(bin, l.split(" ")) or [] for l in d.splitlines()] | |
203 | return p |
|
205 | return p | |
204 | except: |
|
206 | except: | |
205 | self.abort(error.ResponseError(_("unexpected response:"), d)) |
|
207 | self.abort(error.ResponseError(_("unexpected response:"), d)) |
@@ -42,7 +42,8 b' class sshserver(object):' | |||||
42 |
|
42 | |||
43 | def serve_forever(self): |
|
43 | def serve_forever(self): | |
44 | try: |
|
44 | try: | |
45 | while self.serve_one(): |
|
45 | while self.serve_one(): | |
|
46 | pass | |||
46 | finally: |
|
47 | finally: | |
47 | if self.lock is not None: |
|
48 | if self.lock is not None: | |
48 | self.lock.release() |
|
49 | self.lock.release() | |
@@ -52,7 +53,8 b' class sshserver(object):' | |||||
52 | cmd = self.fin.readline()[:-1] |
|
53 | cmd = self.fin.readline()[:-1] | |
53 | if cmd: |
|
54 | if cmd: | |
54 | impl = getattr(self, 'do_' + cmd, None) |
|
55 | impl = getattr(self, 'do_' + cmd, None) | |
55 | if impl: |
|
56 | if impl: | |
|
57 | impl() | |||
56 | else: self.respond("") |
|
58 | else: self.respond("") | |
57 | return cmd != '' |
|
59 | return cmd != '' | |
58 |
|
60 |
@@ -32,7 +32,7 b' def decodedir(path):' | |||||
32 | def _buildencodefun(): |
|
32 | def _buildencodefun(): | |
33 | e = '_' |
|
33 | e = '_' | |
34 | win_reserved = [ord(x) for x in '\\:*?"<>|'] |
|
34 | win_reserved = [ord(x) for x in '\\:*?"<>|'] | |
35 | cmap = dict([ |
|
35 | cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) | |
36 | for x in (range(32) + range(126, 256) + win_reserved): |
|
36 | for x in (range(32) + range(126, 256) + win_reserved): | |
37 | cmap[chr(x)] = "~%02x" % x |
|
37 | cmap[chr(x)] = "~%02x" % x | |
38 | for x in range(ord("A"), ord("Z")+1) + [ord(e)]: |
|
38 | for x in range(ord("A"), ord("Z")+1) + [ord(e)]: | |
@@ -45,7 +45,7 b' def _buildencodefun():' | |||||
45 | while i < len(s): |
|
45 | while i < len(s): | |
46 | for l in xrange(1, 4): |
|
46 | for l in xrange(1, 4): | |
47 | try: |
|
47 | try: | |
48 | yield dmap[s[i:i+l]] |
|
48 | yield dmap[s[i:i + l]] | |
49 | i += l |
|
49 | i += l | |
50 | break |
|
50 | break | |
51 | except KeyError: |
|
51 | except KeyError: | |
@@ -59,7 +59,7 b' encodefilename, decodefilename = _builde' | |||||
59 |
|
59 | |||
60 | def _build_lower_encodefun(): |
|
60 | def _build_lower_encodefun(): | |
61 | win_reserved = [ord(x) for x in '\\:*?"<>|'] |
|
61 | win_reserved = [ord(x) for x in '\\:*?"<>|'] | |
62 | cmap = dict([ |
|
62 | cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) | |
63 | for x in (range(32) + range(126, 256) + win_reserved): |
|
63 | for x in (range(32) + range(126, 256) + win_reserved): | |
64 | cmap[chr(x)] = "~%02x" % x |
|
64 | cmap[chr(x)] = "~%02x" % x | |
65 | for x in range(ord("A"), ord("Z")+1): |
|
65 | for x in range(ord("A"), ord("Z")+1): |
@@ -64,7 +64,8 b' def fill(text, width):' | |||||
64 | m = para_re.search(text, start) |
|
64 | m = para_re.search(text, start) | |
65 | if not m: |
|
65 | if not m: | |
66 | w = len(text) |
|
66 | w = len(text) | |
67 | while w > start and text[w-1].isspace(): |
|
67 | while w > start and text[w - 1].isspace(): | |
|
68 | w -= 1 | |||
68 | yield text[start:w], text[w:] |
|
69 | yield text[start:w], text[w:] | |
69 | break |
|
70 | break | |
70 | yield text[start:m.start(0)], m.group(1) |
|
71 | yield text[start:m.start(0)], m.group(1) | |
@@ -91,17 +92,21 b' def obfuscate(text):' | |||||
91 | def domain(author): |
|
92 | def domain(author): | |
92 | '''get domain of author, or empty string if none.''' |
|
93 | '''get domain of author, or empty string if none.''' | |
93 | f = author.find('@') |
|
94 | f = author.find('@') | |
94 | if f == -1: |
|
95 | if f == -1: | |
95 | author = author[f+1:] |
|
96 | return '' | |
|
97 | author = author[f + 1:] | |||
96 | f = author.find('>') |
|
98 | f = author.find('>') | |
97 | if f >= 0: author = author[:f] |
|
99 | if f >= 0: | |
|
100 | author = author[:f] | |||
98 | return author |
|
101 | return author | |
99 |
|
102 | |||
100 | def person(author): |
|
103 | def person(author): | |
101 | '''get name of author, or else username.''' |
|
104 | '''get name of author, or else username.''' | |
102 | if not '@' in author: |
|
105 | if not '@' in author: | |
|
106 | return author | |||
103 | f = author.find('<') |
|
107 | f = author.find('<') | |
104 | if f == -1: return util.shortuser(author) |
|
108 | if f == -1: | |
|
109 | return util.shortuser(author) | |||
105 | return author[:f].rstrip() |
|
110 | return author[:f].rstrip() | |
106 |
|
111 | |||
107 | def indent(text, prefix): |
|
112 | def indent(text, prefix): |
@@ -29,7 +29,8 b' def showlist(name, values, plural=None, ' | |||||
29 | expand 'end_foos'. |
|
29 | expand 'end_foos'. | |
30 | ''' |
|
30 | ''' | |
31 | templ = args['templ'] |
|
31 | templ = args['templ'] | |
32 | if plural: |
|
32 | if plural: | |
|
33 | names = plural | |||
33 | else: names = name + 's' |
|
34 | else: names = name + 's' | |
34 | if not values: |
|
35 | if not values: | |
35 | noname = 'no_' + names |
|
36 | noname = 'no_' + names | |
@@ -188,7 +189,7 b' def showfilecopies(**args):' | |||||
188 | rename = getrenamed(fn, ctx.rev()) |
|
189 | rename = getrenamed(fn, ctx.rev()) | |
189 | if rename: |
|
190 | if rename: | |
190 | copies.append((fn, rename[0])) |
|
191 | copies.append((fn, rename[0])) | |
191 |
|
192 | |||
192 | c = [{'name': x[0], 'source': x[1]} for x in copies] |
|
193 | c = [{'name': x[0], 'source': x[1]} for x in copies] | |
193 | return showlist('file_copy', c, plural='file_copies', **args) |
|
194 | return showlist('file_copy', c, plural='file_copies', **args) | |
194 |
|
195 |
@@ -77,7 +77,7 b' class engine(object):' | |||||
77 | raise SyntaxError(_("error expanding '%s%%%s'") % (key, format)) |
|
77 | raise SyntaxError(_("error expanding '%s%%%s'") % (key, format)) | |
78 | lm = map.copy() |
|
78 | lm = map.copy() | |
79 | for i in v: |
|
79 | for i in v: | |
80 | if isinstance(i, dict): |
|
80 | if isinstance(i, dict): | |
81 | lm.update(i) |
|
81 | lm.update(i) | |
82 | yield self.process(format, lm) |
|
82 | yield self.process(format, lm) | |
83 | else: |
|
83 | else: | |
@@ -244,7 +244,6 b' def stylemap(styles, paths=None):' | |||||
244 | styles = [styles] |
|
244 | styles = [styles] | |
245 |
|
245 | |||
246 | for style in styles: |
|
246 | for style in styles: | |
247 |
|
||||
248 | if not style: |
|
247 | if not style: | |
249 | continue |
|
248 | continue | |
250 | locations = [os.path.join(style, 'map'), 'map-' + style] |
|
249 | locations = [os.path.join(style, 'map'), 'map-' + style] |
@@ -73,8 +73,8 b' class transaction(object):' | |||||
73 |
|
73 | |||
74 | @active |
|
74 | @active | |
75 | def add(self, file, offset, data=None): |
|
75 | def add(self, file, offset, data=None): | |
76 | if file in self.map: |
|
76 | if file in self.map: | |
77 |
|
77 | return | ||
78 | if self._queue: |
|
78 | if self._queue: | |
79 | self._queue[-1].append((file, offset, data)) |
|
79 | self._queue[-1].append((file, offset, data)) | |
80 | return |
|
80 | return | |
@@ -147,7 +147,8 b' class transaction(object):' | |||||
147 | self.report(_("transaction abort!\n")) |
|
147 | self.report(_("transaction abort!\n")) | |
148 |
|
148 | |||
149 | try: |
|
149 | try: | |
150 | _playback(self.journal, self.report, self.opener, |
|
150 | _playback(self.journal, self.report, self.opener, | |
|
151 | self.entries, False) | |||
151 | self.report(_("rollback completed\n")) |
|
152 | self.report(_("rollback completed\n")) | |
152 | except: |
|
153 | except: | |
153 | self.report(_("rollback failed - please run hg recover\n")) |
|
154 | self.report(_("rollback failed - please run hg recover\n")) |
@@ -198,7 +198,8 b' class ui(object):' | |||||
198 |
|
198 | |||
199 | def shortuser(self, user): |
|
199 | def shortuser(self, user): | |
200 | """Return a short representation of a user name or email address.""" |
|
200 | """Return a short representation of a user name or email address.""" | |
201 | if not self.verbose: |
|
201 | if not self.verbose: | |
|
202 | user = util.shortuser(user) | |||
202 | return user |
|
203 | return user | |
203 |
|
204 | |||
204 | def _path(self, loc): |
|
205 | def _path(self, loc): | |
@@ -236,12 +237,14 b' class ui(object):' | |||||
236 |
|
237 | |||
237 | def write_err(self, *args): |
|
238 | def write_err(self, *args): | |
238 | try: |
|
239 | try: | |
239 | if not sys.stdout.closed: |
|
240 | if not sys.stdout.closed: | |
|
241 | sys.stdout.flush() | |||
240 | for a in args: |
|
242 | for a in args: | |
241 | sys.stderr.write(str(a)) |
|
243 | sys.stderr.write(str(a)) | |
242 | # stderr may be buffered under win32 when redirected to files, |
|
244 | # stderr may be buffered under win32 when redirected to files, | |
243 | # including stdout. |
|
245 | # including stdout. | |
244 | if not sys.stderr.closed: |
|
246 | if not sys.stderr.closed: | |
|
247 | sys.stderr.flush() | |||
245 | except IOError, inst: |
|
248 | except IOError, inst: | |
246 | if inst.errno != errno.EPIPE: |
|
249 | if inst.errno != errno.EPIPE: | |
247 | raise |
|
250 | raise | |
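
The write_err() hunk above is really about ordering: flush stdout before writing to stderr, and flush stderr afterwards, so output stays interleaved correctly when both streams are redirected to one file (stderr may be buffered on win32). The same pattern stripped of the ui class:

    import errno
    import sys

    def write_err(*args):
        try:
            if not sys.stdout.closed:
                sys.stdout.flush()          # push pending stdout out first
            for a in args:
                sys.stderr.write(str(a))
            if not sys.stderr.closed:
                sys.stderr.flush()          # stderr may be buffered when redirected
        except IOError as inst:
            if inst.errno != errno.EPIPE:   # a closed pipe is not an error here
                raise
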
@@ -311,21 +314,24 b' class ui(object):' | |||||
311 | return resps.index(r.lower()) |
|
314 | return resps.index(r.lower()) | |
312 | self.write(_("unrecognized response\n")) |
|
315 | self.write(_("unrecognized response\n")) | |
313 |
|
316 | |||
314 |
|
||||
315 | def getpass(self, prompt=None, default=None): |
|
317 | def getpass(self, prompt=None, default=None): | |
316 | if not self.interactive(): |
|
318 | if not self.interactive(): | |
|
319 | return default | |||
317 | try: |
|
320 | try: | |
318 | return getpass.getpass(prompt or _('password: ')) |
|
321 | return getpass.getpass(prompt or _('password: ')) | |
319 | except EOFError: |
|
322 | except EOFError: | |
320 | raise util.Abort(_('response expected')) |
|
323 | raise util.Abort(_('response expected')) | |
321 | def status(self, *msg): |
|
324 | def status(self, *msg): | |
322 | if not self.quiet: |
|
325 | if not self.quiet: | |
|
326 | self.write(*msg) | |||
323 | def warn(self, *msg): |
|
327 | def warn(self, *msg): | |
324 | self.write_err(*msg) |
|
328 | self.write_err(*msg) | |
325 | def note(self, *msg): |
|
329 | def note(self, *msg): | |
326 | if self.verbose: |
|
330 | if self.verbose: | |
|
331 | self.write(*msg) | |||
327 | def debug(self, *msg): |
|
332 | def debug(self, *msg): | |
328 | if self.debugflag: |
|
333 | if self.debugflag: | |
|
334 | self.write(*msg) | |||
329 | def edit(self, text, user): |
|
335 | def edit(self, text, user): | |
330 | (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt", |
|
336 | (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt", | |
331 | text=True) |
|
337 | text=True) |
@@ -30,18 +30,18 b' def netlocsplit(netloc):' | |||||
30 | if a == -1: |
|
30 | if a == -1: | |
31 | user, passwd = None, None |
|
31 | user, passwd = None, None | |
32 | else: |
|
32 | else: | |
33 | userpass, netloc = netloc[:a], netloc[a+1:] |
|
33 | userpass, netloc = netloc[:a], netloc[a + 1:] | |
34 | c = userpass.find(':') |
|
34 | c = userpass.find(':') | |
35 | if c == -1: |
|
35 | if c == -1: | |
36 | user, passwd = urllib.unquote(userpass), None |
|
36 | user, passwd = urllib.unquote(userpass), None | |
37 | else: |
|
37 | else: | |
38 | user = urllib.unquote(userpass[:c]) |
|
38 | user = urllib.unquote(userpass[:c]) | |
39 | passwd = urllib.unquote(userpass[c+1:]) |
|
39 | passwd = urllib.unquote(userpass[c + 1:]) | |
40 | c = netloc.find(':') |
|
40 | c = netloc.find(':') | |
41 | if c == -1: |
|
41 | if c == -1: | |
42 | host, port = netloc, None |
|
42 | host, port = netloc, None | |
43 | else: |
|
43 | else: | |
44 | host, port = netloc[:c], netloc[c+1:] |
|
44 | host, port = netloc[:c], netloc[c + 1:] | |
45 | return host, port, user, passwd |
|
45 | return host, port, user, passwd | |
46 |
|
46 | |||
47 | def netlocunsplit(host, port, user=None, passwd=None): |
|
47 | def netlocunsplit(host, port, user=None, passwd=None): | |
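
netlocsplit() above is plain find()-and-slice parsing of a 'user:passwd@host:port' network location. As a stand-alone sketch (without the urllib.unquote() calls the real function applies to user and password):

    def netlocsplit(netloc):
        # -> (host, port, user, passwd); missing pieces come back as None
        a = netloc.find('@')
        if a == -1:
            user, passwd = None, None
        else:
            userpass, netloc = netloc[:a], netloc[a + 1:]
            c = userpass.find(':')
            if c == -1:
                user, passwd = userpass, None
            else:
                user, passwd = userpass[:c], userpass[c + 1:]
        c = netloc.find(':')
        if c == -1:
            host, port = netloc, None
        else:
            host, port = netloc[:c], netloc[c + 1:]
        return host, port, user, passwd

    # netlocsplit('bob:secret@example.com:8080') == ('example.com', '8080', 'bob', 'secret')
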
@@ -89,7 +89,8 b' def quotepath(path):' | |||||
89 | l = list(path) |
|
89 | l = list(path) | |
90 | for i in xrange(len(l)): |
|
90 | for i in xrange(len(l)): | |
91 | c = l[i] |
|
91 | c = l[i] | |
92 | if c == '%' and i + 2 < len(l) and |
|
92 | if (c == '%' and i + 2 < len(l) and | |
|
93 | l[i + 1] in _hex and l[i + 2] in _hex): | |||
93 | pass |
|
94 | pass | |
94 | elif c not in _safeset: |
|
95 | elif c not in _safeset: | |
95 | l[i] = '%%%02X' % ord(c) |
|
96 | l[i] = '%%%02X' % ord(c) | |
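
quotepath() above percent-encodes unsafe characters but deliberately leaves an existing %XX escape alone, which is why the condition checks the two following characters against _hex. Roughly, with an assumed safe-character set:

    import string

    _hex = set(string.hexdigits)
    _safe = set(string.ascii_letters + string.digits + '_.-/~')   # assumption

    def quotepath(path):
        l = list(path)
        for i in range(len(l)):
            c = l[i]
            # keep a well-formed %XX escape as it is
            if c == '%' and i + 2 < len(l) and l[i + 1] in _hex and l[i + 2] in _hex:
                continue
            if c not in _safe:
                l[i] = '%%%02X' % ord(c)
        return ''.join(l)

    # quotepath('a b%20c') == 'a%20b%20c'
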
@@ -148,7 +149,8 b' class passwordmgr(urllib2.HTTPPasswordMg' | |||||
148 | bestauth = None |
|
149 | bestauth = None | |
149 | for auth in config.itervalues(): |
|
150 | for auth in config.itervalues(): | |
150 | prefix = auth.get('prefix') |
|
151 | prefix = auth.get('prefix') | |
151 | if not prefix: |
|
152 | if not prefix: | |
|
153 | continue | |||
152 | p = prefix.split('://', 1) |
|
154 | p = prefix.split('://', 1) | |
153 | if len(p) > 1: |
|
155 | if len(p) > 1: | |
154 | schemes, prefix = [p[0]], p[1] |
|
156 | schemes, prefix = [p[0]], p[1] | |
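
The [auth] loop above skips entries with no prefix and then splits an optional scheme off the front of the remaining ones; that split, in isolation (the default scheme list is an assumption here):

    def splitauthprefix(prefix, default_schemes=('https',)):
        # 'http://example.org/hg' -> (['http'], 'example.org/hg')
        # 'example.org/hg'        -> (['https'], 'example.org/hg')
        p = prefix.split('://', 1)
        if len(p) > 1:
            return [p[0]], p[1]
        return list(default_schemes), prefix
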
@@ -180,7 +182,7 b' class proxyhandler(urllib2.ProxyHandler)' | |||||
180 | proxypasswd = ui.config("http_proxy", "passwd") |
|
182 | proxypasswd = ui.config("http_proxy", "passwd") | |
181 |
|
183 | |||
182 | # see if we should use a proxy for this url |
|
184 | # see if we should use a proxy for this url | |
183 | no_list = [ |
|
185 | no_list = ["localhost", "127.0.0.1"] | |
184 | no_list.extend([p.lower() for |
|
186 | no_list.extend([p.lower() for | |
185 | p in ui.configlist("http_proxy", "no")]) |
|
187 | p in ui.configlist("http_proxy", "no")]) | |
186 | no_list.extend([p.strip().lower() for |
|
188 | no_list.extend([p.strip().lower() for | |
@@ -436,7 +438,8 b' if has_https:' | |||||
436 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
|
438 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | |
437 | self.sock.connect((self.host, self.port)) |
|
439 | self.sock.connect((self.host, self.port)) | |
438 | if _generic_proxytunnel(self): |
|
440 | if _generic_proxytunnel(self): | |
439 | self.sock = _ssl_wrap_socket(self.sock, self.cert_file, |
|
441 | self.sock = _ssl_wrap_socket(self.sock, self.cert_file, | |
|
442 | self.key_file) | |||
440 | else: |
|
443 | else: | |
441 | BetterHTTPS.connect(self) |
|
444 | BetterHTTPS.connect(self) | |
442 |
|
445 |
@@ -182,16 +182,21 b' def tempfilter(s, cmd):' | |||||
182 | code = os.system(cmd) |
|
182 | code = os.system(cmd) | |
183 | if sys.platform == 'OpenVMS' and code & 1: |
|
183 | if sys.platform == 'OpenVMS' and code & 1: | |
184 | code = 0 |
|
184 | code = 0 | |
185 | if code: raise Abort(_("command '%s' failed: %s") % |
|
185 | if code: | |
186 | (cmd, explain_exit(code))) |
|
186 | raise Abort(_("command '%s' failed: %s") % | |
|
187 | (cmd, explain_exit(code))) | |||
187 | return open(outname, 'rb').read() |
|
188 | return open(outname, 'rb').read() | |
188 | finally: |
|
189 | finally: | |
189 | try: |
|
190 | try: | |
190 | if inname: |
|
191 | if inname: | |
191 | except: pass |
|
192 | os.unlink(inname) | |
|
193 | except: | |||
|
194 | pass | |||
192 | try: |
|
195 | try: | |
193 | if outname: |
|
196 | if outname: | |
194 | except: pass |
|
197 | os.unlink(outname) | |
|
198 | except: | |||
|
199 | pass | |||
195 |
|
200 | |||
196 | filtertable = { |
|
201 | filtertable = { | |
197 | 'tempfile:': tempfilter, |
|
202 | 'tempfile:': tempfilter, | |
@@ -242,8 +247,11 b' def increasingchunks(source, min=1024, m' | |||||
242 |
|
247 | |||
243 | Abort = error.Abort |
|
248 | Abort = error.Abort | |
244 |
|
249 | |||
245 | def always(fn): |
|
250 | def always(fn): | |
246 | def never(fn): return False |
|
251 | return True | |
|
252 | ||||
|
253 | def never(fn): | |||
|
254 | return False | |||
247 |
|
255 | |||
248 | def pathto(root, n1, n2): |
|
256 | def pathto(root, n1, n2): | |
249 | '''return the relative path from one place to another. |
|
257 | '''return the relative path from one place to another. | |
@@ -256,7 +264,8 b' def pathto(root, n1, n2):' | |||||
256 | relative to root. |
|
264 | relative to root. | |
257 | n2 should always be relative to root. |
|
265 | n2 should always be relative to root. | |
258 | ''' |
|
266 | ''' | |
259 | if not n1: return localpath(n2) |
|
267 | if not n1: | |
|
268 | return localpath(n2) | |||
260 | if os.path.isabs(n1): |
|
269 | if os.path.isabs(n1): | |
261 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: |
|
270 | if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: | |
262 | return os.path.join(root, localpath(n2)) |
|
271 | return os.path.join(root, localpath(n2)) | |
@@ -912,11 +921,15 b' def filechunkiter(f, size=65536, limit=N' | |||||
912 | assert size >= 0 |
|
921 | assert size >= 0 | |
913 | assert limit is None or limit >= 0 |
|
922 | assert limit is None or limit >= 0 | |
914 | while True: |
|
923 | while True: | |
915 | if limit is None: |
|
924 | if limit is None: | |
916 |
|
|
925 | nbytes = size | |
|
926 | else: | |||
|
927 | nbytes = min(limit, size) | |||
917 | s = nbytes and f.read(nbytes) |
|
928 | s = nbytes and f.read(nbytes) | |
918 | if not s: |
|
929 | if not s: | |
919 | if limit: limit -= len(s) |
|
930 | break | |
|
931 | if limit: | |||
|
932 | limit -= len(s) | |||
920 | yield s |
|
933 | yield s | |
921 |
|
934 | |||
922 | def makedate(): |
|
935 | def makedate(): | |
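
The filechunkiter() change above replaces an and/or one-liner with an explicit if/else, but the behaviour is unchanged: read at most size bytes per iteration and never more than limit bytes in total. A stand-alone sketch of that loop:

    def filechunks(f, size=65536, limit=None):
        # yield chunks of up to `size` bytes, stopping after `limit` bytes overall
        assert size >= 0
        assert limit is None or limit >= 0
        while True:
            if limit is None:
                nbytes = size
            else:
                nbytes = min(limit, size)
            s = nbytes and f.read(nbytes)
            if not s:
                break
            if limit:
                limit -= len(s)
            yield s

    # for chunk in filechunks(open('data.bin', 'rb'), size=4096, limit=1 << 20): ...
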
@@ -1084,7 +1097,7 b' def shortuser(user):' | |||||
1084 | user = user[:f] |
|
1097 | user = user[:f] | |
1085 | f = user.find('<') |
|
1098 | f = user.find('<') | |
1086 | if f >= 0: |
|
1099 | if f >= 0: | |
1087 | user = user[f+1:] |
|
1100 | user = user[f + 1:] | |
1088 | f = user.find(' ') |
|
1101 | f = user.find(' ') | |
1089 | if f >= 0: |
|
1102 | if f >= 0: | |
1090 | user = user[:f] |
|
1103 | user = user[:f] | |
@@ -1096,15 +1109,16 b' def shortuser(user):' | |||||
1096 | def email(author): |
|
1109 | def email(author): | |
1097 | '''get email of author.''' |
|
1110 | '''get email of author.''' | |
1098 | r = author.find('>') |
|
1111 | r = author.find('>') | |
1099 | if r == -1: |
|
1112 | if r == -1: | |
1100 | return author[author.find('<')+1:r] |
|
1113 | r = None | |
|
1114 | return author[author.find('<') + 1:r] | |||
1101 |
|
1115 | |||
1102 | def ellipsis(text, maxlength=400): |
|
1116 | def ellipsis(text, maxlength=400): | |
1103 | """Trim string to at most maxlength (default: 400) characters.""" |
|
1117 | """Trim string to at most maxlength (default: 400) characters.""" | |
1104 | if len(text) <= maxlength: |
|
1118 | if len(text) <= maxlength: | |
1105 | return text |
|
1119 | return text | |
1106 | else: |
|
1120 | else: | |
1107 | return "%s..." % (text[:maxlength-3]) |
|
1121 | return "%s..." % (text[:maxlength - 3]) | |
1108 |
|
1122 | |||
1109 | def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): |
|
1123 | def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): | |
1110 | '''yield every hg repository under path, recursively.''' |
|
1124 | '''yield every hg repository under path, recursively.''' | |
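
The email() fix above leans on slice semantics: when no '>' is found, the end index is set to None so the slice runs to the end of the string, and find('<') + 1 is 0 when there is no '<' at all. Together with ellipsis() that gives:

    def email(author):
        # 'Jane <jane@example.com>' -> 'jane@example.com'
        r = author.find('>')
        if r == -1:
            r = None
        return author[author.find('<') + 1:r]

    def ellipsis(text, maxlength=400):
        # trim to maxlength characters, ellipsis included
        if len(text) <= maxlength:
            return text
        return "%s..." % text[:maxlength - 3]

    # email('jane@example.com') == 'jane@example.com'
    # ellipsis('abcdefgh', 6) == 'abc...'
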
@@ -1173,7 +1187,8 b' def rcpath():' | |||||
1173 | if 'HGRCPATH' in os.environ: |
|
1187 | if 'HGRCPATH' in os.environ: | |
1174 | _rcpath = [] |
|
1188 | _rcpath = [] | |
1175 | for p in os.environ['HGRCPATH'].split(os.pathsep): |
|
1189 | for p in os.environ['HGRCPATH'].split(os.pathsep): | |
1176 | if not p: |
|
1190 | if not p: | |
|
1191 | continue | |||
1177 | p = expandpath(p) |
|
1192 | p = expandpath(p) | |
1178 | if os.path.isdir(p): |
|
1193 | if os.path.isdir(p): | |
1179 | for f, kind in osutil.listdir(p): |
|
1194 | for f, kind in osutil.listdir(p): | |
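
rcpath() above splits HGRCPATH on os.pathsep, skips empty entries, expands each one, and turns directories into the .rc files they contain. A rough sketch with the platform-specific expandpath/osutil.listdir calls replaced by standard-library equivalents:

    import os

    def rcpath_from(value):
        # value is an HGRCPATH-style string, e.g. '/etc/mercurial:~/.hgrc'
        paths = []
        for p in value.split(os.pathsep):
            if not p:
                continue
            p = os.path.expandvars(os.path.expanduser(p))
            if os.path.isdir(p):
                paths.extend(os.path.join(p, f) for f in sorted(os.listdir(p))
                             if f.endswith('.rc'))
            else:
                paths.append(p)
        return paths
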
@@ -1189,15 +1204,15 b' def bytecount(nbytes):' | |||||
1189 | '''return byte count formatted as readable string, with units''' |
|
1204 | '''return byte count formatted as readable string, with units''' | |
1190 |
|
1205 | |||
1191 | units = ( |
|
1206 | units = ( | |
1192 | (100, 1<<30, _('%.0f GB')), |
|
1207 | (100, 1 << 30, _('%.0f GB')), | |
1193 | (10, 1<<30, _('%.1f GB')), |
|
1208 | (10, 1 << 30, _('%.1f GB')), | |
1194 | (1, 1<<30, _('%.2f GB')), |
|
1209 | (1, 1 << 30, _('%.2f GB')), | |
1195 | (100, 1<<20, _('%.0f MB')), |
|
1210 | (100, 1 << 20, _('%.0f MB')), | |
1196 | (10, 1<<20, _('%.1f MB')), |
|
1211 | (10, 1 << 20, _('%.1f MB')), | |
1197 | (1, 1<<20, _('%.2f MB')), |
|
1212 | (1, 1 << 20, _('%.2f MB')), | |
1198 | (100, 1<<10, _('%.0f KB')), |
|
1213 | (100, 1 << 10, _('%.0f KB')), | |
1199 | (10, 1<<10, _('%.1f KB')), |
|
1214 | (10, 1 << 10, _('%.1f KB')), | |
1200 | (1, 1<<10, _('%.2f KB')), |
|
1215 | (1, 1 << 10, _('%.2f KB')), | |
1201 | (1, 1, _('%.0f bytes')), |
|
1216 | (1, 1, _('%.0f bytes')), | |
1202 | ) |
|
1217 | ) | |
1203 |
|
1218 | |||
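
The units table above drives bytecount(): each row is (multiplier, divisor, format), and the first row whose threshold multiplier * divisor fits the value decides both the unit and the number of decimals. Consuming it looks roughly like this (the _() gettext wrapper is dropped):

    units = (
        (100, 1 << 30, '%.0f GB'), (10, 1 << 30, '%.1f GB'), (1, 1 << 30, '%.2f GB'),
        (100, 1 << 20, '%.0f MB'), (10, 1 << 20, '%.1f MB'), (1, 1 << 20, '%.2f MB'),
        (100, 1 << 10, '%.0f KB'), (10, 1 << 10, '%.1f KB'), (1, 1 << 10, '%.2f KB'),
        (1, 1, '%.0f bytes'),
    )

    def bytecount(nbytes):
        for multiplier, divisor, fmt in units:
            if nbytes >= divisor * multiplier:
                return fmt % (nbytes / float(divisor))
        return units[-1][2] % nbytes

    # bytecount(1536) == '1.50 KB'; bytecount(10) == '10 bytes'
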
@@ -1219,7 +1234,7 b' def drop_scheme(scheme, path):' | |||||
1219 | # root. On POSIX they are rooted at the file system root. |
|
1234 | # root. On POSIX they are rooted at the file system root. | |
1220 | if os.name == 'nt': |
|
1235 | if os.name == 'nt': | |
1221 | droot = os.path.splitdrive(os.getcwd())[0] + '/' |
|
1236 | droot = os.path.splitdrive(os.getcwd())[0] + '/' | |
1222 | path = os.path.join(droot, path[i+1:]) |
|
1237 | path = os.path.join(droot, path[i + 1:]) | |
1223 | else: |
|
1238 | else: | |
1224 | path = path[i:] |
|
1239 | path = path[i:] | |
1225 | else: |
|
1240 | else: |
@@ -155,7 +155,8 b' def _verify(repo):' | |||||
155 | ui.status(_("crosschecking files in changesets and manifests\n")) |
|
155 | ui.status(_("crosschecking files in changesets and manifests\n")) | |
156 |
|
156 | |||
157 | if havemf: |
|
157 | if havemf: | |
158 | for c,m in sorted([(c, m) for m in mflinkrevs |
|
158 | for c, m in sorted([(c, m) for m in mflinkrevs | |
|
159 | for c in mflinkrevs[m]]): | |||
159 | err(c, _("changeset refers to unknown manifest %s") % short(m)) |
|
160 | err(c, _("changeset refers to unknown manifest %s") % short(m)) | |
160 | mflinkrevs = None # del is bad here due to scope issues |
|
161 | mflinkrevs = None # del is bad here due to scope issues | |
161 |
|
162 | |||
@@ -259,7 +260,7 b' def _verify(repo):' | |||||
259 |
|
260 | |||
260 | # cross-check |
|
261 | # cross-check | |
261 | if f in filenodes: |
|
262 | if f in filenodes: | |
262 | fns = [(lr, n) for n,lr in filenodes[f].iteritems()] |
|
263 | fns = [(lr, n) for n, lr in filenodes[f].iteritems()] | |
263 | for lr, node in sorted(fns): |
|
264 | for lr, node in sorted(fns): | |
264 | err(lr, _("%s in manifests not found") % short(node), f) |
|
265 | err(lr, _("%s in manifests not found") % short(node), f) | |
265 |
|
266 |
@@ -47,7 +47,8 b' class winstdout(object):' | |||||
47 | self.fp.write(s[start:end]) |
|
47 | self.fp.write(s[start:end]) | |
48 | start = end |
|
48 | start = end | |
49 | except IOError, inst: |
|
49 | except IOError, inst: | |
50 | if inst.errno != 0: |
|
50 | if inst.errno != 0: | |
|
51 | raise | |||
51 | self.close() |
|
52 | self.close() | |
52 | raise IOError(errno.EPIPE, 'Broken pipe') |
|
53 | raise IOError(errno.EPIPE, 'Broken pipe') | |
53 |
|
54 | |||
@@ -55,7 +56,8 b' class winstdout(object):' | |||||
55 | try: |
|
56 | try: | |
56 | return self.fp.flush() |
|
57 | return self.fp.flush() | |
57 | except IOError, inst: |
|
58 | except IOError, inst: | |
58 | if inst.errno != errno.EINVAL: |
|
59 | if inst.errno != errno.EINVAL: | |
|
60 | raise | |||
59 | self.close() |
|
61 | self.close() | |
60 | raise IOError(errno.EPIPE, 'Broken pipe') |
|
62 | raise IOError(errno.EPIPE, 'Broken pipe') | |
61 |
|
63 | |||
@@ -333,7 +335,7 b' def spawndetached(args):' | |||||
333 | hStdOutput = None |
|
335 | hStdOutput = None | |
334 | hStdError = None |
|
336 | hStdError = None | |
335 | wShowWindow = subprocess.SW_HIDE |
|
337 | wShowWindow = subprocess.SW_HIDE | |
336 |
|
338 | |||
337 | args = subprocess.list2cmdline(args) |
|
339 | args = subprocess.list2cmdline(args) | |
338 | # Not running the command in shell mode makes python26 hang when |
|
340 | # Not running the command in shell mode makes python26 hang when | |
339 | # writing to hgweb output socket. |
|
341 | # writing to hgweb output socket. |
@@ -240,7 +240,7 b' extmodules = [' | |||||
240 | ] |
|
240 | ] | |
241 |
|
241 | |||
242 | packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert', |
|
242 | packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert', | |
243 | 'hgext.highlight', 'hgext.zeroconf' |
|
243 | 'hgext.highlight', 'hgext.zeroconf'] | |
244 |
|
244 | |||
245 | if sys.platform == 'linux2' and os.uname()[2] > '2.6': |
|
245 | if sys.platform == 'linux2' and os.uname()[2] > '2.6': | |
246 | # The inotify extension is only usable with Linux 2.6 kernels. |
|
246 | # The inotify extension is only usable with Linux 2.6 kernels. | |
@@ -257,7 +257,7 b" packagedata = {'mercurial': ['locale/*/L" | |||||
257 | def ordinarypath(p): |
|
257 | def ordinarypath(p): | |
258 | return p and p[0] != '.' and p[-1] != '~' |
|
258 | return p and p[0] != '.' and p[-1] != '~' | |
259 |
|
259 | |||
260 | for root in ('templates', |
|
260 | for root in ('templates',): | |
261 | for curdir, dirs, files in os.walk(os.path.join('mercurial', root)): |
|
261 | for curdir, dirs, files in os.walk(os.path.join('mercurial', root)): | |
262 | curdir = curdir.split(os.sep, 1)[1] |
|
262 | curdir = curdir.split(os.sep, 1)[1] | |
263 | dirs[:] = filter(ordinarypath, dirs) |
|
263 | dirs[:] = filter(ordinarypath, dirs) |
@@ -106,20 +106,20 b' class StatementFindingAstVisitor(compile' | |||||
106 | self.excluded = excluded |
|
106 | self.excluded = excluded | |
107 | self.suite_spots = suite_spots |
|
107 | self.suite_spots = suite_spots | |
108 | self.excluding_suite = 0 |
|
108 | self.excluding_suite = 0 | |
109 |
|
109 | |||
110 | def doRecursive(self, node): |
|
110 | def doRecursive(self, node): | |
111 | for n in node.getChildNodes(): |
|
111 | for n in node.getChildNodes(): | |
112 | self.dispatch(n) |
|
112 | self.dispatch(n) | |
113 |
|
113 | |||
114 | visitStmt = visitModule = doRecursive |
|
114 | visitStmt = visitModule = doRecursive | |
115 |
|
115 | |||
116 | def doCode(self, node): |
|
116 | def doCode(self, node): | |
117 | if hasattr(node, 'decorators') and node.decorators: |
|
117 | if hasattr(node, 'decorators') and node.decorators: | |
118 | self.dispatch(node.decorators) |
|
118 | self.dispatch(node.decorators) | |
119 | self.recordAndDispatch(node.code) |
|
119 | self.recordAndDispatch(node.code) | |
120 | else: |
|
120 | else: | |
121 | self.doSuite(node, node.code) |
|
121 | self.doSuite(node, node.code) | |
122 |
|
122 | |||
123 | visitFunction = visitClass = doCode |
|
123 | visitFunction = visitClass = doCode | |
124 |
|
124 | |||
125 | def getFirstLine(self, node): |
|
125 | def getFirstLine(self, node): | |
@@ -139,26 +139,26 b' class StatementFindingAstVisitor(compile' | |||||
139 | for n in node.getChildNodes(): |
|
139 | for n in node.getChildNodes(): | |
140 | lineno = max(lineno, self.getLastLine(n)) |
|
140 | lineno = max(lineno, self.getLastLine(n)) | |
141 | return lineno |
|
141 | return lineno | |
142 |
|
142 | |||
143 | def doStatement(self, node): |
|
143 | def doStatement(self, node): | |
144 | self.recordLine(self.getFirstLine(node)) |
|
144 | self.recordLine(self.getFirstLine(node)) | |
145 |
|
145 | |||
146 | visitAssert = visitAssign = visitAssTuple = visitPrint = \ |
|
146 | visitAssert = visitAssign = visitAssTuple = visitPrint = \ | |
147 | visitPrintnl = visitRaise = visitSubscript = visitDecorators = \ |
|
147 | visitPrintnl = visitRaise = visitSubscript = visitDecorators = \ | |
148 | doStatement |
|
148 | doStatement | |
149 |
|
149 | |||
150 | def visitPass(self, node): |
|
150 | def visitPass(self, node): | |
151 | # Pass statements have weird interactions with docstrings. |
|
151 | # Pass statements have weird interactions with docstrings. If | |
152 | # pass statement is part of one of those pairs, claim |
|
152 | # this pass statement is part of one of those pairs, claim | |
153 | # is on the later of the two lines. |
|
153 | # that the statement is on the later of the two lines. | |
154 | l = node.lineno |
|
154 | l = node.lineno | |
155 | if l: |
|
155 | if l: | |
156 | lines = self.suite_spots.get(l, [l,l]) |
|
156 | lines = self.suite_spots.get(l, [l, l]) | |
157 | self.statements[lines[1]] = 1 |
|
157 | self.statements[lines[1]] = 1 | |
158 |
|
158 | |||
159 | def visitDiscard(self, node): |
|
159 | def visitDiscard(self, node): | |
160 | # Discard nodes are statements that execute an expression, but then |
|
160 | # Discard nodes are statements that execute an expression, but then | |
161 | # discard the results. This includes function calls, so we can't |
|
161 | # discard the results. This includes function calls, so we can't | |
162 | # ignore them all. But if the expression is a constant, the statement |
|
162 | # ignore them all. But if the expression is a constant, the statement | |
163 | # won't be "executed", so don't count it now. |
|
163 | # won't be "executed", so don't count it now. | |
164 | if node.expr.__class__.__name__ != 'Const': |
|
164 | if node.expr.__class__.__name__ != 'Const': | |
@@ -172,7 +172,7 b' class StatementFindingAstVisitor(compile' | |||||
172 | return self.recordLine(self.getFirstLine(node)) |
|
172 | return self.recordLine(self.getFirstLine(node)) | |
173 | else: |
|
173 | else: | |
174 | return 0 |
|
174 | return 0 | |
175 |
|
175 | |||
176 | def recordLine(self, lineno): |
|
176 | def recordLine(self, lineno): | |
177 | # Returns a bool, whether the line is included or excluded. |
|
177 | # Returns a bool, whether the line is included or excluded. | |
178 | if lineno: |
|
178 | if lineno: | |
@@ -196,9 +196,9 b' class StatementFindingAstVisitor(compile' | |||||
196 | self.statements[lineno] = 1 |
|
196 | self.statements[lineno] = 1 | |
197 | return 1 |
|
197 | return 1 | |
198 | return 0 |
|
198 | return 0 | |
199 |
|
199 | |||
200 | default = recordNodeLine |
|
200 | default = recordNodeLine | |
201 |
|
201 | |||
202 | def recordAndDispatch(self, node): |
|
202 | def recordAndDispatch(self, node): | |
203 | self.recordNodeLine(node) |
|
203 | self.recordNodeLine(node) | |
204 | self.dispatch(node) |
|
204 | self.dispatch(node) | |
@@ -209,7 +209,7 b' class StatementFindingAstVisitor(compile' | |||||
209 | self.excluding_suite = 1 |
|
209 | self.excluding_suite = 1 | |
210 | self.recordAndDispatch(body) |
|
210 | self.recordAndDispatch(body) | |
211 | self.excluding_suite = exsuite |
|
211 | self.excluding_suite = exsuite | |
212 |
|
212 | |||
213 | def doPlainWordSuite(self, prevsuite, suite): |
|
213 | def doPlainWordSuite(self, prevsuite, suite): | |
214 | # Finding the exclude lines for else's is tricky, because they aren't |
|
214 | # Finding the exclude lines for else's is tricky, because they aren't | |
215 | # present in the compiler parse tree. Look at the previous suite, |
|
215 | # present in the compiler parse tree. Look at the previous suite, | |
@@ -217,17 +217,17 b' class StatementFindingAstVisitor(compile' | |||||
217 | # first line are excluded, then we exclude the else. |
|
217 | # first line are excluded, then we exclude the else. | |
218 | lastprev = self.getLastLine(prevsuite) |
|
218 | lastprev = self.getLastLine(prevsuite) | |
219 | firstelse = self.getFirstLine(suite) |
|
219 | firstelse = self.getFirstLine(suite) | |
220 | for l in range(lastprev+1, firstelse): |
|
220 | for l in range(lastprev + 1, firstelse): | |
221 | if self.suite_spots.has_key(l): |
|
221 | if self.suite_spots.has_key(l): | |
222 | self.doSuite(None, suite, exclude=self.excluded.has_key(l)) |
|
222 | self.doSuite(None, suite, exclude=self.excluded.has_key(l)) | |
223 | break |
|
223 | break | |
224 | else: |
|
224 | else: | |
225 | self.doSuite(None, suite) |
|
225 | self.doSuite(None, suite) | |
226 |
|
226 | |||
227 | def doElse(self, prevsuite, node): |
|
227 | def doElse(self, prevsuite, node): | |
228 | if node.else_: |
|
228 | if node.else_: | |
229 | self.doPlainWordSuite(prevsuite, node.else_) |
|
229 | self.doPlainWordSuite(prevsuite, node.else_) | |
230 |
|
230 | |||
231 | def visitFor(self, node): |
|
231 | def visitFor(self, node): | |
232 | self.doSuite(node, node.body) |
|
232 | self.doSuite(node, node.body) | |
233 | self.doElse(node.body, node) |
|
233 | self.doElse(node.body, node) | |
@@ -250,21 +250,21 b' class StatementFindingAstVisitor(compile' | |||||
250 | if not a: |
|
250 | if not a: | |
251 | # It's a plain "except:". Find the previous suite. |
|
251 | # It's a plain "except:". Find the previous suite. | |
252 | if i > 0: |
|
252 | if i > 0: | |
253 | prev = node.handlers[i-1][2] |
|
253 | prev = node.handlers[i - 1][2] | |
254 | else: |
|
254 | else: | |
255 | prev = node.body |
|
255 | prev = node.body | |
256 | self.doPlainWordSuite(prev, h) |
|
256 | self.doPlainWordSuite(prev, h) | |
257 | else: |
|
257 | else: | |
258 | self.doSuite(a, h) |
|
258 | self.doSuite(a, h) | |
259 | self.doElse(node.handlers[-1][2], node) |
|
259 | self.doElse(node.handlers[-1][2], node) | |
260 |
|
260 | |||
261 | def visitTryFinally(self, node): |
|
261 | def visitTryFinally(self, node): | |
262 | self.doSuite(node, node.body) |
|
262 | self.doSuite(node, node.body) | |
263 | self.doPlainWordSuite(node.body, node.final) |
|
263 | self.doPlainWordSuite(node.body, node.final) | |
264 |
|
264 | |||
265 | def visitWith(self, node): |
|
265 | def visitWith(self, node): | |
266 | self.doSuite(node, node.body) |
|
266 | self.doSuite(node, node.body) | |
267 |
|
267 | |||
268 | def visitGlobal(self, node): |
|
268 | def visitGlobal(self, node): | |
269 | # "global" statements don't execute like others (they don't call the |
|
269 | # "global" statements don't execute like others (they don't call the | |
270 | # trace function), so don't record their line numbers. |
|
270 | # trace function), so don't record their line numbers. | |
@@ -285,7 +285,7 b' class coverage:' | |||||
285 | # A dictionary with an entry for (Python source file name, line number |
|
285 | # A dictionary with an entry for (Python source file name, line number | |
286 | # in that file) if that line has been executed. |
|
286 | # in that file) if that line has been executed. | |
287 | c = {} |
|
287 | c = {} | |
288 |
|
288 | |||
289 | # A map from canonical Python source file name to a dictionary in |
|
289 | # A map from canonical Python source file name to a dictionary in | |
290 | # which there's an entry for each line number that has been |
|
290 | # which there's an entry for each line number that has been | |
291 | # executed. |
|
291 | # executed. | |
@@ -313,19 +313,18 b' class coverage:' | |||||
313 | self.relative_dir = self.abs_file(os.curdir)+os.sep |
|
313 | self.relative_dir = self.abs_file(os.curdir)+os.sep | |
314 | self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]') |
|
314 | self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]') | |
315 |
|
315 | |||
316 | # t(f, x, y). This method is passed to sys.settrace as a trace function. |
|
316 | # t(f, x, y). This method is passed to sys.settrace as a trace function. | |
317 | # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and |
|
317 | # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and | |
318 | # the arguments and return value of the trace function. |
|
318 | # the arguments and return value of the trace function. | |
319 | # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code |
|
319 | # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code | |
320 | # objects. |
|
320 | # objects. | |
321 |
|
321 | def t(self, f, w, unused): #pragma: no cover | ||
322 | def t(self, f, w, unused): #pragma: no cover |
|
|||
323 | if w == 'line': |
|
322 | if w == 'line': | |
324 | self.c[(f.f_code.co_filename, f.f_lineno)] = 1 |
|
323 | self.c[(f.f_code.co_filename, f.f_lineno)] = 1 | |
325 | #-for c in self.cstack: |
|
324 | #-for c in self.cstack: | |
326 | #- c[(f.f_code.co_filename, f.f_lineno)] = 1 |
|
325 | #- c[(f.f_code.co_filename, f.f_lineno)] = 1 | |
327 | return self.t |
|
326 | return self.t | |
328 |
|
327 | |||
329 | def help(self, error=None): #pragma: no cover |
|
328 | def help(self, error=None): #pragma: no cover | |
330 | if error: |
|
329 | if error: | |
331 | print error |
|
330 | print error | |
@@ -363,7 +362,8 b' class coverage:' | |||||
363 | elif o[2:] + '=' in long_opts: |
|
362 | elif o[2:] + '=' in long_opts: | |
364 | settings[o[2:]+'='] = a |
|
363 | settings[o[2:]+'='] = a | |
365 | else: #pragma: no cover |
|
364 | else: #pragma: no cover | |
366 |
|
|
365 | # Can't get here, because getopt won't return anything unknown. | |
|
366 | pass | |||
367 |
|
367 | |||
368 | if settings.get('help'): |
|
368 | if settings.get('help'): | |
369 | help_fn() |
|
369 | help_fn() | |
@@ -377,14 +377,14 b' class coverage:' | |||||
377 | args_needed = (settings.get('execute') |
|
377 | args_needed = (settings.get('execute') | |
378 | or settings.get('annotate') |
|
378 | or settings.get('annotate') | |
379 | or settings.get('report')) |
|
379 | or settings.get('report')) | |
380 | action = (settings.get('erase') |
|
380 | action = (settings.get('erase') | |
381 | or settings.get('collect') |
|
381 | or settings.get('collect') | |
382 | or args_needed) |
|
382 | or args_needed) | |
383 | if not action: |
|
383 | if not action: | |
384 | help_fn("You must specify at least one of -e, -x, -c, -r, or -a.") |
|
384 | help_fn("You must specify at least one of -e, -x, -c, -r, or -a.") | |
385 | if not args_needed and args: |
|
385 | if not args_needed and args: | |
386 | help_fn("Unexpected arguments: %s" % " ".join(args)) |
|
386 | help_fn("Unexpected arguments: %s" % " ".join(args)) | |
387 |
|
387 | |||
388 | self.parallel_mode = settings.get('parallel-mode') |
|
388 | self.parallel_mode = settings.get('parallel-mode') | |
389 | self.get_ready() |
|
389 | self.get_ready() | |
390 |
|
390 | |||
@@ -402,7 +402,7 b' class coverage:' | |||||
402 | self.collect() |
|
402 | self.collect() | |
403 | if not args: |
|
403 | if not args: | |
404 | args = self.cexecuted.keys() |
|
404 | args = self.cexecuted.keys() | |
405 |
|
405 | |||
406 | ignore_errors = settings.get('ignore-errors') |
|
406 | ignore_errors = settings.get('ignore-errors') | |
407 | show_missing = settings.get('show-missing') |
|
407 | show_missing = settings.get('show-missing') | |
408 | directory = settings.get('directory=') |
|
408 | directory = settings.get('directory=') | |
@@ -412,7 +412,7 b' class coverage:' | |||||
412 | omit = [self.abs_file(p) for p in omit.split(',')] |
|
412 | omit = [self.abs_file(p) for p in omit.split(',')] | |
413 | else: |
|
413 | else: | |
414 | omit = [] |
|
414 | omit = [] | |
415 |
|
415 | |||
416 | if settings.get('report'): |
|
416 | if settings.get('report'): | |
417 | self.report(args, show_missing, ignore_errors, omit_prefixes=omit) |
|
417 | self.report(args, show_missing, ignore_errors, omit_prefixes=omit) | |
418 | if settings.get('annotate'): |
|
418 | if settings.get('annotate'): | |
@@ -422,7 +422,7 b' class coverage:' | |||||
422 | self.usecache = usecache |
|
422 | self.usecache = usecache | |
423 | if cache_file and not self.cache: |
|
423 | if cache_file and not self.cache: | |
424 | self.cache_default = cache_file |
|
424 | self.cache_default = cache_file | |
425 |
|
425 | |||
426 | def get_ready(self, parallel_mode=False): |
|
426 | def get_ready(self, parallel_mode=False): | |
427 | if self.usecache and not self.cache: |
|
427 | if self.usecache and not self.cache: | |
428 | self.cache = os.environ.get(self.cache_env, self.cache_default) |
|
428 | self.cache = os.environ.get(self.cache_env, self.cache_default) | |
@@ -430,7 +430,7 b' class coverage:' | |||||
430 | self.cache += "." + gethostname() + "." + str(os.getpid()) |
|
430 | self.cache += "." + gethostname() + "." + str(os.getpid()) | |
431 | self.restore() |
|
431 | self.restore() | |
432 | self.analysis_cache = {} |
|
432 | self.analysis_cache = {} | |
433 |
|
433 | |||
434 | def start(self, parallel_mode=False): |
|
434 | def start(self, parallel_mode=False): | |
435 | self.get_ready() |
|
435 | self.get_ready() | |
436 | if self.nesting == 0: #pragma: no cover |
|
436 | if self.nesting == 0: #pragma: no cover | |
@@ -438,7 +438,7 b' class coverage:' | |||||
438 | if hasattr(threading, 'settrace'): |
|
438 | if hasattr(threading, 'settrace'): | |
439 | threading.settrace(self.t) |
|
439 | threading.settrace(self.t) | |
440 | self.nesting += 1 |
|
440 | self.nesting += 1 | |
441 |
|
441 | |||
442 | def stop(self): |
|
442 | def stop(self): | |
443 | self.nesting -= 1 |
|
443 | self.nesting -= 1 | |
444 | if self.nesting == 0: #pragma: no cover |
|
444 | if self.nesting == 0: #pragma: no cover | |
@@ -462,7 +462,7 b' class coverage:' | |||||
462 | def begin_recursive(self): |
|
462 | def begin_recursive(self): | |
463 | self.cstack.append(self.c) |
|
463 | self.cstack.append(self.c) | |
464 | self.xstack.append(self.exclude_re) |
|
464 | self.xstack.append(self.exclude_re) | |
465 |
|
465 | |||
466 | def end_recursive(self): |
|
466 | def end_recursive(self): | |
467 | self.c = self.cstack.pop() |
|
467 | self.c = self.cstack.pop() | |
468 | self.exclude_re = self.xstack.pop() |
|
468 | self.exclude_re = self.xstack.pop() | |
@@ -568,7 +568,7 b' class coverage:' | |||||
568 | self.canonical_filename_cache[filename] = cf |
|
568 | self.canonical_filename_cache[filename] = cf | |
569 | return self.canonical_filename_cache[filename] |
|
569 | return self.canonical_filename_cache[filename] | |
570 |
|
570 | |||
571 | # canonicalize_filenames(). Copy results from "c" to "cexecuted", |
|
571 | # canonicalize_filenames(). Copy results from "c" to "cexecuted", | |
572 | # canonicalizing filenames on the way. Clear the "c" map. |
|
572 | # canonicalizing filenames on the way. Clear the "c" map. | |
573 |
|
573 | |||
574 | def canonicalize_filenames(self): |
|
574 | def canonicalize_filenames(self): | |
@@ -598,7 +598,6 b' class coverage:' | |||||
598 | # in the source code, (3) a list of lines of excluded statements, |
|
598 | # in the source code, (3) a list of lines of excluded statements, | |
599 | # and (4), a map of line numbers to multi-line line number ranges, for |
|
599 | # and (4), a map of line numbers to multi-line line number ranges, for | |
600 | # statements that cross lines. |
|
600 | # statements that cross lines. | |
601 |
|
||||
602 | def analyze_morf(self, morf): |
|
601 | def analyze_morf(self, morf): | |
603 | if self.analysis_cache.has_key(morf): |
|
602 | if self.analysis_cache.has_key(morf): | |
604 | return self.analysis_cache[morf] |
|
603 | return self.analysis_cache[morf] | |
@@ -636,26 +635,27 b' class coverage:' | |||||
636 | if len(tree) == 3 and type(tree[2]) == type(1): |
|
635 | if len(tree) == 3 and type(tree[2]) == type(1): | |
637 | return tree[2] |
|
636 | return tree[2] | |
638 | tree = tree[1] |
|
637 | tree = tree[1] | |
639 |
|
638 | |||
640 | def last_line_of_tree(self, tree): |
|
639 | def last_line_of_tree(self, tree): | |
641 | while True: |
|
640 | while True: | |
642 | if len(tree) == 3 and type(tree[2]) == type(1): |
|
641 | if len(tree) == 3 and type(tree[2]) == type(1): | |
643 | return tree[2] |
|
642 | return tree[2] | |
644 | tree = tree[-1] |
|
643 | tree = tree[-1] | |
645 |
|
644 | |||
646 | def find_docstring_pass_pair(self, tree, spots): |
|
645 | def find_docstring_pass_pair(self, tree, spots): | |
647 | for i in range(1, len(tree)): |
|
646 | for i in range(1, len(tree)): | |
648 | if self.is_string_constant(tree[i]) and |
|
647 | if (self.is_string_constant(tree[i]) and | |
|
648 | self.is_pass_stmt(tree[i + 1]): | |||
649 | first_line = self.first_line_of_tree(tree[i]) |
|
649 | first_line = self.first_line_of_tree(tree[i]) | |
650 | last_line = self.last_line_of_tree(tree[i+1]) |
|
650 | last_line = self.last_line_of_tree(tree[i + 1]) | |
651 | self.record_multiline(spots, first_line, last_line) |
|
651 | self.record_multiline(spots, first_line, last_line) | |
652 |
|
652 | |||
653 | def is_string_constant(self, tree): |
|
653 | def is_string_constant(self, tree): | |
654 | try: |
|
654 | try: | |
655 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt |
|
655 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt | |
656 | except: |
|
656 | except: | |
657 | return False |
|
657 | return False | |
658 |
|
658 | |||
659 | def is_pass_stmt(self, tree): |
|
659 | def is_pass_stmt(self, tree): | |
660 | try: |
|
660 | try: | |
661 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt |
|
661 | return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt | |
@@ -663,9 +663,9 b' class coverage:' | |||||
663 | return False |
|
663 | return False | |
664 |
|
664 | |||
665 | def record_multiline(self, spots, i, j): |
|
665 | def record_multiline(self, spots, i, j): | |
666 | for l in range(i, j+1): |
|
666 | for l in range(i, j + 1): | |
667 | spots[l] = (i, j) |
|
667 | spots[l] = (i, j) | |
668 |
|
668 | |||
669 | def get_suite_spots(self, tree, spots): |
|
669 | def get_suite_spots(self, tree, spots): | |
670 | """ Analyze a parse tree to find suite introducers which span a number |
|
670 | """ Analyze a parse tree to find suite introducers which span a number | |
671 | of lines. |
|
671 | of lines. | |
@@ -675,16 +675,16 b' class coverage:' | |||||
675 | if tree[i][0] == symbol.suite: |
|
675 | if tree[i][0] == symbol.suite: | |
676 | # Found a suite, look back for the colon and keyword. |
|
676 | # Found a suite, look back for the colon and keyword. | |
677 | lineno_colon = lineno_word = None |
|
677 | lineno_colon = lineno_word = None | |
678 | for j in range(i-1, 0, -1): |
|
678 | for j in range(i - 1, 0, -1): | |
679 | if tree[j][0] == token.COLON: |
|
679 | if tree[j][0] == token.COLON: | |
680 | # Colons are never executed themselves: we want the |
|
680 | # Colons are never executed themselves: we want the | |
681 | # line number of the last token before the colon. |
|
681 | # line number of the last token before the colon. | |
682 | lineno_colon = self.last_line_of_tree(tree[j-1]) |
|
682 | lineno_colon = self.last_line_of_tree(tree[j - 1]) | |
683 | elif tree[j][0] == token.NAME: |
|
683 | elif tree[j][0] == token.NAME: | |
684 | if tree[j][1] == 'elif': |
|
684 | if tree[j][1] == 'elif': | |
685 | # Find the line number of the first |
|
685 | # Find the line number of the first | |
686 | # after the keyword. |
|
686 | # non-terminal after the keyword. | |
687 | t = tree[j+1] |
|
687 | t = tree[j + 1] | |
688 | while t and token.ISNONTERMINAL(t[0]): |
|
688 | while t and token.ISNONTERMINAL(t[0]): | |
689 | t = t[1] |
|
689 | t = t[1] | |
690 | if t: |
|
690 | if t: | |
@@ -694,7 +694,7 b' class coverage:' | |||||
694 | break |
|
694 | break | |
695 | elif tree[j][0] == symbol.except_clause: |
|
695 | elif tree[j][0] == symbol.except_clause: | |
696 | # "except" clauses look like: |
|
696 | # "except" clauses look like: | |
697 | # ('except_clause', ('NAME', 'except', lineno), |
|
697 | # ('except_clause', ('NAME', 'except', lineno),...) | |
698 | if tree[j][1][0] == token.NAME: |
|
698 | if tree[j][1][0] == token.NAME: | |
699 | lineno_word = tree[j][1][2] |
|
699 | lineno_word = tree[j][1][2] | |
700 | break |
|
700 | break | |
@@ -703,11 +703,11 b' class coverage:' | |||||
703 | # between the two with the two line numbers. |
|
703 | # between the two with the two line numbers. | |
704 | self.record_multiline(spots, lineno_word, lineno_colon) |
|
704 | self.record_multiline(spots, lineno_word, lineno_colon) | |
705 |
|
705 | |||
706 | # "pass" statements are tricky: different versions |
|
706 | # "pass" statements are tricky: different versions | |
707 | # treat them differently, especially in |
|
707 | # of Python treat them differently, especially in | |
708 | # function with a doc string |
|
708 | # the common case of a function with a doc string | |
|
709 | # and a single pass statement. | |||
709 | self.find_docstring_pass_pair(tree[i], spots) |
|
710 | self.find_docstring_pass_pair(tree[i], spots) | |
710 |
|
||||
711 | elif tree[i][0] == symbol.simple_stmt: |
|
711 | elif tree[i][0] == symbol.simple_stmt: | |
712 | first_line = self.first_line_of_tree(tree[i]) |
|
712 | first_line = self.first_line_of_tree(tree[i]) | |
713 | last_line = self.last_line_of_tree(tree[i]) |
|
713 | last_line = self.last_line_of_tree(tree[i]) | |
@@ -724,7 +724,7 b' class coverage:' | |||||
724 | lines = text.split('\n') |
|
724 | lines = text.split('\n') | |
725 | for i in range(len(lines)): |
|
725 | for i in range(len(lines)): | |
726 | if reExclude.search(lines[i]): |
|
726 | if reExclude.search(lines[i]): | |
727 | excluded[i+1] = 1 |
|
727 | excluded[i + 1] = 1 | |
728 |
|
728 | |||
729 | # Parse the code and analyze the parse tree to find out which statements |
|
729 | # Parse the code and analyze the parse tree to find out which statements | |
730 | # are multiline, and where suites begin and end. |
|
730 | # are multiline, and where suites begin and end. | |
@@ -732,7 +732,7 b' class coverage:' | |||||
732 | tree = parser.suite(text+'\n\n').totuple(1) |
|
732 | tree = parser.suite(text+'\n\n').totuple(1) | |
733 | self.get_suite_spots(tree, suite_spots) |
|
733 | self.get_suite_spots(tree, suite_spots) | |
734 | #print "Suite spots:", suite_spots |
|
734 | #print "Suite spots:", suite_spots | |
735 |
|
735 | |||
736 | # Use the compiler module to parse the text and find the executable |
|
736 | # Use the compiler module to parse the text and find the executable | |
737 | # statements. We add newlines to be impervious to final partial lines. |
|
737 | # statements. We add newlines to be impervious to final partial lines. | |
738 | statements = {} |
|
738 | statements = {} | |
@@ -831,7 +831,8 b' class coverage:' | |||||
831 | def morf_name_compare(self, x, y): |
|
831 | def morf_name_compare(self, x, y): | |
832 | return cmp(self.morf_name(x), self.morf_name(y)) |
|
832 | return cmp(self.morf_name(x), self.morf_name(y)) | |
833 |
|
833 | |||
834 | def report(self, morfs, show_missing=1, ignore_errors=0, file=None, |
|
834 | def report(self, morfs, show_missing=1, ignore_errors=0, file=None, | |
|
835 | omit_prefixes=[]): | |||
835 | if not isinstance(morfs, types.ListType): |
|
836 | if not isinstance(morfs, types.ListType): | |
836 | morfs = [morfs] |
|
837 | morfs = [morfs] | |
837 | # On windows, the shell doesn't expand wildcards. Do it here. |
|
838 | # On windows, the shell doesn't expand wildcards. Do it here. | |
@@ -842,7 +843,7 b' class coverage:' | |||||
842 | else: |
|
843 | else: | |
843 | globbed.append(morf) |
|
844 | globbed.append(morf) | |
844 | morfs = globbed |
|
845 | morfs = globbed | |
845 |
|
846 | |||
846 | morfs = self.filter_by_prefix(morfs, omit_prefixes) |
|
847 | morfs = self.filter_by_prefix(morfs, omit_prefixes) | |
847 | morfs.sort(self.morf_name_compare) |
|
848 | morfs.sort(self.morf_name_compare) | |
848 |
|
849 | |||
@@ -856,8 +857,8 b' class coverage:' | |||||
856 | fmt_coverage = fmt_coverage + " %s" |
|
857 | fmt_coverage = fmt_coverage + " %s" | |
857 | if not file: |
|
858 | if not file: | |
858 | file = sys.stdout |
|
859 | file = sys.stdout | |
859 | print >>file, header |
|
860 | print >> file, header | |
860 | print >>file, "-" * len(header) |
|
861 | print >> file, "-" * len(header) | |
861 | total_statements = 0 |
|
862 | total_statements = 0 | |
862 | total_executed = 0 |
|
863 | total_executed = 0 | |
863 | for morf in morfs: |
|
864 | for morf in morfs: | |
@@ -903,14 +904,16 b' class coverage:' | |||||
903 | for morf in morfs: |
|
904 | for morf in morfs: | |
904 | try: |
|
905 | try: | |
905 | filename, statements, excluded, missing, _ = self.analysis2(morf) |
|
906 | filename, statements, excluded, missing, _ = self.analysis2(morf) | |
906 | self.annotate_file(filename, statements, excluded, missing, |
|
907 | self.annotate_file(filename, statements, excluded, missing, | |
|
908 | directory) | |||
907 | except KeyboardInterrupt: |
|
909 | except KeyboardInterrupt: | |
908 | raise |
|
910 | raise | |
909 | except: |
|
911 | except: | |
910 | if not ignore_errors: |
|
912 | if not ignore_errors: | |
911 | raise |
|
913 | raise | |
912 |
|
914 | |||
913 | def annotate_file(self, filename, statements, excluded, missing, |
|
915 | def annotate_file(self, filename, statements, excluded, missing, | |
|
916 | directory=None): | |||
914 | source = open(filename, 'r') |
|
917 | source = open(filename, 'r') | |
915 | if directory: |
|
918 | if directory: | |
916 | dest_file = os.path.join(directory, |
|
919 | dest_file = os.path.join(directory, | |
@@ -937,7 +940,7 b' class coverage:' | |||||
937 | if self.blank_re.match(line): |
|
940 | if self.blank_re.match(line): | |
938 | dest.write(' ') |
|
941 | dest.write(' ') | |
939 | elif self.else_re.match(line): |
|
942 | elif self.else_re.match(line): | |
940 | # Special logic for lines containing only 'else:'. |
|
943 | # Special logic for lines containing only 'else:'. | |
941 | # See [GDR 2001-12-04b, 3.2]. |
|
944 | # See [GDR 2001-12-04b, 3.2]. | |
942 | if i >= len(statements) and j >= len(missing): |
|
945 | if i >= len(statements) and j >= len(missing): | |
943 | dest.write('! ') |
|
946 | dest.write('! ') | |
@@ -961,40 +964,40 b' class coverage:' | |||||
961 | the_coverage = coverage() |
|
964 | the_coverage = coverage() | |
962 |
|
965 | |||
963 | # Module functions call methods in the singleton object. |
|
966 | # Module functions call methods in the singleton object. | |
964 | def use_cache(*args, **kw): |
|
967 | def use_cache(*args, **kw): | |
965 | return the_coverage.use_cache(*args, **kw) |
|
968 | return the_coverage.use_cache(*args, **kw) | |
966 |
|
969 | |||
967 | def start(*args, **kw): |
|
970 | def start(*args, **kw): | |
968 | return the_coverage.start(*args, **kw) |
|
971 | return the_coverage.start(*args, **kw) | |
969 |
|
972 | |||
970 | def stop(*args, **kw): |
|
973 | def stop(*args, **kw): | |
971 | return the_coverage.stop(*args, **kw) |
|
974 | return the_coverage.stop(*args, **kw) | |
972 |
|
975 | |||
973 | def erase(*args, **kw): |
|
976 | def erase(*args, **kw): | |
974 | return the_coverage.erase(*args, **kw) |
|
977 | return the_coverage.erase(*args, **kw) | |
975 |
|
978 | |||
976 | def begin_recursive(*args, **kw): |
|
979 | def begin_recursive(*args, **kw): | |
977 | return the_coverage.begin_recursive(*args, **kw) |
|
980 | return the_coverage.begin_recursive(*args, **kw) | |
978 |
|
981 | |||
979 | def end_recursive(*args, **kw): |
|
982 | def end_recursive(*args, **kw): | |
980 | return the_coverage.end_recursive(*args, **kw) |
|
983 | return the_coverage.end_recursive(*args, **kw) | |
981 |
|
984 | |||
982 | def exclude(*args, **kw): |
|
985 | def exclude(*args, **kw): | |
983 | return the_coverage.exclude(*args, **kw) |
|
986 | return the_coverage.exclude(*args, **kw) | |
984 |
|
987 | |||
985 | def analysis(*args, **kw): |
|
988 | def analysis(*args, **kw): | |
986 | return the_coverage.analysis(*args, **kw) |
|
989 | return the_coverage.analysis(*args, **kw) | |
987 |
|
990 | |||
988 | def analysis2(*args, **kw): |
|
991 | def analysis2(*args, **kw): | |
989 | return the_coverage.analysis2(*args, **kw) |
|
992 | return the_coverage.analysis2(*args, **kw) | |
990 |
|
993 | |||
991 | def report(*args, **kw): |
|
994 | def report(*args, **kw): | |
992 | return the_coverage.report(*args, **kw) |
|
995 | return the_coverage.report(*args, **kw) | |
993 |
|
996 | |||
994 | def annotate(*args, **kw): |
|
997 | def annotate(*args, **kw): | |
995 | return the_coverage.annotate(*args, **kw) |
|
998 | return the_coverage.annotate(*args, **kw) | |
996 |
|
999 | |||
997 | def annotate_file(*args, **kw): |
|
1000 | def annotate_file(*args, **kw): | |
998 | return the_coverage.annotate_file(*args, **kw) |
|
1001 | return the_coverage.annotate_file(*args, **kw) | |
999 |
|
1002 | |||
1000 | # Save coverage data when Python exits. (The atexit module wasn't |
|
1003 | # Save coverage data when Python exits. (The atexit module wasn't | |
@@ -1008,7 +1011,7 b' except ImportError:' | |||||
1008 |
|
1011 | |||
1009 | def main(): |
|
1012 | def main(): | |
1010 | the_coverage.command_line(sys.argv[1:]) |
|
1013 | the_coverage.command_line(sys.argv[1:]) | |
1011 |
|
1014 | |||
1012 | # Command-line interface. |
|
1015 | # Command-line interface. | |
1013 | if __name__ == '__main__': |
|
1016 | if __name__ == '__main__': | |
1014 | main() |
|
1017 | main() | |
@@ -1072,7 +1075,7 b" if __name__ == '__main__':" | |||||
1072 | # Thanks, Allen. |
|
1075 | # Thanks, Allen. | |
1073 | # |
|
1076 | # | |
1074 | # 2005-12-02 NMB Call threading.settrace so that all threads are measured. |
|
1077 | # 2005-12-02 NMB Call threading.settrace so that all threads are measured. | |
1075 | # Thanks Martin Fuzzey. Add a file argument to report so that reports can be |
|
1078 | # Thanks Martin Fuzzey. Add a file argument to report so that reports can be | |
1076 | # captured to a different destination. |
|
1079 | # captured to a different destination. | |
1077 | # |
|
1080 | # | |
1078 | # 2005-12-03 NMB coverage.py can now measure itself. |
|
1081 | # 2005-12-03 NMB coverage.py can now measure itself. |
@@ -48,7 +48,8 b' elif url.startswith("remote:http"):' | |||||
48 |
|
48 | |||
49 | out.write("%s hook: " % name) |
|
49 | out.write("%s hook: " % name) | |
50 | for v in env: |
|
50 | for v in env: | |
51 | out.write("%s=%s " % (v, os.environ[v].replace(os.environ["HGTMP"], '$HGTMP'))) |
|
51 | out.write("%s=%s " % | |
|
52 | (v, os.environ[v].replace(os.environ["HGTMP"], '$HGTMP'))) | |||
52 | out.write("\n") |
|
53 | out.write("\n") | |
53 | out.close() |
|
54 | out.close() | |
54 |
|
55 |
@@ -6,7 +6,8 b' for f in sys.argv[1:]:' | |||||
6 | try: |
|
6 | try: | |
7 | print f, '->', os.readlink(f) |
|
7 | print f, '->', os.readlink(f) | |
8 | except OSError, err: |
|
8 | except OSError, err: | |
9 | if err.errno != errno.EINVAL: |
|
9 | if err.errno != errno.EINVAL: | |
|
10 | raise | |||
10 | print f, 'not a symlink' |
|
11 | print f, 'not a symlink' | |
11 |
|
12 | |||
12 | sys.exit(0) |
|
13 | sys.exit(0) |
@@ -231,7 +231,7 b' def splitnewlines(text):' | |||||
231 | if last: |
|
231 | if last: | |
232 | lines.append(last) |
|
232 | lines.append(last) | |
233 | return lines |
|
233 | return lines | |
234 | lines.append(text[i:n+1]) |
|
234 | lines.append(text[i:n + 1]) | |
235 | i = n + 1 |
|
235 | i = n + 1 | |
236 |
|
236 | |||
237 | def parsehghaveoutput(lines): |
|
237 | def parsehghaveoutput(lines): | |
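
splitnewlines() in run-tests.py, touched above, splits text on '\n' only while keeping the terminator attached to each line, which is what the test runner needs when it diffs expected and actual output. The whole helper is small enough to restate:

    def splitnewlines(text):
        # like str.splitlines(True), but split on '\n' only
        lines = []
        i = 0
        while True:
            n = text.find('\n', i)
            if n == -1:
                last = text[i:]
                if last:
                    lines.append(last)
                return lines
            lines.append(text[i:n + 1])
            i = n + 1

    # splitnewlines('a\nb\nc') == ['a\n', 'b\n', 'c']
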
@@ -660,7 +660,8 b' def runchildren(options, tests):' | |||||
660 | jobs = [[] for j in xrange(options.jobs)] |
|
660 | jobs = [[] for j in xrange(options.jobs)] | |
661 | while tests: |
|
661 | while tests: | |
662 | for job in jobs: |
|
662 | for job in jobs: | |
663 | if not tests: |
|
663 | if not tests: | |
|
664 | break | |||
664 | job.append(tests.pop()) |
|
665 | job.append(tests.pop()) | |
665 | fps = {} |
|
666 | fps = {} | |
666 | for j, job in enumerate(jobs): |
|
667 | for j, job in enumerate(jobs): |
@@ -1,13 +1,13 b'' | |||||
1 | #!/bin/sh |
|
1 | #!/bin/sh | |
2 |
|
2 | |||
3 | "$TESTDIR/hghave" rst2html || exit 80 |
|
3 | "$TESTDIR/hghave" rst2html || exit 80 | |
4 | RST2HTML= |
|
4 | RST2HTML=`which rst2html 2> /dev/null || which rst2html.py` | |
5 |
|
5 | |||
6 | HGENCODING=UTF-8 |
|
6 | HGENCODING=UTF-8 | |
7 | export HGENCODING |
|
7 | export HGENCODING | |
8 |
|
8 | |||
9 | for PO in C $TESTDIR/../i18n/*.po; do |
|
9 | for PO in C $TESTDIR/../i18n/*.po; do | |
10 | LOCALE= |
|
10 | LOCALE=`basename $PO .po` | |
11 | echo |
|
11 | echo | |
12 | echo "% extracting documentation from $LOCALE" |
|
12 | echo "% extracting documentation from $LOCALE" | |
13 | echo ".. -*- coding: utf-8 -*-" > gendoc-$LOCALE.txt |
|
13 | echo ".. -*- coding: utf-8 -*-" > gendoc-$LOCALE.txt |
@@ -16,7 +16,8 b' def writeauth(items):' | |||||
16 | return ui |
|
16 | return ui | |
17 |
|
17 | |||
18 | def dumpdict(dict): |
|
18 | def dumpdict(dict): | |
19 | return '{' + ', '.join(['%s: %s' % (k, dict[k]) |
|
19 | return '{' + ', '.join(['%s: %s' % (k, dict[k]) | |
|
20 | for k in sorted(dict.iterkeys())]) + '}' | |||
20 |
|
21 | |||
21 | def test(auth): |
|
22 | def test(auth): | |
22 | print 'CFG:', dumpdict(auth) |
|
23 | print 'CFG:', dumpdict(auth) | |
@@ -56,6 +57,8 b" test({'x.prefix': 'example.org', 'x.sche" | |||||
56 | test({'x.prefix': 'example.org', 'x.schemes': 'http https'}) |
|
57 | test({'x.prefix': 'example.org', 'x.schemes': 'http https'}) | |
57 |
|
58 | |||
58 | print '\n*** Test prefix matching\n' |
|
59 | print '\n*** Test prefix matching\n' | |
59 | test({'x.prefix': 'http://example.org/foo', |
|
60 | test({'x.prefix': 'http://example.org/foo', | |
60 |
|
|
61 | 'y.prefix': 'http://example.org/bar'}) | |
|
62 | test({'x.prefix': 'http://example.org/foo', | |||
|
63 | 'y.prefix': 'http://example.org/foo/bar'}) | |||
61 | test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'}) |
|
64 | test({'x.prefix': '*', 'y.prefix': 'https://example.org/bar'}) |
@@ -83,7 +83,7 b' echo "# or true.executable not found in ' | |||||
83 | domerge -r 2 --config merge-tools.true.executable=nonexistingmergetool |
|
83 | domerge -r 2 --config merge-tools.true.executable=nonexistingmergetool | |
84 |
|
84 | |||
85 | echo "# or true.executable with bogus path:" |
|
85 | echo "# or true.executable with bogus path:" | |
86 | domerge -r 2 --config merge-tools.true.executable=/ |
|
86 | domerge -r 2 --config merge-tools.true.executable=/nonexisting/mergetool | |
87 |
|
87 | |||
88 | echo "# but true.executable set to cat found in PATH works:" |
|
88 | echo "# but true.executable set to cat found in PATH works:" | |
89 | echo "true.executable=cat" >> .hg/hgrc |
|
89 | echo "true.executable=cat" >> .hg/hgrc | |
@@ -103,7 +103,7 b' echo "# merge-patterns specifies executa' | |||||
103 | domerge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistingmergetool |
|
103 | domerge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistingmergetool | |
104 |
|
104 | |||
105 | echo "# merge-patterns specifies executable with bogus path and gets warning:" |
|
105 | echo "# merge-patterns specifies executable with bogus path and gets warning:" | |
106 | domerge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/ |
|
106 | domerge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexisting/mergetool | |
107 |
|
107 | |||
108 | echo |
|
108 | echo | |
109 | echo ui.merge overrules priority |
|
109 | echo ui.merge overrules priority |
@@ -116,7 +116,7 b' M f' | |||||
116 | false.whatever= |
|
116 | false.whatever= | |
117 | true.priority=1 |
|
117 | true.priority=1 | |
118 | # hg update -C 1 |
|
118 | # hg update -C 1 | |
119 | # hg merge -r 2 --config merge-tools.true.executable=/ |
|
119 | # hg merge -r 2 --config merge-tools.true.executable=/nonexisting/mergetool | |
120 | merging f |
|
120 | merging f | |
121 | merging f failed! |
|
121 | merging f failed! | |
122 | 0 files updated, 0 files merged, 0 files removed, 1 files unresolved |
|
122 | 0 files updated, 0 files merged, 0 files removed, 1 files unresolved | |
@@ -218,7 +218,7 b' false.whatever=' | |||||
218 | true.priority=1 |
|
218 | true.priority=1 | |
219 | true.executable=cat |
|
219 | true.executable=cat | |
220 | # hg update -C 1 |
|
220 | # hg update -C 1 | |
221 |
# hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/ |
|
221 | # hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexisting/mergetool | |
222 | couldn't find merge tool true specified for f |
|
222 | couldn't find merge tool true specified for f | |
223 | merging f |
|
223 | merging f | |
224 | merging f failed! |
|
224 | merging f failed! |
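Both bogus-path hunks exercise the same behaviour: a configured merge tool that cannot be found ('/nonexisting/mergetool') should trigger a warning and a fallback rather than being run. A minimal sketch of that kind of lookup, assuming a simplified model (find_tool is a made-up helper, not Mercurial's real one):

    # Toy illustration only: find_tool is a made-up helper, not Mercurial's.
    import os

    def find_tool(executable, path=None):
        """Return a usable path for executable, or None if it cannot be found."""
        if os.path.isabs(executable):
            # bogus absolute path such as /nonexisting/mergetool -> None
            return executable if os.access(executable, os.X_OK) else None
        for d in (path or os.environ.get('PATH', '')).split(os.pathsep):
            candidate = os.path.join(d, executable)
            if os.access(candidate, os.X_OK):
                return candidate
        return None

    print(find_tool('/nonexisting/mergetool'))  # None -> warn and fall back
    print(find_tool('cat') is not None)         # True on most Unix systems

Which is why the test then appends true.executable=cat to .hg/hgrc: an executable that really is on PATH lets the merge proceed.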
@@ -76,4 +76,4 @@ echo '% git=no: regular patch after qref
 hg cp a c
 hg qrefresh -d '0 0'
 cat .hg/patches/regular
-cd ..
\ No newline at end of file
+cd ..
@@ -166,4 +166,4 @@ hg qnew -f p
 hg mv a b
 hg qrefresh
 hg qdiff --nodates
-cd ..
\ No newline at end of file
+cd ..
@@ -84,4 +84,4 @@ hg log -d '> 02/01 ' --template '{date|d
 hg log -d '< 02/01 ' --template '{date|date}\n'
 
 hg log -d ' > 02/01 ' --template '{date|date}\n'
-hg log -d ' < 02/01 ' --template '{date|date}\n'
\ No newline at end of file
+hg log -d ' < 02/01 ' --template '{date|date}\n'
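The surrounding commands exercise date ranges written both as '> 02/01' and with extra whitespace as ' > 02/01 ', so the spec has to be stripped before the comparison operator is examined; the hunk itself only adds the missing final newline. A toy model of that normalization step (parse_datespec is invented for illustration, not Mercurial's util.matchdate):

    # Toy illustration only: parse_datespec is invented, not util.matchdate.
    def parse_datespec(spec):
        """Split a spec like ' > 02/01 ' into (mode, date-string)."""
        spec = spec.strip()
        if spec.startswith('>'):
            return ('after', spec[1:].strip())
        if spec.startswith('<'):
            return ('before', spec[1:].strip())
        return ('on', spec)

    print(parse_datespec(' > 02/01 '))  # ('after', '02/01')
    print(parse_datespec('< 02/01 '))   # ('before', '02/01')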
@@ -89,7 +89,8 @@ TAO = split_lines(""" The Way that c
 
 """)
 
-MERGED_RESULT = split_lines(""" The Way that can be told of is not the eternal Way;
+MERGED_RESULT = split_lines("""\
+The Way that can be told of is not the eternal Way;
 The name that can be named is not the eternal name.
 The Nameless is the origin of Heaven and Earth;
 The Named is the mother of all things.
@@ -125,7 +126,7 @@ class TestMerge3(TestCase):
 [(0, 2,
 0, 2,
 0, 2),
-(2,2, 2,2,
+(2, 2, 2, 2, 2, 2)])
 
 self.assertEquals(list(m3.merge_regions()),
 [('unchanged', 0, 2)])
@@ -141,8 +142,8 @@ class TestMerge3(TestCase):
 # todo: should use a sentinal at end as from get_matching_blocks
 # to match without zz
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,1, 2,3, 0,1),
-(1,1, 3,
+[(0, 1, 2, 3, 0, 1),
+(1, 1, 3, 3, 1, 1),])
 
 self.assertEquals(list(m3.merge_regions()),
 [('a', 0, 2),
@@ -159,7 +160,7 @@ class TestMerge3(TestCase):
 # todo: should use a sentinal at end as from get_matching_blocks
 # to match without zz
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,0, 2,
+[(0, 0, 2, 2, 0, 0)])
 
 self.assertEquals(list(m3.merge_regions()),
 [('a', 0, 2)])
@@ -177,9 +178,9 @@ class TestMerge3(TestCase):
 [(0, 1), (1, 2)])
 
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,1, 0,1, 0,1),
-(1,2,
-(2,2, 3,
+[(0, 1, 0, 1, 0, 1),
+(1, 2, 2, 3, 1, 2),
+(2, 2, 3, 3, 2, 2),])
 
 self.assertEquals(list(m3.merge_regions()),
 [('unchanged', 0, 1),
@@ -253,14 +254,14 @@ class TestMerge3(TestCase):
 [(0, 1), (1, 2)])
 
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,1, 0,1, 0,1),
-(1,2, 2,
-(2,2,
+[(0, 1, 0, 1, 0, 1),
+(1, 2, 2, 3, 2, 3),
+(2, 2, 3, 3, 3, 3),])
 
 self.assertEquals(list(m3.merge_regions()),
-[('unchanged', 0,1),
-('conflict', 1,1, 1,
-('unchanged', 1,2)])
+[('unchanged', 0, 1),
+('conflict', 1, 1, 1, 2, 1, 2),
+('unchanged', 1, 2)])
 
 self.assertEquals(list(m3.merge_groups()),
 [('unchanged', ['aaa\n']),
@@ -293,9 +294,9 @@ bbb
 [(0, 1), (2, 3)])
 
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,1, 0,1, 0,1),
-(2,3, 2,3, 2,3),
-(3,3, 3,3,
+[(0, 1, 0, 1, 0, 1),
+(2, 3, 2, 3, 2, 3),
+(3, 3, 3, 3, 3, 3),])
 
 def test_replace_multi(self):
 """Replacement with regions of different size."""
@@ -308,9 +309,9 @@ bbb
 
 
 self.assertEquals(list(m3.find_sync_regions()),
-[(0,1, 0,1, 0,1),
-(3,
-(4,4, 5,
+[(0, 1, 0, 1, 0, 1),
+(3, 4, 4, 5, 5, 6),
+(4, 4, 5, 5, 6, 6)])
 
 def test_merge_poem(self):
 """Test case from diff3 manual"""
@@ -25,14 +25,15 @@ class ProxyHandler (BaseHTTPServer.BaseH
 (ip, port) = self.client_address
 if hasattr(self, 'allowed_clients') and ip not in self.allowed_clients:
 self.raw_requestline = self.rfile.readline()
-if self.parse_request():
+if self.parse_request():
+    self.send_error(403)
 else:
 self.__base_handle()
 
 def _connect_to(self, netloc, soc):
 i = netloc.find(':')
 if i >= 0:
-host_port = netloc[:i], int(netloc[i+1:])
+host_port = netloc[:i], int(netloc[i + 1:])
 else:
 host_port = netloc, 80
 print "\t" "connect to %s:%d" % host_port
@@ -91,7 +92,8 @@ class ProxyHandler (BaseHTTPServer.BaseH
 while 1:
 count += 1
 (ins, _, exs) = select.select(iw, ow, iw, 3)
-if exs:
+if exs:
+    break
 if ins:
 for i in ins:
 if i is soc:
@@ -104,12 +106,13 @@ class ProxyHandler (BaseHTTPServer.BaseH
 count = 0
 else:
 print "\t" "idle", count
-if count == max_idling:
+if count == max_idling:
+    break
 
 do_HEAD = do_GET
 do_POST = do_GET
 do_PUT = do_GET
-do_DELETE=do_GET
+do_DELETE = do_GET
 
 class ThreadingHTTPServer (SocketServer.ThreadingMixIn,
 BaseHTTPServer.HTTPServer): pass
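For context, these fragments come from a small test proxy whose data pump is a select() loop: copy whatever is readable from one socket to the other, count idle timeouts, and give up after max_idling of them or on a socket error. The hunks above are style-only (splitting 'if exs: break' onto two lines, spacing out 'do_DELETE = do_GET'), so behaviour is unchanged. A compact sketch of the relay pattern itself (relay and its defaults are illustrative, not the test script's actual code):

    # Toy illustration only: relay and its defaults are illustrative, not the
    # test script's actual code.
    import select

    def relay(client, remote, timeout=3.0, max_idling=20, bufsize=8192):
        """Pump bytes both ways until close, error, or too many idle periods."""
        sockets = [client, remote]
        idle = 0
        while True:
            readable, _, errored = select.select(sockets, [], sockets, timeout)
            if errored:
                break                      # socket error reported by select
            if not readable:
                idle += 1
                if idle == max_idling:
                    break                  # nothing happened for too long
                continue
            idle = 0
            for s in readable:
                data = s.recv(bufsize)
                if not data:
                    return                 # peer closed; stop relaying
                other = remote if s is client else client
                other.sendall(data)

A request handler would call something like relay(client_sock, upstream_sock) once the upstream connection is established.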