@@ -0,0 +1,45 b'' | |||
|
1 | #!/usr/bin/env python | |
|
2 | # | |
|
3 | # Copyright 2005 by Intevation GmbH <intevation@intevation.de> | |
|
4 | # Author(s): | |
|
5 | # Thomas Arendsen Hein <thomas@intevation.de> | |
|
6 | # | |
|
7 | # This software may be used and distributed according to the terms | |
|
8 | # of the GNU General Public License, incorporated herein by reference. | |
|
9 | ||
|
10 | """ | |
|
11 | hg-ssh - a wrapper for ssh access to a limited set of mercurial repos | |
|
12 | ||
|
13 | To be used in ~/.ssh/authorized_keys with the "command" option, see sshd(8): | |
|
14 | command="hg-ssh path/to/repo1 /path/to/repo2 ~/repo3 ~user/repo4" ssh-dss ... | |
|
15 | (probably together with these other useful options: | |
|
16 | no-port-forwarding,no-X11-forwarding,no-agent-forwarding) | |
|
17 | ||
|
18 | This allows pull/push over ssh to to the repositories given as arguments. | |
|
19 | ||
|
20 | If all your repositories are subdirectories of a common directory, you can | |
|
21 | allow shorter paths with: | |
|
22 | command="cd path/to/my/repositories && hg-ssh repo1 subdir/repo2" | |
|
23 | """ | |
|
24 | ||
|
25 | from mercurial import commands | |
|
26 | ||
|
27 | import sys, os | |
|
28 | ||
|
29 | cwd = os.getcwd() | |
|
30 | allowed_paths = [os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) | |
|
31 | for path in sys.argv[1:]] | |
|
32 | orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?') | |
|
33 | ||
|
34 | if orig_cmd.startswith('hg -R ') and orig_cmd.endswith(' serve --stdio'): | |
|
35 | path = orig_cmd[6:-14] | |
|
36 | repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) | |
|
37 | if repo in allowed_paths: | |
|
38 | commands.dispatch(['-R', repo, 'serve', '--stdio']) | |
|
39 | else: | |
|
40 | sys.stderr.write("Illegal repository %r\n" % repo) | |
|
41 | sys.exit(-1) | |
|
42 | else: | |
|
43 | sys.stderr.write("Illegal command %r\n" % orig_cmd) | |
|
44 | sys.exit(-1) | |
|
45 |
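For illustration only (not part of the changeset above): Mercurial's ssh transport asks the remote side to run "hg -R <path> serve --stdio", and sshd hands that request to the forced command via SSH_ORIGINAL_COMMAND. A minimal sketch of the same allow-list check the wrapper performs, assuming a server-side working directory of /home/hg and the repository paths from the docstring's example:

# Sketch only -- mirrors the SSH_ORIGINAL_COMMAND validation done by hg-ssh;
# the /home/hg working directory and the repository paths are assumed values.
import os

def allowed(orig_cmd, allowed_paths, cwd='/home/hg'):
    # Only the exact "hg -R <path> serve --stdio" form is accepted.
    if not (orig_cmd.startswith('hg -R ') and orig_cmd.endswith(' serve --stdio')):
        return False
    path = orig_cmd[6:-14]
    repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
    return repo in allowed_paths

cwd = '/home/hg'
paths = [os.path.normpath(os.path.join(cwd, os.path.expanduser(p)))
         for p in ('path/to/repo1', '/path/to/repo2')]
print(allowed('hg -R path/to/repo1 serve --stdio', paths))   # True
print(allowed('hg -R ../other serve --stdio', paths))        # False (outside allow list)
print(allowed('rm -rf /', paths))                            # False (not an hg serve command)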
@@ -0,0 +1,15 b'' | |||
|
1 | #header# | |
|
2 | <title>Mercurial Error</title> | |
|
3 | </head> | |
|
4 | <body> | |
|
5 | ||
|
6 | <h2>Mercurial Error</h2> | |
|
7 | ||
|
8 | <p> | |
|
9 | An error occured while processing your request: | |
|
10 | </p> | |
|
11 | <p> | |
|
12 | #error|escape# | |
|
13 | </p> | |
|
14 | ||
|
15 | #footer# |
@@ -0,0 +1,18 b'' | |||
|
1 | #!/bin/sh | |
|
2 | # | |
|
3 | mkdir t | |
|
4 | cd t | |
|
5 | hg init | |
|
6 | echo 0 > a | |
|
7 | echo 0 > b | |
|
8 | hg ci -A -m m -d "0 0" | |
|
9 | hg rm a | |
|
10 | hg cat a | |
|
11 | sleep 1 # make sure mtime is changed | |
|
12 | echo 1 > b | |
|
13 | hg ci -m m -d "0 0" | |
|
14 | echo 2 > b | |
|
15 | hg cat -r 0 a | |
|
16 | hg cat -r 0 b | |
|
17 | hg cat -r 1 a | |
|
18 | hg cat -r 1 b |
@@ -0,0 +1,136 b'' | |||
|
1 | #!/bin/sh | |
|
2 | ||
|
3 | hg init | |
|
4 | mkdir d1 d1/d11 d2 | |
|
5 | echo d1/a > d1/a | |
|
6 | echo d1/ba > d1/ba | |
|
7 | echo d1/a1 > d1/d11/a1 | |
|
8 | echo d1/b > d1/b | |
|
9 | echo d2/b > d2/b | |
|
10 | hg add d1/a d1/b d1/ba d1/d11/a1 d2/b | |
|
11 | hg commit -m "1" -d "0 0" | |
|
12 | ||
|
13 | echo "# rename a single file" | |
|
14 | hg rename d1/d11/a1 d2/c | |
|
15 | hg status | |
|
16 | hg update -C | |
|
17 | ||
|
18 | echo "# rename --after a single file" | |
|
19 | mv d1/d11/a1 d2/c | |
|
20 | hg rename --after d1/d11/a1 d2/c | |
|
21 | hg status | |
|
22 | hg update -C | |
|
23 | ||
|
24 | echo "# move a single file to an existing directory" | |
|
25 | hg rename d1/d11/a1 d2 | |
|
26 | hg status | |
|
27 | hg update -C | |
|
28 | ||
|
29 | echo "# move --after a single file to an existing directory" | |
|
30 | mv d1/d11/a1 d2 | |
|
31 | hg rename --after d1/d11/a1 d2 | |
|
32 | hg status | |
|
33 | hg update -C | |
|
34 | ||
|
35 | echo "# rename a file using a relative path" | |
|
36 | (cd d1/d11; hg rename ../../d2/b e) | |
|
37 | hg status | |
|
38 | hg update -C | |
|
39 | ||
|
40 | echo "# rename --after a file using a relative path" | |
|
41 | (cd d1/d11; mv ../../d2/b e; hg rename --after ../../d2/b e) | |
|
42 | hg status | |
|
43 | hg update -C | |
|
44 | ||
|
45 | echo "# rename directory d1 as d3" | |
|
46 | hg rename d1/ d3 | |
|
47 | hg status | |
|
48 | hg update -C | |
|
49 | ||
|
50 | echo "# rename --after directory d1 as d3" | |
|
51 | mv d1 d3 | |
|
52 | hg rename --after d1 d3 | |
|
53 | hg status | |
|
54 | hg update -C | |
|
55 | ||
|
56 | echo "# move a directory using a relative path" | |
|
57 | (cd d2; mkdir d3; hg rename ../d1/d11 d3) | |
|
58 | hg status | |
|
59 | hg update -C | |
|
60 | ||
|
61 | echo "# move --after a directory using a relative path" | |
|
62 | (cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3) | |
|
63 | hg status | |
|
64 | hg update -C | |
|
65 | ||
|
66 | echo "# move directory d1/d11 to an existing directory d2 (removes empty d1)" | |
|
67 | hg rename d1/d11/ d2 | |
|
68 | hg status | |
|
69 | hg update -C | |
|
70 | ||
|
71 | echo "# move directories d1 and d2 to a new directory d3" | |
|
72 | mkdir d3 | |
|
73 | hg rename d1 d2 d3 | |
|
74 | hg status | |
|
75 | hg update -C | |
|
76 | ||
|
77 | echo "# move --after directories d1 and d2 to a new directory d3" | |
|
78 | mkdir d3 | |
|
79 | mv d1 d2 d3 | |
|
80 | hg rename --after d1 d2 d3 | |
|
81 | hg status | |
|
82 | hg update -C | |
|
83 | ||
|
84 | echo "# move everything under directory d1 to existing directory d2, do not" | |
|
85 | echo "# overwrite existing files (d2/b)" | |
|
86 | hg rename d1/* d2 | |
|
87 | hg status | |
|
88 | diff d1/b d2/b | |
|
89 | hg update -C | |
|
90 | ||
|
91 | echo "# attempt to move potentially more than one file into a non-existent" | |
|
92 | echo "# directory" | |
|
93 | hg rename 'glob:d1/**' dx | |
|
94 | ||
|
95 | echo "# move every file under d1 to d2/d21 (glob)" | |
|
96 | mkdir d2/d21 | |
|
97 | hg rename 'glob:d1/**' d2/d21 | |
|
98 | hg status | |
|
99 | hg update -C | |
|
100 | ||
|
101 | echo "# move --after some files under d1 to d2/d21 (glob)" | |
|
102 | mkdir d2/d21 | |
|
103 | mv d1/a d1/d11/a1 d2/d21 | |
|
104 | hg rename --after 'glob:d1/**' d2/d21 | |
|
105 | hg status | |
|
106 | hg update -C | |
|
107 | ||
|
108 | echo "# move every file under d1 starting with an 'a' to d2/d21 (regexp)" | |
|
109 | mkdir d2/d21 | |
|
110 | hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21 | |
|
111 | hg status | |
|
112 | hg update -C | |
|
113 | ||
|
114 | echo "# attempt to overwrite an existing file" | |
|
115 | echo "ca" > d1/ca | |
|
116 | hg rename d1/ba d1/ca | |
|
117 | hg status | |
|
118 | hg update -C | |
|
119 | ||
|
120 | echo "# forced overwrite of an existing file" | |
|
121 | echo "ca" > d1/ca | |
|
122 | hg rename --force d1/ba d1/ca | |
|
123 | hg status | |
|
124 | hg update -C | |
|
125 | ||
|
126 | echo "# replace a symlink with a file" | |
|
127 | ln -s ba d1/ca | |
|
128 | hg rename --force d1/ba d1/ca | |
|
129 | hg status | |
|
130 | hg update -C | |
|
131 | ||
|
132 | echo "# do not copy more than one source file to the same destination file" | |
|
133 | mkdir d3 | |
|
134 | hg rename d1/* d2/* d3 | |
|
135 | hg status | |
|
136 | hg update -C |
@@ -0,0 +1,183 b'' | |||
|
1 | # rename a single file | |
|
2 | A d2/c | |
|
3 | R d1/d11/a1 | |
|
4 | # rename --after a single file | |
|
5 | A d2/c | |
|
6 | R d1/d11/a1 | |
|
7 | # move a single file to an existing directory | |
|
8 | A d2/a1 | |
|
9 | R d1/d11/a1 | |
|
10 | # move --after a single file to an existing directory | |
|
11 | A d2/a1 | |
|
12 | R d1/d11/a1 | |
|
13 | # rename a file using a relative path | |
|
14 | A d1/d11/e | |
|
15 | R d2/b | |
|
16 | # rename --after a file using a relative path | |
|
17 | A d1/d11/e | |
|
18 | R d2/b | |
|
19 | # rename directory d1 as d3 | |
|
20 | copying d1/a to d3/a | |
|
21 | copying d1/b to d3/b | |
|
22 | copying d1/ba to d3/ba | |
|
23 | copying d1/d11/a1 to d3/d11/a1 | |
|
24 | removing d1/a | |
|
25 | removing d1/b | |
|
26 | removing d1/ba | |
|
27 | removing d1/d11/a1 | |
|
28 | A d3/a | |
|
29 | A d3/b | |
|
30 | A d3/ba | |
|
31 | A d3/d11/a1 | |
|
32 | R d1/a | |
|
33 | R d1/b | |
|
34 | R d1/ba | |
|
35 | R d1/d11/a1 | |
|
36 | # rename --after directory d1 as d3 | |
|
37 | copying d1/a to d3/a | |
|
38 | copying d1/b to d3/b | |
|
39 | copying d1/ba to d3/ba | |
|
40 | copying d1/d11/a1 to d3/d11/a1 | |
|
41 | removing d1/a | |
|
42 | removing d1/b | |
|
43 | removing d1/ba | |
|
44 | removing d1/d11/a1 | |
|
45 | A d3/a | |
|
46 | A d3/b | |
|
47 | A d3/ba | |
|
48 | A d3/d11/a1 | |
|
49 | R d1/a | |
|
50 | R d1/b | |
|
51 | R d1/ba | |
|
52 | R d1/d11/a1 | |
|
53 | # move a directory using a relative path | |
|
54 | copying ../d1/d11/a1 to d3/d11/a1 | |
|
55 | removing ../d1/d11/a1 | |
|
56 | A d2/d3/d11/a1 | |
|
57 | R d1/d11/a1 | |
|
58 | # move --after a directory using a relative path | |
|
59 | copying ../d1/d11/a1 to d3/d11/a1 | |
|
60 | removing ../d1/d11/a1 | |
|
61 | A d2/d3/d11/a1 | |
|
62 | R d1/d11/a1 | |
|
63 | # move directory d1/d11 to an existing directory d2 (removes empty d1) | |
|
64 | copying d1/d11/a1 to d2/d11/a1 | |
|
65 | removing d1/d11/a1 | |
|
66 | A d2/d11/a1 | |
|
67 | R d1/d11/a1 | |
|
68 | # move directories d1 and d2 to a new directory d3 | |
|
69 | copying d1/a to d3/d1/a | |
|
70 | copying d1/b to d3/d1/b | |
|
71 | copying d1/ba to d3/d1/ba | |
|
72 | copying d1/d11/a1 to d3/d1/d11/a1 | |
|
73 | copying d2/b to d3/d2/b | |
|
74 | removing d1/a | |
|
75 | removing d1/b | |
|
76 | removing d1/ba | |
|
77 | removing d1/d11/a1 | |
|
78 | removing d2/b | |
|
79 | A d3/d1/a | |
|
80 | A d3/d1/b | |
|
81 | A d3/d1/ba | |
|
82 | A d3/d1/d11/a1 | |
|
83 | A d3/d2/b | |
|
84 | R d1/a | |
|
85 | R d1/b | |
|
86 | R d1/ba | |
|
87 | R d1/d11/a1 | |
|
88 | R d2/b | |
|
89 | # move --after directories d1 and d2 to a new directory d3 | |
|
90 | copying d1/a to d3/d1/a | |
|
91 | copying d1/b to d3/d1/b | |
|
92 | copying d1/ba to d3/d1/ba | |
|
93 | copying d1/d11/a1 to d3/d1/d11/a1 | |
|
94 | copying d2/b to d3/d2/b | |
|
95 | removing d1/a | |
|
96 | removing d1/b | |
|
97 | removing d1/ba | |
|
98 | removing d1/d11/a1 | |
|
99 | removing d2/b | |
|
100 | A d3/d1/a | |
|
101 | A d3/d1/b | |
|
102 | A d3/d1/ba | |
|
103 | A d3/d1/d11/a1 | |
|
104 | A d3/d2/b | |
|
105 | R d1/a | |
|
106 | R d1/b | |
|
107 | R d1/ba | |
|
108 | R d1/d11/a1 | |
|
109 | R d2/b | |
|
110 | # move everything under directory d1 to existing directory d2, do not | |
|
111 | # overwrite existing files (d2/b) | |
|
112 | d2/b: not overwriting - file exists | |
|
113 | copying d1/d11/a1 to d2/d11/a1 | |
|
114 | removing d1/d11/a1 | |
|
115 | A d2/a | |
|
116 | A d2/ba | |
|
117 | A d2/d11/a1 | |
|
118 | R d1/a | |
|
119 | R d1/ba | |
|
120 | R d1/d11/a1 | |
|
121 | 1c1 | |
|
122 | < d1/b | |
|
123 | --- | |
|
124 | > d2/b | |
|
125 | # attempt to move potentially more than one file into a non-existent | |
|
126 | # directory | |
|
127 | abort: with multiple sources, destination must be an existing directory | |
|
128 | # move every file under d1 to d2/d21 (glob) | |
|
129 | copying d1/a to d2/d21/a | |
|
130 | copying d1/b to d2/d21/b | |
|
131 | copying d1/ba to d2/d21/ba | |
|
132 | copying d1/d11/a1 to d2/d21/a1 | |
|
133 | removing d1/a | |
|
134 | removing d1/b | |
|
135 | removing d1/ba | |
|
136 | removing d1/d11/a1 | |
|
137 | A d2/d21/a | |
|
138 | A d2/d21/a1 | |
|
139 | A d2/d21/b | |
|
140 | A d2/d21/ba | |
|
141 | R d1/a | |
|
142 | R d1/b | |
|
143 | R d1/ba | |
|
144 | R d1/d11/a1 | |
|
145 | # move --after some files under d1 to d2/d21 (glob) | |
|
146 | copying d1/a to d2/d21/a | |
|
147 | copying d1/d11/a1 to d2/d21/a1 | |
|
148 | removing d1/a | |
|
149 | removing d1/d11/a1 | |
|
150 | A d2/d21/a | |
|
151 | A d2/d21/a1 | |
|
152 | R d1/a | |
|
153 | R d1/d11/a1 | |
|
154 | # move every file under d1 starting with an 'a' to d2/d21 (regexp) | |
|
155 | copying d1/a to d2/d21/a | |
|
156 | copying d1/d11/a1 to d2/d21/a1 | |
|
157 | removing d1/a | |
|
158 | removing d1/d11/a1 | |
|
159 | A d2/d21/a | |
|
160 | A d2/d21/a1 | |
|
161 | R d1/a | |
|
162 | R d1/d11/a1 | |
|
163 | # attempt to overwrite an existing file | |
|
164 | d1/ca: not overwriting - file exists | |
|
165 | ? d1/ca | |
|
166 | # forced overwrite of an existing file | |
|
167 | A d1/ca | |
|
168 | R d1/ba | |
|
169 | # replace a symlink with a file | |
|
170 | A d1/ca | |
|
171 | R d1/ba | |
|
172 | # do not copy more than one source file to the same destination file | |
|
173 | copying d1/d11/a1 to d3/d11/a1 | |
|
174 | d3/b: not overwriting - d2/b collides with d1/b | |
|
175 | removing d1/d11/a1 | |
|
176 | A d3/a | |
|
177 | A d3/b | |
|
178 | A d3/ba | |
|
179 | A d3/d11/a1 | |
|
180 | R d1/a | |
|
181 | R d1/b | |
|
182 | R d1/ba | |
|
183 | R d1/d11/a1 |
@@ -2,18 +2,25 b' shopt -s extglob' | |||
|
2 | 2 | |
|
3 | 3 | _hg_commands() |
|
4 | 4 | { |
|
5 | local commands="$(hg -v help | sed -e '1,/^list of commands:/d' \ | |
|
6 | -e '/^global options:/,$d' \ | |
|
7 | -e '/^ [^ ]/!d; s/[,:]//g;')" | |
|
5 | local all commands result | |
|
6 | ||
|
7 | all=($(hg --debug help | sed -e '1,/^list of commands:/d' \ | |
|
8 | -e '/^global options:/,$d' \ | |
|
9 | -e '/^ [^ ]/!d; s/^ //; s/[,:]//g;')) | |
|
10 | ||
|
11 | commands="${all[*]##debug*}" | |
|
12 | result=$(compgen -W "${commands[*]}" -- "$cur") | |
|
8 | 13 | |
|
9 | 14 | # hide debug commands from users, but complete them if |
|
10 | # specifically asked for | |
|
11 | if [ |
|
12 | commands="$commands debugcheckstate debugstate debugindex" | |
|
13 | commands="$commands debugindexdot debugwalk debugdata" | |
|
14 | commands="$commands debugancestor debugconfig debugrename" | |
|
15 | # there is no other possible command | |
|
16 | if [ "$result" = "" ]; then | |
|
17 | local debug | |
|
18 | debug=(${all[*]##!(debug*)}) | |
|
19 | debug="${debug[*]/g/debug}" | |
|
20 | result=$(compgen -W "$debug" -- "$cur") | |
|
15 | 21 | fi |
|
16 | COMPREPLY=( ${COMPREPLY[@]:-} $(compgen -W "$commands" -- "$cur") ) | |
|
22 | ||
|
23 | COMPREPLY=(${COMPREPLY[@]:-} $result) | |
|
17 | 24 | } |
|
18 | 25 | |
|
19 | 26 | _hg_paths() |
@@ -161,7 +168,7 b' shopt -s extglob' | |||
|
161 | 168 | fi |
|
162 | 169 | ;; |
|
163 | 170 | *) |
|
164 |
|
171 | COMPREPLY=(${COMPREPLY[@]:-} $( compgen -f -- "$cur" )) | |
|
165 | 172 | ;; |
|
166 | 173 | esac |
|
167 | 174 |
@@ -26,7 +26,7 b' def check_clean(ui, repo):' | |||
|
26 | 26 | ui.warn("Repository is not clean, please commit or revert\n") |
|
27 | 27 | sys.exit(1) |
|
28 | 28 | |
|
29 | class bisect: | |
|
29 | class bisect(object): | |
|
30 | 30 | """dichotomic search in the DAG of changesets""" |
|
31 | 31 | def __init__(self, ui, repo): |
|
32 | 32 | self.repo = repo |
@@ -116,7 +116,7 b' case $service in' | |||
|
116 | 116 | '*:file:_files' |
|
117 | 117 | ;; |
|
118 | 118 | |
|
119 | (status) | |
|
119 | (status|st) | |
|
120 | 120 | _arguments $includeExclude \ |
|
121 | 121 | '(--no-status)-n[hide status prefix]' \ |
|
122 | 122 | '(-n)--no-status[hide status prefix]' \ |
@@ -87,7 +87,7 b' addremove [options] [files ...]::' | |||
|
87 | 87 | New files are ignored if they match any of the patterns in .hgignore. As |
|
88 | 88 | with add, these changes take effect at the next commit. |
|
89 | 89 | |
|
90 | annotate [-r <rev> -u -n -c] [files ...]:: | |
|
90 | annotate [-r <rev> -u -n -c -d] [files ...]:: | |
|
91 | 91 | List changes in files, showing the revision id responsible for each line |
|
92 | 92 | |
|
93 | 93 | This command is useful to discover who did a change or when a change took |
@@ -103,6 +103,7 b' annotate [-r <rev> -u -n -c] [files ...]' | |||
|
103 | 103 | -X, --exclude <pat> exclude names matching the given patterns |
|
104 | 104 | -r, --revision <rev> annotate the specified revision |
|
105 | 105 | -u, --user list the author |
|
106 | -d, --date list the commit date | |
|
106 | 107 | -c, --changeset list the changeset |
|
107 | 108 | -n, --number list the revision number (default) |
|
108 | 109 |
@@ -147,7 +147,7 b' static int equatelines(struct line *a, i' | |||
|
147 | 147 | break; |
|
148 | 148 | |
|
149 | 149 | a[i].e = j; /* use equivalence class for quick compare */ |
|
150 | if(h[j].len <= t) | |
|
150 | if (h[j].len <= t) | |
|
151 | 151 | a[i].n = h[j].pos; /* point to head of match list */ |
|
152 | 152 | else |
|
153 | 153 | a[i].n = -1; /* too popular */ |
@@ -270,7 +270,7 b' static PyObject *blocks(PyObject *self, ' | |||
|
270 | 270 | if (!l.head || !rl) |
|
271 | 271 | goto nomem; |
|
272 | 272 | |
|
273 | for(h = l.base; h != l.head; h++) { | |
|
273 | for (h = l.base; h != l.head; h++) { | |
|
274 | 274 | m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); |
|
275 | 275 | PyList_SetItem(rl, pos, m); |
|
276 | 276 | pos++; |
@@ -305,7 +305,7 b' static PyObject *bdiff(PyObject *self, P' | |||
|
305 | 305 | goto nomem; |
|
306 | 306 | |
|
307 | 307 | /* calculate length of output */ |
|
308 | for(h = l.base; h != l.head; h++) { | |
|
308 | for (h = l.base; h != l.head; h++) { | |
|
309 | 309 | if (h->a1 != la || h->b1 != lb) |
|
310 | 310 | len += 12 + bl[h->b1].l - bl[lb].l; |
|
311 | 311 | la = h->a2; |
@@ -320,7 +320,7 b' static PyObject *bdiff(PyObject *self, P' | |||
|
320 | 320 | rb = PyString_AsString(result); |
|
321 | 321 | la = lb = 0; |
|
322 | 322 | |
|
323 | for(h = l.base; h != l.head; h++) { | |
|
323 | for (h = l.base; h != l.head; h++) { | |
|
324 | 324 | if (h->a1 != la || h->b1 != lb) { |
|
325 | 325 | len = bl[h->b1].l - bl[lb].l; |
|
326 | 326 | *(uint32_t *)(encode) = htonl(al[la].l - al->l); |
@@ -353,3 +353,4 b' PyMODINIT_FUNC initbdiff(void)' | |||
|
353 | 353 | { |
|
354 | 354 | Py_InitModule3("bdiff", methods, mdiff_doc); |
|
355 | 355 | } |
|
356 |
@@ -15,6 +15,8 b' demandload(globals(), "errno socket vers' | |||
|
15 | 15 | |
|
16 | 16 | class UnknownCommand(Exception): |
|
17 | 17 | """Exception raised if command is not in the command table.""" |
|
18 | class AmbiguousCommand(Exception): | |
|
19 | """Exception raised if command shortcut matches more than one command.""" | |
|
18 | 20 | |
|
19 | 21 | def filterfiles(filters, files): |
|
20 | 22 | l = [x for x in files if x in filters] |
@@ -31,25 +33,29 b' def relpath(repo, args):' | |||
|
31 | 33 | return [util.normpath(os.path.join(cwd, x)) for x in args] |
|
32 | 34 | return args |
|
33 | 35 | |
|
34 | def matchpats(repo |
|
36 | def matchpats(repo, pats=[], opts={}, head=''): | |
|
37 | cwd = repo.getcwd() | |
|
38 | if not pats and cwd: | |
|
39 | opts['include'] = [os.path.join(cwd, i) for i in opts['include']] | |
|
40 | opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']] | |
|
41 | cwd = '' | |
|
35 | 42 | return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'), |
|
36 | opts.get('exclude'), head) | |
|
43 | opts.get('exclude'), head) + (cwd,) | |
|
37 | 44 | |
|
38 | def makewalk(repo, pats, opts, head=''): | |
|
39 | cwd = repo.getcwd() | |
|
40 | files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head) | |
|
45 | def makewalk(repo, pats, opts, node=None, head=''): | |
|
46 | files, matchfn, anypats, cwd = matchpats(repo, pats, opts, head) | |
|
41 | 47 | exact = dict(zip(files, files)) |
|
42 | 48 | def walk(): |
|
43 | for src, fn in repo.walk(files=files, match=matchfn): | |
|
49 | for src, fn in repo.walk(node=node, files=files, match=matchfn): | |
|
44 | 50 | yield src, fn, util.pathto(cwd, fn), fn in exact |
|
45 | 51 | return files, matchfn, walk() |
|
46 | 52 | |
|
47 | def walk(repo, pats, opts, head=''): | |
|
48 | files, matchfn, results = makewalk(repo, pats, opts, head) | |
|
53 | def walk(repo, pats, opts, node=None, head=''): | |
|
54 | files, matchfn, results = makewalk(repo, pats, opts, node, head) | |
|
49 | 55 | for r in results: |
|
50 | 56 | yield r |
|
51 | 57 | |
|
52 | def walkchangerevs(ui, repo, |
|
58 | def walkchangerevs(ui, repo, pats, opts): | |
|
53 | 59 | '''Iterate over files and the revs they changed in. |
|
54 | 60 | |
|
55 | 61 | Callers most commonly need to iterate backwards over the history |
@@ -79,12 +85,7 b' def walkchangerevs(ui, repo, cwd, pats, ' | |||
|
79 | 85 | if repo.changelog.count() == 0: |
|
80 | 86 | return [], False |
|
81 | 87 | |
|
82 | cwd = repo.getcwd() | |
|
83 | if not pats and cwd: | |
|
84 | opts['include'] = [os.path.join(cwd, i) for i in opts['include']] | |
|
85 | opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']] | |
|
86 | files, matchfn, anypats = matchpats(repo, (pats and cwd) or '', | |
|
87 | pats, opts) | |
|
88 | files, matchfn, anypats, cwd = matchpats(repo, pats, opts) | |
|
88 | 89 | revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0'])) |
|
89 | 90 | wanted = {} |
|
90 | 91 | slowpath = anypats |
@@ -387,7 +388,7 b' def help_(ui, cmd=None, with_version=Fal' | |||
|
387 | 388 | if with_version: |
|
388 | 389 | show_version(ui) |
|
389 | 390 | ui.write('\n') |
|
390 |
|
391 | aliases, i = find(cmd) | |
|
391 | 392 | # synopsis |
|
392 | 393 | ui.write("%s\n\n" % i[2]) |
|
393 | 394 | |
@@ -399,9 +400,8 b' def help_(ui, cmd=None, with_version=Fal' | |||
|
399 | 400 | |
|
400 | 401 | if not ui.quiet: |
|
401 | 402 | # aliases |
|
402 | aliases = ', '.join(key.split('|')[1:]) | |
|
403 | if aliases: | |
|
404 | ui.write(_("\naliases: %s\n") % aliases) | |
|
403 | if len(aliases) > 1: | |
|
404 | ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) | |
|
405 | 405 | |
|
406 | 406 | # options |
|
407 | 407 | if i[1]: |
@@ -482,8 +482,7 b' def add(ui, repo, *pats, **opts):' | |||
|
482 | 482 | |
|
483 | 483 | The files will be added to the repository at the next commit. |
|
484 | 484 | |
|
485 | If no names are given, add all files in the |
|
486 | its subdirectories. | |
|
485 | If no names are given, add all files in the repository. | |
|
487 | 486 | """ |
|
488 | 487 | |
|
489 | 488 | names = [] |
@@ -537,11 +536,20 b' def annotate(ui, repo, *pats, **opts):' | |||
|
537 | 536 | cl = repo.changelog.read(repo.changelog.node(rev)) |
|
538 | 537 | return trimuser(ui, cl[1], rev, ucache) |
|
539 | 538 | |
|
539 | dcache = {} | |
|
540 | def getdate(rev): | |
|
541 | datestr = dcache.get(rev) | |
|
542 | if datestr is None: | |
|
543 | cl = repo.changelog.read(repo.changelog.node(rev)) | |
|
544 | datestr = dcache[rev] = util.datestr(cl[2]) | |
|
545 | return datestr | |
|
546 | ||
|
540 | 547 | if not pats: |
|
541 | 548 | raise util.Abort(_('at least one file name or pattern required')) |
|
542 | 549 | |
|
543 | opmap = [['user', getname], ['number', str], ['changeset', getnode] |
|
544 | if not opts['user'] and not opts['changeset']: | |
|
550 | opmap = [['user', getname], ['number', str], ['changeset', getnode], | |
|
551 | ['date', getdate]] | |
|
552 | if not opts['user'] and not opts['changeset'] and not opts['date']: | |
|
545 | 553 | opts['number'] = 1 |
|
546 | 554 | |
|
547 | 555 | if opts['rev']: |
@@ -624,21 +632,16 b' def cat(ui, repo, file1, *pats, **opts):' | |||
|
624 | 632 | %p root-relative path name of file being printed |
|
625 | 633 | """ |
|
626 | 634 | mf = {} |
|
627 |
|
628 | change = repo.changelog.read(repo.lookup(opts['rev'])) | |
|
629 | mf = repo.manifest.read(change[0]) | |
|
630 | for src, abs, rel, exact in walk(repo, (file1,) + pats, opts): | |
|
635 | rev = opts['rev'] | |
|
636 | if rev: | |
|
637 | node = repo.lookup(rev) | |
|
638 | else: | |
|
639 | node = repo.changelog.tip() | |
|
640 | change = repo.changelog.read(node) | |
|
641 | mf = repo.manifest.read(change[0]) | |
|
642 | for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node): | |
|
631 | 643 | r = repo.file(abs) |
|
632 | if opts['rev']: | |
|
633 | try: | |
|
634 | n = mf[abs] | |
|
635 | except (hg.RepoError, KeyError): | |
|
636 | try: | |
|
637 | n = r.lookup(rev) | |
|
638 | except KeyError, inst: | |
|
639 | raise util.Abort(_('cannot find file %s in rev %s'), rel, rev) | |
|
640 | else: | |
|
641 | n = r.tip() | |
|
644 | n = mf[abs] | |
|
642 | 645 | fp = make_file(repo, r, opts['output'], node=n, pathname=abs) |
|
643 | 646 | fp.write(r.read(n)) |
|
644 | 647 | |
@@ -667,7 +670,7 b' def clone(ui, source, dest=None, **opts)' | |||
|
667 | 670 | |
|
668 | 671 | dest = os.path.realpath(dest) |
|
669 | 672 | |
|
670 | class Dircleanup: | |
|
673 | class Dircleanup(object): | |
|
671 | 674 | def __init__(self, dir_): |
|
672 | 675 | self.rmtree = shutil.rmtree |
|
673 | 676 | self.dir_ = dir_ |
@@ -735,6 +738,7 b' def clone(ui, source, dest=None, **opts)' | |||
|
735 | 738 | f = repo.opener("hgrc", "w", text=True) |
|
736 | 739 | f.write("[paths]\n") |
|
737 | 740 | f.write("default = %s\n" % abspath) |
|
741 | f.close() | |
|
738 | 742 | |
|
739 | 743 | if not opts['noupdate']: |
|
740 | 744 | update(ui, repo) |
@@ -747,7 +751,7 b' def commit(ui, repo, *pats, **opts):' | |||
|
747 | 751 | Commit changes to the given files into the repository. |
|
748 | 752 | |
|
749 | 753 | If a list of files is omitted, all changes reported by "hg status" |
|
750 | from the root of the repository will be commited. | |
|
754 | will be commited. | |
|
751 | 755 | |
|
752 | 756 | The HGEDITOR or EDITOR environment variables are used to start an |
|
753 | 757 | editor to add a commit comment. |
@@ -770,12 +774,7 b' def commit(ui, repo, *pats, **opts):' | |||
|
770 | 774 | |
|
771 | 775 | if opts['addremove']: |
|
772 | 776 | addremove(ui, repo, *pats, **opts) |
|
773 | cwd = repo.getcwd() | |
|
774 | if not pats and cwd: | |
|
775 | opts['include'] = [os.path.join(cwd, i) for i in opts['include']] | |
|
776 | opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']] | |
|
777 | fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '', | |
|
778 | pats, opts) | |
|
777 | fns, match, anypats, cwd = matchpats(repo, pats, opts) | |
|
779 | 778 | if pats: |
|
780 | 779 | c, a, d, u = repo.changes(files=fns, match=match) |
|
781 | 780 | files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r'] |
@@ -787,14 +786,10 b' def commit(ui, repo, *pats, **opts):' | |||
|
787 | 786 | raise util.Abort(str(inst)) |
|
788 | 787 | |
|
789 | 788 | def docopy(ui, repo, pats, opts): |
|
790 | if not pats: | |
|
791 | raise util.Abort(_('no source or destination specified')) | |
|
792 | elif len(pats) == 1: | |
|
793 | raise util.Abort(_('no destination specified')) | |
|
794 | pats = list(pats) | |
|
795 | dest = pats.pop() | |
|
796 | sources = [] | |
|
797 | dir2dir = len(pats) == 1 and os.path.isdir(pats[0]) | |
|
789 | cwd = repo.getcwd() | |
|
790 | errors = 0 | |
|
791 | copied = [] | |
|
792 | targets = {} | |
|
798 | 793 | |
|
799 | 794 | def okaytocopy(abs, rel, exact): |
|
800 | 795 | reasons = {'?': _('is not managed'), |
@@ -805,74 +800,133 b' def docopy(ui, repo, pats, opts):' | |||
|
805 | 800 | else: |
|
806 | 801 | return True |
|
807 | 802 | |
|
808 | for src, abs, rel, exact in walk(repo, pats, opts): | |
|
809 | if okaytocopy(abs, rel, exact): | |
|
810 | sources.append((abs, rel, exact)) | |
|
811 | if not sources: | |
|
812 | raise util.Abort(_('no files to copy')) | |
|
813 | ||
|
814 | cwd = repo.getcwd() | |
|
815 | absdest = util.canonpath(repo.root, cwd, dest) | |
|
816 | reldest = util.pathto(cwd, absdest) | |
|
817 | if os.path.exists(reldest): | |
|
818 | destisfile = not os.path.isdir(reldest) | |
|
819 | else: | |
|
820 | destisfile = not dir2dir and (len(sources) == 1 | |
|
821 | or repo.dirstate.state(absdest) != '?') | |
|
822 | ||
|
823 | if destisfile and len(sources) > 1: | |
|
824 | raise util.Abort(_('with multiple sources, destination must be a ' | |
|
825 | 'directory')) | |
|
826 | ||
|
827 | srcpfxlen = 0 | |
|
828 | if dir2dir: | |
|
829 | srcpfx = util.pathto(cwd, util.canonpath(repo.root, cwd, pats[0])) | |
|
830 | if os.path.exists(reldest): | |
|
831 | srcpfx = os.path.split(srcpfx)[0] | |
|
832 | if srcpfx: | |
|
833 | srcpfx += os.sep | |
|
834 | srcpfxlen = len(srcpfx) | |
|
835 | ||
|
836 | errs, copied = 0, [] | |
|
837 | for abs, rel, exact in sources: | |
|
838 | if destisfile: | |
|
839 | mydest = reldest | |
|
840 | elif dir2dir: | |
|
841 | mydest = os.path.join(dest, rel[srcpfxlen:]) | |
|
803 | def copy(abssrc, relsrc, target, exact): | |
|
804 | abstarget = util.canonpath(repo.root, cwd, target) | |
|
805 | reltarget = util.pathto(cwd, abstarget) | |
|
806 | prevsrc = targets.get(abstarget) | |
|
807 | if prevsrc is not None: | |
|
808 | ui.warn(_('%s: not overwriting - %s collides with %s\n') % | |
|
809 | (reltarget, abssrc, prevsrc)) | |
|
810 | return | |
|
811 | if (not opts['after'] and os.path.exists(reltarget) or | |
|
812 | opts['after'] and repo.dirstate.state(abstarget) not in '?r'): | |
|
813 | if not opts['force']: | |
|
814 | ui.warn(_('%s: not overwriting - file exists\n') % | |
|
815 | reltarget) | |
|
816 | return | |
|
817 | if not opts['after']: | |
|
818 | os.unlink(reltarget) | |
|
819 | if opts['after']: | |
|
820 | if not os.path.exists(reltarget): | |
|
821 | return | |
|
842 | 822 | else: |
|
843 | mydest = os.path.join(dest, os.path.basename(rel)) | |
|
844 | myabsdest = util.canonpath(repo.root, cwd, mydest) | |
|
845 | myreldest = util.pathto(cwd, myabsdest) | |
|
846 | if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?': | |
|
847 | ui.warn(_('%s: not overwriting - file already managed\n') % myreldest) | |
|
848 | continue | |
|
849 | mydestdir = os.path.dirname(myreldest) or '.' | |
|
850 | if not opts['after']: | |
|
823 | targetdir = os.path.dirname(reltarget) or '.' | |
|
824 | if not os.path.isdir(targetdir): | |
|
825 | os.makedirs(targetdir) | |
|
851 | 826 | try: |
|
852 | if dir2dir: os.makedirs(mydestdir) | |
|
853 | elif not destisfile: os.mkdir(mydestdir) | |
|
854 | except OSError, inst: | |
|
855 | if inst.errno != errno.EEXIST: raise | |
|
856 | if ui.verbose or not exact: | |
|
857 | ui.status(_('copying %s to %s\n') % (rel, myreldest)) | |
|
858 | if not opts['after']: | |
|
859 | try: | |
|
860 | shutil.copyfile(rel, myreldest) | |
|
861 | shutil.copymode(rel, myreldest) | |
|
827 | shutil.copyfile(relsrc, reltarget) | |
|
828 | shutil.copymode(relsrc, reltarget) | |
|
862 | 829 | except shutil.Error, inst: |
|
863 | 830 | raise util.Abort(str(inst)) |
|
864 | 831 | except IOError, inst: |
|
865 | 832 | if inst.errno == errno.ENOENT: |
|
866 | ui.warn(_('%s: deleted in working copy\n') % rel) | |
|
833 | ui.warn(_('%s: deleted in working copy\n') % relsrc) | |
|
867 | 834 | else: |
|
868 | ui.warn(_('%s: cannot copy - %s\n') % |
|
869 | errs += 1 | |
|
870 |
|
871 | repo.copy(abs, myabsdest) | |
|
872 | copied.append((abs, rel, exact)) | |
|
873 | if errs: | |
|
835 | ui.warn(_('%s: cannot copy - %s\n') % | |
|
836 | (relsrc, inst.strerror)) | |
|
837 | errors += 1 | |
|
838 | return | |
|
839 | if ui.verbose or not exact: | |
|
840 | ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) | |
|
841 | targets[abstarget] = abssrc | |
|
842 | repo.copy(abssrc, abstarget) | |
|
843 | copied.append((abssrc, relsrc, exact)) | |
|
844 | ||
|
845 | def targetpathfn(pat, dest, srcs): | |
|
846 | if os.path.isdir(pat): | |
|
847 | if pat.endswith(os.sep): | |
|
848 | pat = pat[:-len(os.sep)] | |
|
849 | if destdirexists: | |
|
850 | striplen = len(os.path.split(pat)[0]) | |
|
851 | else: | |
|
852 | striplen = len(pat) | |
|
853 | if striplen: | |
|
854 | striplen += len(os.sep) | |
|
855 | res = lambda p: os.path.join(dest, p[striplen:]) | |
|
856 | elif destdirexists: | |
|
857 | res = lambda p: os.path.join(dest, os.path.basename(p)) | |
|
858 | else: | |
|
859 | res = lambda p: dest | |
|
860 | return res | |
|
861 | ||
|
862 | def targetpathafterfn(pat, dest, srcs): | |
|
863 | if util.patkind(pat, None)[0]: | |
|
864 | # a mercurial pattern | |
|
865 | res = lambda p: os.path.join(dest, os.path.basename(p)) | |
|
866 | elif len(util.canonpath(repo.root, cwd, pat)) < len(srcs[0][0]): | |
|
867 | # A directory. Either the target path contains the last | |
|
868 | # component of the source path or it does not. | |
|
869 | def evalpath(striplen): | |
|
870 | score = 0 | |
|
871 | for s in srcs: | |
|
872 | t = os.path.join(dest, s[1][striplen:]) | |
|
873 | if os.path.exists(t): | |
|
874 | score += 1 | |
|
875 | return score | |
|
876 | ||
|
877 | if pat.endswith(os.sep): | |
|
878 | pat = pat[:-len(os.sep)] | |
|
879 | striplen = len(pat) + len(os.sep) | |
|
880 | if os.path.isdir(os.path.join(dest, os.path.split(pat)[1])): | |
|
881 | score = evalpath(striplen) | |
|
882 | striplen1 = len(os.path.split(pat)[0]) | |
|
883 | if striplen1: | |
|
884 | striplen1 += len(os.sep) | |
|
885 | if evalpath(striplen1) > score: | |
|
886 | striplen = striplen1 | |
|
887 | res = lambda p: os.path.join(dest, p[striplen:]) | |
|
888 | else: | |
|
889 | # a file | |
|
890 | if destdirexists: | |
|
891 | res = lambda p: os.path.join(dest, os.path.basename(p)) | |
|
892 | else: | |
|
893 | res = lambda p: dest | |
|
894 | return res | |
|
895 | ||
|
896 | ||
|
897 | pats = list(pats) | |
|
898 | if not pats: | |
|
899 | raise util.Abort(_('no source or destination specified')) | |
|
900 | if len(pats) == 1: | |
|
901 | raise util.Abort(_('no destination specified')) | |
|
902 | dest = pats.pop() | |
|
903 | destdirexists = os.path.isdir(dest) | |
|
904 | if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists: | |
|
905 | raise util.Abort(_('with multiple sources, destination must be an ' | |
|
906 | 'existing directory')) | |
|
907 | if opts['after']: | |
|
908 | tfn = targetpathafterfn | |
|
909 | else: | |
|
910 | tfn = targetpathfn | |
|
911 | copylist = [] | |
|
912 | for pat in pats: | |
|
913 | srcs = [] | |
|
914 | for tag, abssrc, relsrc, exact in walk(repo, [pat], opts): | |
|
915 | if okaytocopy(abssrc, relsrc, exact): | |
|
916 | srcs.append((abssrc, relsrc, exact)) | |
|
917 | if not srcs: | |
|
918 | continue | |
|
919 | copylist.append((tfn(pat, dest, srcs), srcs)) | |
|
920 | if not copylist: | |
|
921 | raise util.Abort(_('no files to copy')) | |
|
922 | ||
|
923 | for targetpath, srcs in copylist: | |
|
924 | for abssrc, relsrc, exact in srcs: | |
|
925 | copy(abssrc, relsrc, targetpath(relsrc), exact) | |
|
926 | ||
|
927 | if errors: | |
|
874 | 928 | ui.warn(_('(consider using --after)\n')) |
|
875 | return errs, copied | |
|
929 | return errors, copied | |
|
876 | 930 | |
|
877 | 931 | def copy(ui, repo, *pats, **opts): |
|
878 | 932 | """mark files as copied for the next commit |
@@ -1007,7 +1061,7 b' def debugrename(ui, repo, file, rev=None' | |||
|
1007 | 1061 | change = repo.changelog.read(n) |
|
1008 | 1062 | m = repo.manifest.read(change[0]) |
|
1009 | 1063 | n = m[relpath(repo, [file])[0]] |
|
1010 | except hg.RepoError, KeyError: | |
|
1064 | except (hg.RepoError, KeyError): | |
|
1011 | 1065 | n = r.lookup(rev) |
|
1012 | 1066 | else: |
|
1013 | 1067 | n = r.tip() |
@@ -1030,7 +1084,7 b' def debugwalk(ui, repo, *pats, **opts):' | |||
|
1030 | 1084 | ui.write("%s\n" % line.rstrip()) |
|
1031 | 1085 | |
|
1032 | 1086 | def diff(ui, repo, *pats, **opts): |
|
1033 | """diff |
|
1087 | """diff repository (or selected files) | |
|
1034 | 1088 | |
|
1035 | 1089 | Show differences between revisions for the specified files. |
|
1036 | 1090 | |
@@ -1056,7 +1110,7 b' def diff(ui, repo, *pats, **opts):' | |||
|
1056 | 1110 | if len(revs) > 2: |
|
1057 | 1111 | raise util.Abort(_("too many revisions to diff")) |
|
1058 | 1112 | |
|
1059 | fns, matchfn, anypats = matchpats(repo, |
|
1113 | fns, matchfn, anypats, cwd = matchpats(repo, pats, opts) | |
|
1060 | 1114 | |
|
1061 | 1115 | dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn, |
|
1062 | 1116 | text=opts['text']) |
@@ -1177,7 +1231,7 b' def grep(ui, repo, pattern, *pats, **opt' | |||
|
1177 | 1231 | yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] |
|
1178 | 1232 | begin = lend + 1 |
|
1179 | 1233 | |
|
1180 | class linestate: | |
|
1234 | class linestate(object): | |
|
1181 | 1235 | def __init__(self, line, linenum, colstart, colend): |
|
1182 | 1236 | self.line = line |
|
1183 | 1237 | self.linenum = linenum |
@@ -1227,7 +1281,7 b' def grep(ui, repo, pattern, *pats, **opt' | |||
|
1227 | 1281 | |
|
1228 | 1282 | fstate = {} |
|
1229 | 1283 | skip = {} |
|
1230 | changeiter, getchange = walkchangerevs(ui, repo, |
|
1284 | changeiter, getchange = walkchangerevs(ui, repo, pats, opts) | |
|
1231 | 1285 | count = 0 |
|
1232 | 1286 | incrementing = False |
|
1233 | 1287 | for st, rev, fns in changeiter: |
@@ -1275,11 +1329,14 b' def heads(ui, repo, **opts):' | |||
|
1275 | 1329 | changesets. They are where development generally takes place and |
|
1276 | 1330 | are the usual targets for update and merge operations. |
|
1277 | 1331 | """ |
|
1278 | heads = repo.changelog.heads() | |
|
1332 | if opts['rev']: | |
|
1333 | heads = repo.heads(repo.lookup(opts['rev'])) | |
|
1334 | else: | |
|
1335 | heads = repo.heads() | |
|
1279 | 1336 | br = None |
|
1280 | 1337 | if opts['branches']: |
|
1281 | 1338 | br = repo.branchlookup(heads) |
|
1282 | for n in |
|
1339 | for n in heads: | |
|
1283 | 1340 | show_changeset(ui, repo, changenode=n, brinfo=br) |
|
1284 | 1341 | |
|
1285 | 1342 | def identify(ui, repo): |
@@ -1461,11 +1518,11 b' def log(ui, repo, *pats, **opts):' | |||
|
1461 | 1518 | Print the revision history of the specified files or the entire project. |
|
1462 | 1519 | |
|
1463 | 1520 | By default this command outputs: changeset id and hash, tags, |
|
1464 | parents, user, date and time, and a summary for each |
|
1465 | -v switch adds some more detail, such as changed files, manifest | |
|
1466 | hashes or message signatures. | |
|
1521 | non-trivial parents, user, date and time, and a summary for each | |
|
1522 | commit. When the -v/--verbose switch is used, the list of changed | |
|
1523 | files and full commit message is shown. | |
|
1467 | 1524 | """ |
|
1468 | class dui: | |
|
1525 | class dui(object): | |
|
1469 | 1526 | # Implement and delegate some ui protocol. Save hunks of |
|
1470 | 1527 | # output for later display in the desired order. |
|
1471 | 1528 | def __init__(self, ui): |
@@ -1487,12 +1544,7 b' def log(ui, repo, *pats, **opts):' | |||
|
1487 | 1544 | self.write(*args) |
|
1488 | 1545 | def __getattr__(self, key): |
|
1489 | 1546 | return getattr(self.ui, key) |
|
1490 | cwd = repo.getcwd() | |
|
1491 | if not pats and cwd: | |
|
1492 | opts['include'] = [os.path.join(cwd, i) for i in opts['include']] | |
|
1493 | opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']] | |
|
1494 | changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '', | |
|
1495 | pats, opts) | |
|
1547 | changeiter, getchange = walkchangerevs(ui, repo, pats, opts) | |
|
1496 | 1548 | for st, rev, fns in changeiter: |
|
1497 | 1549 | if st == 'window': |
|
1498 | 1550 | du = dui(ui) |
@@ -1733,7 +1785,9 b' def recover(ui, repo):' | |||
|
1733 | 1785 | This command tries to fix the repository status after an interrupted |
|
1734 | 1786 | operation. It should only be necessary when Mercurial suggests it. |
|
1735 | 1787 | """ |
|
1736 | repo.recover() | |
|
1788 | if repo.recover(): | |
|
1789 | return repo.verify() | |
|
1790 | return False | |
|
1737 | 1791 | |
|
1738 | 1792 | def remove(ui, repo, pat, *pats, **opts): |
|
1739 | 1793 | """remove the specified files on the next commit |
@@ -1799,13 +1853,12 b' def revert(ui, repo, *pats, **opts):' | |||
|
1799 | 1853 | |
|
1800 | 1854 | If names are given, all files matching the names are reverted. |
|
1801 | 1855 | |
|
1802 | If no |
|
1803 | its subdirectories are reverted. | |
|
1856 | If no arguments are given, all files in the repository are reverted. | |
|
1804 | 1857 | """ |
|
1805 | 1858 | node = opts['rev'] and repo.lookup(opts['rev']) or \ |
|
1806 | 1859 | repo.dirstate.parents()[0] |
|
1807 | 1860 | |
|
1808 | files, choose, anypats = matchpats(repo, |
|
1861 | files, choose, anypats, cwd = matchpats(repo, pats, opts) | |
|
1809 | 1862 | (c, a, d, u) = repo.changes(match=choose) |
|
1810 | 1863 | repo.forget(a) |
|
1811 | 1864 | repo.undelete(d) |
@@ -1928,9 +1981,8 b' def serve(ui, repo, **opts):' | |||
|
1928 | 1981 | def status(ui, repo, *pats, **opts): |
|
1929 | 1982 | """show changed files in the working directory |
|
1930 | 1983 | |
|
1931 | Show changed files in the |
|
1932 | given, all files are shown. Otherwise, only files matching the | |
|
1933 | given names are shown. | |
|
1984 | Show changed files in the repository. If names are | |
|
1985 | given, only files that match are shown. | |
|
1934 | 1986 | |
|
1935 | 1987 | The codes used to show the status of files are: |
|
1936 | 1988 | M = modified |
@@ -1939,8 +1991,7 b' def status(ui, repo, *pats, **opts):' | |||
|
1939 | 1991 | ? = not tracked |
|
1940 | 1992 | """ |
|
1941 | 1993 | |
|
1942 | cwd = repo.getcwd() | |
|
1943 | files, matchfn, anypats = matchpats(repo, cwd, pats, opts) | |
|
1994 | files, matchfn, anypats, cwd = matchpats(repo, pats, opts) | |
|
1944 | 1995 | (c, a, d, u) = [[util.pathto(cwd, x) for x in n] |
|
1945 | 1996 | for n in repo.changes(files=files, match=matchfn)] |
|
1946 | 1997 | |
@@ -1986,8 +2037,10 b' def tag(ui, repo, name, rev=None, **opts' | |||
|
1986 | 2037 | else: |
|
1987 | 2038 | r = hex(repo.changelog.tip()) |
|
1988 | 2039 | |
|
1989 | if name.find(revrangesep) >= 0: | |
|
1990 | raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep) | |
|
2040 | disallowed = (revrangesep, '\r', '\n') | |
|
2041 | for c in disallowed: | |
|
2042 | if name.find(c) >= 0: | |
|
2043 | raise util.Abort(_("%s cannot be used in a tag name") % repr(c)) | |
|
1991 | 2044 | |
|
1992 | 2045 | if opts['local']: |
|
1993 | 2046 | repo.opener("localtags", "a").write("%s %s\n" % (r, name)) |
@@ -2138,6 +2191,7 b' table = {' | |||
|
2138 | 2191 | [('r', 'rev', '', _('annotate the specified revision')), |
|
2139 | 2192 | ('a', 'text', None, _('treat all files as text')), |
|
2140 | 2193 | ('u', 'user', None, _('list the author')), |
|
2194 | ('d', 'date', None, _('list the date')), | |
|
2141 | 2195 | ('n', 'number', None, _('list the revision number (default)')), |
|
2142 | 2196 | ('c', 'changeset', None, _('list the changeset')), |
|
2143 | 2197 | ('I', 'include', [], _('include names matching the given patterns')), |
@@ -2223,8 +2277,9 b' table = {' | |||
|
2223 | 2277 | "hg grep [OPTION]... PATTERN [FILE]..."), |
|
2224 | 2278 | "heads": |
|
2225 | 2279 | (heads, |
|
2226 | [('b', 'branches', None, _('find branch info')) |
|
2227 | _('hg heads [-b]')), | |
|
2280 | [('b', 'branches', None, _('find branch info')), | |
|
2281 | ('r', 'rev', "", _('show only heads which are descendants of rev'))], | |
|
2282 | _('hg heads [-b] [-r <rev>]')), | |
|
2228 | 2283 | "help": (help_, [], _('hg help [COMMAND]')), |
|
2229 | 2284 | "identify|id": (identify, [], _('hg identify')), |
|
2230 | 2285 | "import|patch": |
@@ -2374,17 +2429,21 b' norepo = ("clone init version help debug' | |||
|
2374 | 2429 | " debugindex debugindexdot paths") |
|
2375 | 2430 | |
|
2376 | 2431 | def find(cmd): |
|
2377 | choice = [] | |
|
2432 | """Return (aliases, command table entry) for command string.""" | |
|
2433 | choice = None | |
|
2378 | 2434 | for e in table.keys(): |
|
2379 | 2435 | aliases = e.lstrip("^").split("|") |
|
2380 | 2436 | if cmd in aliases: |
|
2381 | return e, table[e] | |
|
2437 | return aliases, table[e] | |
|
2382 | 2438 | for a in aliases: |
|
2383 | 2439 | if a.startswith(cmd): |
|
2384 | choice |
|
2385 | if len(choice) == 1: | |
|
2386 | e = choice[0] | |
|
2387 | return e, table[e] | |
|
2440 | if choice: | |
|
2441 | raise AmbiguousCommand(cmd) | |
|
2442 | else: | |
|
2443 | choice = aliases, table[e] | |
|
2444 | break | |
|
2445 | if choice: | |
|
2446 | return choice | |
|
2388 | 2447 | |
|
2389 | 2448 | raise UnknownCommand(cmd) |
|
2390 | 2449 | |
@@ -2411,18 +2470,11 b' def parse(ui, args):' | |||
|
2411 | 2470 | |
|
2412 | 2471 | if args: |
|
2413 | 2472 | cmd, args = args[0], args[1:] |
|
2473 | aliases, i = find(cmd) | |
|
2474 | cmd = aliases[0] | |
|
2414 | 2475 | defaults = ui.config("defaults", cmd) |
|
2415 | 2476 | if defaults: |
|
2416 | # reparse with command defaults added | |
|
2417 | args = [cmd] + defaults.split() + args | |
|
2418 | try: | |
|
2419 | args = fancyopts.fancyopts(args, globalopts, options) | |
|
2420 | except fancyopts.getopt.GetoptError, inst: | |
|
2421 | raise ParseError(None, inst) | |
|
2422 | ||
|
2423 | cmd, args = args[0], args[1:] | |
|
2424 | ||
|
2425 | i = find(cmd)[1] | |
|
2477 | args = defaults.split() + args | |
|
2426 | 2478 | c = list(i[1]) |
|
2427 | 2479 | else: |
|
2428 | 2480 | cmd = None |
@@ -2460,7 +2512,7 b' def dispatch(args):' | |||
|
2460 | 2512 | |
|
2461 | 2513 | external = [] |
|
2462 | 2514 | for x in u.extensions(): |
|
2463 | def on_exception( |
|
2515 | def on_exception(exc, inst): | |
|
2464 | 2516 | u.warn(_("*** failed to import extension %s\n") % x[1]) |
|
2465 | 2517 | u.warn("%s\n" % inst) |
|
2466 | 2518 | if "--traceback" in sys.argv[1:]: |
@@ -2502,6 +2554,9 b' def dispatch(args):' | |||
|
2502 | 2554 | u.warn(_("hg: %s\n") % inst.args[1]) |
|
2503 | 2555 | help_(u, 'shortlist') |
|
2504 | 2556 | sys.exit(-1) |
|
2557 | except AmbiguousCommand, inst: | |
|
2558 | u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0]) | |
|
2559 | sys.exit(1) | |
|
2505 | 2560 | except UnknownCommand, inst: |
|
2506 | 2561 | u.warn(_("hg: unknown command '%s'\n") % inst.args[0]) |
|
2507 | 2562 | help_(u, 'shortlist') |
@@ -2620,6 +2675,9 b' def dispatch(args):' | |||
|
2620 | 2675 | u.debug(inst, "\n") |
|
2621 | 2676 | u.warn(_("%s: invalid arguments\n") % cmd) |
|
2622 | 2677 | help_(u, cmd) |
|
2678 | except AmbiguousCommand, inst: | |
|
2679 | u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0]) | |
|
2680 | help_(u, 'shortlist') | |
|
2623 | 2681 | except UnknownCommand, inst: |
|
2624 | 2682 | u.warn(_("hg: unknown command '%s'\n") % inst.args[0]) |
|
2625 | 2683 | help_(u, 'shortlist') |
@@ -2629,6 +2687,8 b' def dispatch(args):' | |||
|
2629 | 2687 | except: |
|
2630 | 2688 | u.warn(_("** unknown exception encountered, details follow\n")) |
|
2631 | 2689 | u.warn(_("** report bug details to mercurial@selenic.com\n")) |
|
2690 | u.warn(_("** Mercurial Distributed SCM (version %s)\n") | |
|
2691 | % version.get_version()) | |
|
2632 | 2692 | raise |
|
2633 | 2693 | |
|
2634 | 2694 | sys.exit(-1) |
@@ -13,7 +13,7 b' from i18n import gettext as _' | |||
|
13 | 13 | from demandload import * |
|
14 | 14 | demandload(globals(), "time bisect stat util re errno") |
|
15 | 15 | |
|
16 | class dirstate: | |
|
16 | class dirstate(object): | |
|
17 | 17 | def __init__(self, opener, ui, root): |
|
18 | 18 | self.opener = opener |
|
19 | 19 | self.root = root |
@@ -101,16 +101,15 b' class dirstate:' | |||
|
101 | 101 | try: |
|
102 | 102 | return self.map[key] |
|
103 | 103 | except TypeError: |
|
104 | self.read() | |
|
104 | self.lazyread() | |
|
105 | 105 | return self[key] |
|
106 | 106 | |
|
107 | 107 | def __contains__(self, key): |
|
108 |
|
108 | self.lazyread() | |
|
109 | 109 | return key in self.map |
|
110 | 110 | |
|
111 | 111 | def parents(self): |
|
112 |
|
113 | self.read() | |
|
112 | self.lazyread() | |
|
114 | 113 | return self.pl |
|
115 | 114 | |
|
116 | 115 | def markdirty(self): |
@@ -118,8 +117,7 b' class dirstate:' | |||
|
118 | 117 | self.dirty = 1 |
|
119 | 118 | |
|
120 | 119 | def setparents(self, p1, p2=nullid): |
|
121 |
|
122 | self.read() | |
|
120 | self.lazyread() | |
|
123 | 121 | self.markdirty() |
|
124 | 122 | self.pl = p1, p2 |
|
125 | 123 | |
@@ -129,9 +127,11 b' class dirstate:' | |||
|
129 | 127 | except KeyError: |
|
130 | 128 | return "?" |
|
131 | 129 | |
|
130 | def lazyread(self): | |
|
131 | if self.map is None: | |
|
132 | self.read() | |
|
133 | ||
|
132 | 134 | def read(self): |
|
133 | if self.map is not None: return self.map | |
|
134 | ||
|
135 | 135 | self.map = {} |
|
136 | 136 | self.pl = [nullid, nullid] |
|
137 | 137 | try: |
@@ -154,7 +154,7 b' class dirstate:' | |||
|
154 | 154 | pos += l |
|
155 | 155 | |
|
156 | 156 | def copy(self, source, dest): |
|
157 | self.read() | |
|
157 | self.lazyread() | |
|
158 | 158 | self.markdirty() |
|
159 | 159 | self.copies[dest] = source |
|
160 | 160 | |
@@ -169,13 +169,13 b' class dirstate:' | |||
|
169 | 169 | a marked for addition''' |
|
170 | 170 | |
|
171 | 171 | if not files: return |
|
172 | self.read() | |
|
172 | self.lazyread() | |
|
173 | 173 | self.markdirty() |
|
174 | 174 | for f in files: |
|
175 | 175 | if state == "r": |
|
176 | 176 | self.map[f] = ('r', 0, 0, 0) |
|
177 | 177 | else: |
|
178 | s = os.lstat( |
|
178 | s = os.lstat(self.wjoin(f)) | |
|
179 | 179 | st_size = kw.get('st_size', s.st_size) |
|
180 | 180 | st_mtime = kw.get('st_mtime', s.st_mtime) |
|
181 | 181 | self.map[f] = (state, s.st_mode, st_size, st_mtime) |
@@ -184,7 +184,7 b' class dirstate:' | |||
|
184 | 184 | |
|
185 | 185 | def forget(self, files): |
|
186 | 186 | if not files: return |
|
187 | self.read() | |
|
187 | self.lazyread() | |
|
188 | 188 | self.markdirty() |
|
189 | 189 | for f in files: |
|
190 | 190 | try: |
@@ -198,7 +198,7 b' class dirstate:' | |||
|
198 | 198 | self.markdirty() |
|
199 | 199 | |
|
200 | 200 | def write(self): |
|
201 | st = self.opener("dirstate", "w") | |
|
201 | st = self.opener("dirstate", "w", atomic=True) | |
|
202 | 202 | st.write("".join(self.pl)) |
|
203 | 203 | for f, e in self.map.items(): |
|
204 | 204 | c = self.copied(f) |
@@ -213,7 +213,7 b' class dirstate:' | |||
|
213 | 213 | unknown = [] |
|
214 | 214 | |
|
215 | 215 | for x in files: |
|
216 | if x |
|
216 | if x == '.': | |
|
217 | 217 | return self.map.copy() |
|
218 | 218 | if x not in self.map: |
|
219 | 219 | unknown.append(x) |
@@ -241,7 +241,7 b' class dirstate:' | |||
|
241 | 241 | bs += 1 |
|
242 | 242 | return ret |
|
243 | 243 | |
|
244 | def supported_type(self, f, st, verbose= |
|
244 | def supported_type(self, f, st, verbose=False): | |
|
245 | 245 | if stat.S_ISREG(st.st_mode): |
|
246 | 246 | return True |
|
247 | 247 | if verbose: |
@@ -258,7 +258,7 b' class dirstate:' | |||
|
258 | 258 | return False |
|
259 | 259 | |
|
260 | 260 | def statwalk(self, files=None, match=util.always, dc=None): |
|
261 | self.read() | |
|
261 | self.lazyread() | |
|
262 | 262 | |
|
263 | 263 | # walk all files by default |
|
264 | 264 | if not files: |
@@ -296,7 +296,6 b' class dirstate:' | |||
|
296 | 296 | def walkhelper(self, files, statmatch, dc): |
|
297 | 297 | # recursion free walker, faster than os.walk. |
|
298 | 298 | def findfiles(s): |
|
299 | retfiles = [] | |
|
300 | 299 | work = [s] |
|
301 | 300 | while work: |
|
302 | 301 | top = work.pop() |
@@ -306,7 +305,7 b' class dirstate:' | |||
|
306 | 305 | nd = util.normpath(top[len(self.root) + 1:]) |
|
307 | 306 | if nd == '.': nd = '' |
|
308 | 307 | for f in names: |
|
309 | np = os.path.join(nd, f) | |
|
308 | np = util.pconvert(os.path.join(nd, f)) | |
|
310 | 309 | if seen(np): |
|
311 | 310 | continue |
|
312 | 311 | p = os.path.join(top, f) |
@@ -317,12 +316,12 b' class dirstate:' | |||
|
317 | 316 | if statmatch(ds, st): |
|
318 | 317 | work.append(p) |
|
319 | 318 | if statmatch(np, st) and np in dc: |
|
320 | yield 'm', |
|
319 | yield 'm', np, st | |
|
321 | 320 | elif statmatch(np, st): |
|
322 | 321 | if self.supported_type(np, st): |
|
323 | yield 'f', |
|
322 | yield 'f', np, st | |
|
324 | 323 | elif np in dc: |
|
325 | yield 'm', |
|
324 | yield 'm', np, st | |
|
326 | 325 | |
|
327 | 326 | known = {'.hg': 1} |
|
328 | 327 | def seen(fn): |
@@ -332,13 +331,20 b' class dirstate:' | |||
|
332 | 331 | # step one, find all files that match our criteria |
|
333 | 332 | files.sort() |
|
334 | 333 | for ff in util.unique(files): |
|
335 | f = |
|
334 | f = self.wjoin(ff) | |
|
336 | 335 | try: |
|
337 | 336 | st = os.lstat(f) |
|
338 | 337 | except OSError, inst: |
|
339 | if ff not in dc: self.ui.warn('%s: %s\n' % ( | |
|
340 | util.pathto(self.getcwd(), ff), | |
|
341 |
|
338 | nf = util.normpath(ff) | |
|
339 | found = False | |
|
340 | for fn in dc: | |
|
341 | if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'): | |
|
342 | found = True | |
|
343 | break | |
|
344 | if not found: | |
|
345 | self.ui.warn('%s: %s\n' % ( | |
|
346 | util.pathto(self.getcwd(), ff), | |
|
347 | inst.strerror)) | |
|
342 | 348 | continue |
|
343 | 349 | if stat.S_ISDIR(st.st_mode): |
|
344 | 350 | cmp1 = (lambda x, y: cmp(x[1], y[1])) |
@@ -352,7 +358,7 b' class dirstate:' | |||
|
352 | 358 | continue |
|
353 | 359 | self.blockignore = True |
|
354 | 360 | if statmatch(ff, st): |
|
355 | if self.supported_type(ff, st): | |
|
361 | if self.supported_type(ff, st, verbose=True): | |
|
356 | 362 | yield 'f', ff, st |
|
357 | 363 | elif ff in dc: |
|
358 | 364 | yield 'm', ff, st |
@@ -380,7 +386,7 b' class dirstate:' | |||
|
380 | 386 | nonexistent = True |
|
381 | 387 | if not st: |
|
382 | 388 | try: |
|
383 | f = |
|
389 | f = self.wjoin(fn) | |
|
384 | 390 | st = os.lstat(f) |
|
385 | 391 | except OSError, inst: |
|
386 | 392 | if inst.errno != errno.ENOENT: |
@@ -1,10 +1,10 b'' | |||
|
1 | 1 | import getopt |
|
2 | 2 | |
|
3 | 3 | def fancyopts(args, options, state): |
|
4 | long=[] | |
|
5 | short='' | |
|
6 | map={} | |
|
7 | dt={} | |
|
4 | long = [] | |
|
5 | short = '' | |
|
6 | map = {} | |
|
7 | dt = {} | |
|
8 | 8 | |
|
9 | 9 | for s, l, d, c in options: |
|
10 | 10 | pl = l.replace('-', '_') |
@@ -54,11 +54,11 b' class filelog(revlog):' | |||
|
54 | 54 | mt = "" |
|
55 | 55 | if meta: |
|
56 | 56 | mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ] |
|
57 | text = "\1\n" |
|
57 | text = "\1\n%s\1\n%s" % ("".join(mt), text) | |
|
58 | 58 | return self.addrevision(text, transaction, link, p1, p2) |
|
59 | 59 | |
|
60 | 60 | def renamed(self, node): |
|
61 | if 0 and self.parents(node)[0] != nullid: | |
|
61 | if 0 and self.parents(node)[0] != nullid: # XXX | |
|
62 | 62 | return False |
|
63 | 63 | m = self.readmeta(node) |
|
64 | 64 | if m and m.has_key("copy"): |
@@ -71,7 +71,7 b' def get_mtime(repo_path):' | |||
|
71 | 71 | else: |
|
72 | 72 | return os.stat(hg_path).st_mtime |
|
73 | 73 | |
|
74 | class hgrequest: | |
|
74 | class hgrequest(object): | |
|
75 | 75 | def __init__(self, inp=None, out=None, env=None): |
|
76 | 76 | self.inp = inp or sys.stdin |
|
77 | 77 | self.out = out or sys.stdout |
@@ -104,7 +104,7 b' class hgrequest:' | |||
|
104 | 104 | headers.append(('Content-length', str(size))) |
|
105 | 105 | self.header(headers) |
|
106 | 106 | |
|
107 | class templater: | |
|
107 | class templater(object): | |
|
108 | 108 | def __init__(self, mapfile, filters={}, defaults={}): |
|
109 | 109 | self.cache = {} |
|
110 | 110 | self.map = {} |
@@ -165,7 +165,6 b' class templater:' | |||
|
165 | 165 | common_filters = { |
|
166 | 166 | "escape": cgi.escape, |
|
167 | 167 | "strip": lambda x: x.strip(), |
|
168 | "rstrip": lambda x: x.rstrip(), | |
|
169 | 168 | "age": age, |
|
170 | 169 | "date": lambda x: util.datestr(x), |
|
171 | 170 | "addbreaks": nl2br, |
@@ -176,7 +175,7 b' common_filters = {' | |||
|
176 | 175 | "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"), |
|
177 | 176 | } |
|
178 | 177 | |
|
179 | class hgweb: | |
|
178 | class hgweb(object): | |
|
180 | 179 | def __init__(self, repo, name=None): |
|
181 | 180 | if type(repo) == type(""): |
|
182 | 181 | self.repo = hg.repository(ui.ui(), repo) |
@@ -952,14 +951,8 b' def create_server(repo):' | |||
|
952 | 951 | else: |
|
953 | 952 | return BaseHTTPServer.HTTPServer((address, port), hgwebhandler) |
|
954 | 953 | |
|
955 | def server(path, name, templates, address, port, use_ipv6=False, | |
|
956 | accesslog=sys.stdout, errorlog=sys.stderr): | |
|
957 | httpd = create_server(path, name, templates, address, port, use_ipv6, | |
|
958 | accesslog, errorlog) | |
|
959 | httpd.serve_forever() | |
|
960 | ||
|
961 | 954 | # This is a stopgap |
|
962 | class hgwebdir: | |
|
955 | class hgwebdir(object): | |
|
963 | 956 | def __init__(self, config): |
|
964 | 957 | def cleannames(items): |
|
965 | 958 | return [(name.strip('/'), path) for name, path in items] |
@@ -1000,7 +993,10 b' class hgwebdir:' | |||
|
1000 | 993 | .replace("//", "/")) |
|
1001 | 994 | |
|
1002 | 995 | # update time with local timezone |
|
1003 | d = (get_mtime(path), util.makedate()[1]) | |
|
996 | try: | |
|
997 | d = (get_mtime(path), util.makedate()[1]) | |
|
998 | except OSError: | |
|
999 | continue | |
|
1004 | 1000 | |
|
1005 | 1001 | yield dict(contact=(get("ui", "username") or # preferred |
|
1006 | 1002 | get("web", "contact") or # deprecated |
@@ -1017,7 +1013,12 b' class hgwebdir:' | |||
|
1017 | 1013 | if virtual: |
|
1018 | 1014 | real = dict(self.repos).get(virtual) |
|
1019 | 1015 | if real: |
|
1020 |
|
1016 | try: | |
|
1017 | hgweb(real).run(req) | |
|
1018 | except IOError, inst: | |
|
1019 | req.write(tmpl("error", error=inst.strerror)) | |
|
1020 | except hg.RepoError, inst: | |
|
1021 | req.write(tmpl("error", error=str(inst))) | |
|
1021 | 1022 | else: |
|
1022 | 1023 | req.write(tmpl("notfound", repo=virtual)) |
|
1023 | 1024 | else: |
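Note on the two hgwebdir hunks above: an unreadable repository no longer breaks the index page (the OSError from get_mtime is skipped with continue), and a failure while serving a single repository is rendered through the new error template instead of aborting the request. A rough standalone sketch of that trap-and-report shape; run_one_repo, serve and render_error are illustrative stand-ins, not hgweb APIs:

    def run_one_repo(serve, render_error, req):
        """Serve one repository request; report failures instead of dying."""
        try:
            serve(req)
        except IOError as inst:            # I/O trouble while serving
            render_error(req, inst.strerror or str(inst))
        except Exception as inst:          # stands in for hg.RepoError here
            render_error(req, str(inst))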
@@ -7,7 +7,7 b'' | |||
|
7 | 7 | |
|
8 | 8 | import byterange, urllib2 |
|
9 | 9 | |
|
10 | class httprangereader: | |
|
10 | class httprangereader(object): | |
|
11 | 11 | def __init__(self, url): |
|
12 | 12 | self.url = url |
|
13 | 13 | self.pos = 0 |
@@ -12,7 +12,7 b' from i18n import gettext as _' | |||
|
12 | 12 | from demandload import * |
|
13 | 13 | demandload(globals(), "re lock transaction tempfile stat mdiff errno") |
|
14 | 14 | |
|
15 | class localrepository: | |
|
15 | class localrepository(object): | |
|
16 | 16 | def __init__(self, ui, path=None, create=0): |
|
17 | 17 | if not path: |
|
18 | 18 | p = os.getcwd() |
@@ -43,7 +43,7 b' class localrepository:' | |||
|
43 | 43 | |
|
44 | 44 | self.dirstate = dirstate.dirstate(self.opener, ui, self.root) |
|
45 | 45 | try: |
|
46 | self.ui.readconfig( |

46 | self.ui.readconfig(self.join("hgrc")) | |
|
47 | 47 | except IOError: pass |
|
48 | 48 | |
|
49 | 49 | def hook(self, name, **args): |
@@ -225,18 +225,20 b' class localrepository:' | |||
|
225 | 225 | lock = self.lock() |
|
226 | 226 | if os.path.exists(self.join("journal")): |
|
227 | 227 | self.ui.status(_("rolling back interrupted transaction\n")) |
|
228 |
|
|
|
228 | transaction.rollback(self.opener, self.join("journal")) | |
|
229 | return True | |
|
229 | 230 | else: |
|
230 | 231 | self.ui.warn(_("no interrupted transaction available\n")) |
|
232 | return False | |
|
231 | 233 | |
|
232 | 234 | def undo(self): |
|
235 | wlock = self.wlock() | |
|
233 | 236 | lock = self.lock() |
|
234 | 237 | if os.path.exists(self.join("undo")): |
|
235 | 238 | self.ui.status(_("rolling back last transaction\n")) |
|
236 | 239 | transaction.rollback(self.opener, self.join("undo")) |
|
237 | self.dirstate = None | |
|
238 | 240 | util.rename(self.join("undo.dirstate"), self.join("dirstate")) |
|
239 | self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root) | |
|
241 | self.dirstate.read() | |
|
240 | 242 | else: |
|
241 | 243 | self.ui.warn(_("no undo information available\n")) |
|
242 | 244 | |
@@ -249,6 +251,17 b' class localrepository:' | |||
|
249 | 251 | return lock.lock(self.join("lock"), wait) |
|
250 | 252 | raise inst |
|
251 | 253 | |
|
254 | def wlock(self, wait=1): | |
|
255 | try: | |
|
256 | wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write) | |
|
257 | except lock.LockHeld, inst: | |
|
258 | if not wait: | |
|
259 | raise inst | |
|
260 | self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0]) | |
|
261 | wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write) | |
|
262 | self.dirstate.read() | |
|
263 | return wlock | |
|
264 | ||
|
252 | 265 | def rawcommit(self, files, text, user, date, p1=None, p2=None): |
|
253 | 266 | orig_parent = self.dirstate.parents()[0] or nullid |
|
254 | 267 | p1 = p1 or self.dirstate.parents()[0] or nullid |
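The new wlock() mirrors the existing lock() acquisition: try a non-blocking acquire of the "wlock" file first, and if it is held, warn about the holder and retry in blocking mode when waiting is allowed. A minimal sketch of that retry shape; acquire(path, wait) and the local LockHeld class stand in for lock.lock and its exception:

    class LockHeld(Exception):
        def __init__(self, holder):
            Exception.__init__(self, holder)
            self.holder = holder

    def acquire_with_retry(acquire, path, wait, warn):
        """Non-blocking attempt first; on contention, optionally block."""
        try:
            return acquire(path, 0)        # 0 = do not wait
        except LockHeld as inst:
            if not wait:
                raise
            warn("waiting for lock held by %s\n" % inst.holder)
            return acquire(path, 1)        # now block until the lock frees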
@@ -265,6 +278,8 b' class localrepository:' | |||
|
265 | 278 | else: |
|
266 | 279 | update_dirstate = 0 |
|
267 | 280 | |
|
281 | wlock = self.wlock() | |
|
282 | lock = self.lock() | |
|
268 | 283 | tr = self.transaction() |
|
269 | 284 | mm = m1.copy() |
|
270 | 285 | mfm = mf1.copy() |
@@ -353,6 +368,7 b' class localrepository:' | |||
|
353 | 368 | if not self.hook("precommit"): |
|
354 | 369 | return None |
|
355 | 370 | |
|
371 | wlock = self.wlock() | |
|
356 | 372 | lock = self.lock() |
|
357 | 373 | tr = self.transaction() |
|
358 | 374 | |
@@ -446,8 +462,14 b' class localrepository:' | |||
|
446 | 462 | |
|
447 | 463 | def walk(self, node=None, files=[], match=util.always): |
|
448 | 464 | if node: |
|
465 | fdict = dict.fromkeys(files) | |
|
449 | 466 | for fn in self.manifest.read(self.changelog.read(node)[0]): |
|
450 |
|
|
|
467 | fdict.pop(fn, None) | |
|
468 | if match(fn): | |
|
469 | yield 'm', fn | |
|
470 | for fn in fdict: | |
|
471 | self.ui.warn(_('%s: No such file in rev %s\n') % ( | |
|
472 | util.pathto(self.getcwd(), fn), short(node))) | |
|
451 | 473 | else: |
|
452 | 474 | for src, fn in self.dirstate.walk(files, match): |
|
453 | 475 | yield src, fn |
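walk() now seeds a dict from the requested file names and pops every manifest entry it actually visits, so whatever is left afterwards can be reported as missing from that revision. The same bookkeeping in isolation (a sketch; walk_rev is a made-up name and the manifest is reduced to a plain list of tracked names):

    def walk_rev(manifest, files, match=lambda fn: True):
        """Collect matching names plus the requested names never seen."""
        wanted = dict.fromkeys(files)      # what the caller asked about
        found = []
        for fn in manifest:
            wanted.pop(fn, None)           # seen, so not missing
            if match(fn):
                found.append(fn)
        return found, list(wanted)         # leftovers are absent from this rev

    # usage sketch
    found, missing = walk_rev(["a", "b"], ["a", "c"])
    assert found == ["a", "b"] and missing == ["c"]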
@@ -470,6 +492,10 b' class localrepository:' | |||
|
470 | 492 | |
|
471 | 493 | # are we comparing the working directory? |
|
472 | 494 | if not node2: |
|
495 | try: | |
|
496 | wlock = self.wlock(wait=0) | |
|
497 | except lock.LockHeld: | |
|
498 | wlock = None | |
|
473 | 499 | l, c, a, d, u = self.dirstate.changes(files, match) |
|
474 | 500 | |
|
475 | 501 | # are we comparing working dir against its parent? |
@@ -481,6 +507,8 b' class localrepository:' | |||
|
481 | 507 | for f in l: |
|
482 | 508 | if fcmp(f, mf2): |
|
483 | 509 | c.append(f) |
|
510 | elif wlock is not None: | |
|
511 | self.dirstate.update([f], "n") | |
|
484 | 512 | |
|
485 | 513 | for l in c, a, d, u: |
|
486 | 514 | l.sort() |
@@ -524,6 +552,7 b' class localrepository:' | |||
|
524 | 552 | return (c, a, d, u) |
|
525 | 553 | |
|
526 | 554 | def add(self, list): |
|
555 | wlock = self.wlock() | |
|
527 | 556 | for f in list: |
|
528 | 557 | p = self.wjoin(f) |
|
529 | 558 | if not os.path.exists(p): |
@@ -536,6 +565,7 b' class localrepository:' | |||
|
536 | 565 | self.dirstate.update([f], "a") |
|
537 | 566 | |
|
538 | 567 | def forget(self, list): |
|
568 | wlock = self.wlock() | |
|
539 | 569 | for f in list: |
|
540 | 570 | if self.dirstate.state(f) not in 'ai': |
|
541 | 571 | self.ui.warn(_("%s not added!\n") % f) |
@@ -549,6 +579,7 b' class localrepository:' | |||
|
549 | 579 | util.unlink(self.wjoin(f)) |
|
550 | 580 | except OSError, inst: |
|
551 | 581 | if inst.errno != errno.ENOENT: raise |
|
582 | wlock = self.wlock() | |
|
552 | 583 | for f in list: |
|
553 | 584 | p = self.wjoin(f) |
|
554 | 585 | if os.path.exists(p): |
@@ -566,6 +597,7 b' class localrepository:' | |||
|
566 | 597 | mn = self.changelog.read(p)[0] |
|
567 | 598 | mf = self.manifest.readflags(mn) |
|
568 | 599 | m = self.manifest.read(mn) |
|
600 | wlock = self.wlock() | |
|
569 | 601 | for f in list: |
|
570 | 602 | if self.dirstate.state(f) not in "r": |
|
571 | 603 | self.ui.warn("%s not removed!\n" % f) |
@@ -582,12 +614,17 b' class localrepository:' | |||
|
582 | 614 | elif not os.path.isfile(p): |
|
583 | 615 | self.ui.warn(_("copy failed: %s is not a file\n") % dest) |
|
584 | 616 | else: |
|
617 | wlock = self.wlock() | |
|
585 | 618 | if self.dirstate.state(dest) == '?': |
|
586 | 619 | self.dirstate.update([dest], "a") |
|
587 | 620 | self.dirstate.copy(source, dest) |
|
588 | 621 | |
|
589 | def heads(self): | |
|
590 |
|
|
|
622 | def heads(self, start=None): | |
|
623 | heads = self.changelog.heads(start) | |
|
624 | # sort the output in rev descending order | |
|
625 | heads = [(-self.changelog.rev(h), h) for h in heads] | |
|
626 | heads.sort() | |
|
627 | return [n for (r, n) in heads] | |
|
591 | 628 | |
|
592 | 629 | # branchlookup returns a dict giving a list of branches for |
|
593 | 630 | # each head. A branch is defined as the tag of a node or |
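localrepository.heads() now takes the nodes from changelog.heads(start) and returns them newest first by decorating each node with its negated revision number before sorting. The same trick on plain data; rev_of stands in for changelog.rev:

    def sort_heads_desc(nodes, rev_of):
        """Order nodes by descending revision number."""
        decorated = [(-rev_of(n), n) for n in nodes]   # negate: sort() puts newest first
        decorated.sort()
        return [n for (r, n) in decorated]

    # usage sketch
    revs = {"n1": 1, "n5": 5, "n3": 3}
    assert sort_heads_desc(list(revs), revs.get) == ["n5", "n3", "n1"]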
@@ -1372,6 +1409,9 b' class localrepository:' | |||
|
1372 | 1409 | mw[f] = "" |
|
1373 | 1410 | mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False)) |
|
1374 | 1411 | |
|
1412 | if moddirstate: | |
|
1413 | wlock = self.wlock() | |
|
1414 | ||
|
1375 | 1415 | for f in d: |
|
1376 | 1416 | if f in mw: del mw[f] |
|
1377 | 1417 |
@@ -11,11 +11,12 b' import util' | |||
|
11 | 11 | class LockHeld(Exception): |
|
12 | 12 | pass |
|
13 | 13 | |
|
14 | class lock: | |
|
15 | def __init__(self, file, wait=1): | |
|
14 | class lock(object): | |
|
15 | def __init__(self, file, wait=1, releasefn=None): | |
|
16 | 16 | self.f = file |
|
17 | 17 | self.held = 0 |
|
18 | 18 | self.wait = wait |
|
19 | self.releasefn = releasefn | |
|
19 | 20 | self.lock() |
|
20 | 21 | |
|
21 | 22 | def __del__(self): |
@@ -43,6 +44,8 b' class lock:' | |||
|
43 | 44 | def release(self): |
|
44 | 45 | if self.held: |
|
45 | 46 | self.held = 0 |
|
47 | if self.releasefn: | |
|
48 | self.releasefn() | |
|
46 | 49 | try: |
|
47 | 50 | os.unlink(self.f) |
|
48 | 51 | except: pass |
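lock objects now accept an optional releasefn and call it just before the lock file is removed; the repository passes dirstate.write so the dirstate is flushed exactly when the working-directory lock goes away. A minimal sketch of a lock with that hook, using a plain O_EXCL lock file rather than Mercurial's implementation:

    import os

    class SimpleLock(object):
        """Exclusive lock file that runs a callback on release."""
        def __init__(self, path, releasefn=None):
            self.path = path
            self.releasefn = releasefn
            self.held = False
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)  # fails if already locked
            os.close(fd)
            self.held = True

        def release(self):
            if self.held:
                self.held = False
                if self.releasefn:
                    self.releasefn()       # e.g. write the dirstate back out
                try:
                    os.unlink(self.path)
                except OSError:
                    pass

        def __del__(self):
            self.release()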
@@ -5,17 +5,16 b'' | |||
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | import |

8 | import struct | |
|
9 | 9 | from revlog import * |
|
10 | 10 | from i18n import gettext as _ |
|
11 | 11 | from demandload import * |
|
12 | demandload(globals(), "bisect") | |
|
12 | demandload(globals(), "bisect array") | |
|
13 | 13 | |
|
14 | 14 | class manifest(revlog): |
|
15 | 15 | def __init__(self, opener): |
|
16 | 16 | self.mapcache = None |
|
17 | 17 | self.listcache = None |
|
18 | self.addlist = None | |
|
19 | 18 | revlog.__init__(self, opener, "00manifest.i", "00manifest.d") |
|
20 | 19 | |
|
21 | 20 | def read(self, node): |
@@ -25,8 +24,9 b' class manifest(revlog):' | |||
|
25 | 24 | text = self.revision(node) |
|
26 | 25 | map = {} |
|
27 | 26 | flag = {} |
|
28 | self.listcache = ( |

29 | for l in self.listcache[1]: | |
|
27 | self.listcache = array.array('c', text) | |
|
28 | lines = text.splitlines(1) | |
|
29 | for l in lines: | |
|
30 | 30 | (f, n) = l.split('\0') |
|
31 | 31 | map[f] = bin(n[:40]) |
|
32 | 32 | flag[f] = (n[40:-1] == "x") |
@@ -39,57 +39,67 b' class manifest(revlog):' | |||
|
39 | 39 | self.read(node) |
|
40 | 40 | return self.mapcache[2] |
|
41 | 41 | |
|
42 | def diff(self, a, b): | |
|
43 | return mdiff.textdiff(str(a), str(b)) | |
|
44 | ||
|
42 | 45 | def add(self, map, flags, transaction, link, p1=None, p2=None, |
|
43 | 46 | changed=None): |
|
44 | # directly generate the mdiff delta from the data collected during | |
|
45 | # the bisect loop below | |
|
46 | def gendelta(delta): | |
|
47 | i = 0 | |
|
48 | result = [] | |
|
49 | while i < len(delta): | |
|
50 | start = delta[i][2] | |
|
51 | end = delta[i][3] | |
|
52 | l = delta[i][4] | |
|
53 | if l == None: | |
|
54 | l = "" | |
|
55 | while i < len(delta) - 1 and start <= delta[i+1][2] \ | |
|
56 | and end >= delta[i+1][2]: | |
|
57 | if delta[i+1][3] > end: | |
|
58 | end = delta[i+1][3] | |
|
59 | if delta[i+1][4]: | |
|
60 | l += delta[i+1][4] | |
|
47 | ||
|
48 | # returns a tuple (start, end). If the string is found | |
|
49 | # m[start:end] are the line containing that string. If start == end | |
|
50 | # the string was not found and they indicate the proper sorted | |
|
51 | # insertion point. This was taken from bisect_left, and modified | |
|
52 | # to find line start/end as it goes along. | |
|
53 | # | |
|
54 | # m should be a buffer or a string | |
|
55 | # s is a string | |
|
56 | # | |
|
57 | def manifestsearch(m, s, lo=0, hi=None): | |
|
58 | def advance(i, c): | |
|
59 | while i < lenm and m[i] != c: | |
|
61 | 60 | i += 1 |
|
62 | result.append(struct.pack(">lll", start, end, len(l)) + l) | |
|
63 |
|
|
|
64 | return result | |
|
61 | return i | |
|
62 | lenm = len(m) | |
|
63 | if not hi: | |
|
64 | hi = lenm | |
|
65 | while lo < hi: | |
|
66 | mid = (lo + hi) // 2 | |
|
67 | start = mid | |
|
68 | while start > 0 and m[start-1] != '\n': | |
|
69 | start -= 1 | |
|
70 | end = advance(start, '\0') | |
|
71 | if m[start:end] < s: | |
|
72 | # we know that after the null there are 40 bytes of sha1 | |
|
73 | # this translates to the bisect lo = mid + 1 | |
|
74 | lo = advance(end + 40, '\n') + 1 | |
|
75 | else: | |
|
76 | # this translates to the bisect hi = mid | |
|
77 | hi = start | |
|
78 | end = advance(lo, '\0') | |
|
79 | found = m[lo:end] | |
|
80 | if cmp(s, found) == 0: | |
|
81 | # we know that after the null there are 40 bytes of sha1 | |
|
82 | end = advance(end + 40, '\n') | |
|
83 | return (lo, end+1) | |
|
84 | else: | |
|
85 | return (lo, lo) | |
|
65 | 86 | |
|
66 | 87 | # apply the changes collected during the bisect loop to our addlist |
|
67 | def addlistdelta(addlist, delta): | |
|
68 | # apply the deltas to the addlist. start from the bottom up | |
|
88 | # return a delta suitable for addrevision | |
|
89 | def addlistdelta(addlist, x): | |
|
90 | # start from the bottom up | |
|
69 | 91 | # so changes to the offsets don't mess things up. |
|
70 | i = len( |

92 | i = len(x) | |
|
71 | 93 | while i > 0: |
|
72 | 94 | i -= 1 |
|
73 | start = |

74 | end = |

75 | if |

76 | addlist[start:end] = |

95 | start = x[i][0] | |
|
96 | end = x[i][1] | |
|
97 | if x[i][2]: | |
|
98 | addlist[start:end] = array.array('c', x[i][2]) | |
|
77 | 99 | else: |
|
78 | 100 | del addlist[start:end] |
|
79 | return addlist | |
|
80 | ||
|
81 | # calculate the byte offset of the start of each line in the | |
|
82 | # manifest | |
|
83 | def calcoffsets(addlist): | |
|
84 | offsets = [0] * (len(addlist) + 1) | |
|
85 | offset = 0 | |
|
86 | i = 0 | |
|
87 | while i < len(addlist): | |
|
88 | offsets[i] = offset | |
|
89 | offset += len(addlist[i]) | |
|
90 | i += 1 | |
|
91 | offsets[i] = offset | |
|
92 | return offsets | |
|
101 | return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \ | |
|
102 | for d in x ]) | |
|
93 | 103 | |
|
94 | 104 | # if we're using the listcache, make sure it is valid and |
|
95 | 105 | # parented by the same node we're diffing against |
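The rewritten manifest.add() keeps the previous manifest text in an array and finds each changed file with the byte-level bisect in manifestsearch above, instead of reparsing and re-joining every line. A much simplified, line-based sketch of the same lookup, assuming the manifest consists of sorted "name\0<40 hex>\n" lines (helper names are made up):

    import bisect

    def manifest_keys(text):
        """File names from manifest text, one "name\\0<40 hex>\\n" line per file."""
        return [l.split('\0', 1)[0] for l in text.splitlines(True)]

    def search(keys, name):
        """Return (index, found); index is the insertion point when not found."""
        i = bisect.bisect_left(keys, name)
        return i, i < len(keys) and keys[i] == name

    # usage sketch
    text = "a\x00" + "0" * 40 + "\nc\x00" + "1" * 40 + "\n"
    keys = manifest_keys(text)
    assert search(keys, "a") == (0, True)
    assert search(keys, "b") == (1, False)   # not present; 1 is where it would insert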
@@ -98,15 +108,13 b' class manifest(revlog):' | |||
|
98 | 108 | files = map.keys() |
|
99 | 109 | files.sort() |
|
100 | 110 | |
|
101 |
|
|
|
111 | text = ["%s\000%s%s\n" % | |
|
102 | 112 | (f, hex(map[f]), flags[f] and "x" or '') |
|
103 | 113 | for f in files] |
|
114 | self.listcache = array.array('c', "".join(text)) | |
|
104 | 115 | cachedelta = None |
|
105 | 116 | else: |
|
106 | addlist = self.listcache |

107 | ||
|
108 | # find the starting offset for each line in the add list | |
|
109 | offsets = calcoffsets(addlist) | |
|
117 | addlist = self.listcache | |
|
110 | 118 | |
|
111 | 119 | # combine the changed lists into one list for sorting |
|
112 | 120 | work = [[x, 0] for x in changed[0]] |
@@ -114,45 +122,52 b' class manifest(revlog):' | |||
|
114 | 122 | work.sort() |
|
115 | 123 | |
|
116 | 124 | delta = [] |
|
117 |
|
|
|
125 | dstart = None | |
|
126 | dend = None | |
|
127 | dline = [""] | |
|
128 | start = 0 | |
|
129 | # zero copy representation of addlist as a buffer | |
|
130 | addbuf = buffer(addlist) | |
|
118 | 131 | |
|
132 | # start with a readonly loop that finds the offset of | |
|
133 | # each line and creates the deltas | |
|
119 | 134 | for w in work: |
|
120 | 135 | f = w[0] |
|
121 | 136 | # bs will either be the index of the item or the insert point |
|
122 |
|
|
|
123 | if bs < len(addlist): | |
|
124 | fn = addlist[bs][:addlist[bs].index('\0')] | |
|
125 | else: | |
|
126 | fn = None | |
|
137 | start, end = manifestsearch(addbuf, f, start) | |
|
127 | 138 | if w[1] == 0: |
|
128 | 139 | l = "%s\000%s%s\n" % (f, hex(map[f]), |
|
129 | 140 | flags[f] and "x" or '') |
|
130 | 141 | else: |
|
131 | l = |

132 | start = |

133 | if fn != f: | |
|
134 | # item not found, insert a new one | |
|
135 | end = bs | |
|
136 | if w[1] == 1: | |
|
137 | raise AssertionError( | |
|
142 | l = "" | |
|
143 | if start == end and w[1] == 1: | |
|
144 | # item we want to delete was not found, error out | |
|
145 | raise AssertionError( | |
|
138 | 146 | _("failed to remove %s from manifest\n") % f) |
|
147 | if dstart != None and dstart <= start and dend >= start: | |
|
148 | if dend < end: | |
|
149 | dend = end | |
|
150 | if l: | |
|
151 | dline.append(l) | |
|
139 | 152 | else: |
|
140 | # item is found, replace/delete the existing line | |
|
141 | end = bs + 1 | |
|
142 | delta.append([start, end, offsets[start], offsets[end], l]) | |
|
153 | if dstart != None: | |
|
154 | delta.append([dstart, dend, "".join(dline)]) | |
|
155 | dstart = start | |
|
156 | dend = end | |
|
157 | dline = [l] | |
|
143 | 158 | |
|
144 | self.addlist = addlistdelta(addlist, delta) | |
|
145 | if self.mapcache[0] == self.tip(): | |
|
146 | cachedelta = "".join(gendelta(delta)) | |
|
147 | else: | |
|
148 | cachedelta = None | |
|
159 | if dstart != None: | |
|
160 | delta.append([dstart, dend, "".join(dline)]) | |
|
161 | # apply the delta to the addlist, and get a delta for addrevision | |
|
162 | cachedelta = addlistdelta(addlist, delta) | |
|
149 | 163 | |
|
150 | text = "".join(self.addlist) | |
|
151 | if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text: | |
|
152 | raise AssertionError(_("manifest delta failure\n")) | |
|
153 | n = self.addrevision(text, transaction, link, p1, p2, cachedelta) | |
|
164 | # the delta is only valid if we've been processing the tip revision | |
|
165 | if self.mapcache[0] != self.tip(): | |
|
166 | cachedelta = None | |
|
167 | self.listcache = addlist | |
|
168 | ||
|
169 | n = self.addrevision(buffer(self.listcache), transaction, link, p1, \ | |
|
170 | p2, cachedelta) | |
|
154 | 171 | self.mapcache = (n, map, flags) |
|
155 | self.listcache = (text, self.addlist) | |
|
156 | self.addlist = None | |
|
157 | 172 | |
|
158 | 173 | return n |
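The loop above coalesces neighbouring edits into (dstart, dend, dline) spans, applies them to the cached array from the bottom up so earlier offsets stay valid, and emits the same spans as >lll records for addrevision. Both halves in miniature, on a bytearray (an illustrative sketch, not the manifest code itself):

    import struct

    def apply_deltas(buf, deltas):
        """Apply (start, end, text) edits to a bytearray, bottom up.

        Working from the end keeps earlier offsets valid even though the
        buffer length changes as edits are applied.
        """
        for start, end, text in reversed(deltas):
            buf[start:end] = text
        return buf

    def pack_deltas(deltas):
        """Serialize edits as >lll headers followed by the replacement text."""
        return b"".join(struct.pack(">lll", s, e, len(t)) + t for s, e, t in deltas)

    # usage sketch: replace the first line, delete the last
    buf = bytearray(b"aaa\nbbb\nccc\n")
    deltas = [(0, 4, b"AAA\n"), (8, 12, b"")]
    assert bytes(apply_deltas(buf, deltas)) == b"AAA\nbbb\n"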
@@ -32,8 +32,8 b' def unidiff(a, ad, b, bd, fn, r=None, te' | |||
|
32 | 32 | l = list(difflib.unified_diff(a, b, "a/" + fn, "b/" + fn)) |
|
33 | 33 | if not l: return "" |
|
34 | 34 | # difflib uses a space, rather than a tab |
|
35 | l[0] = l[0][:-2] |

36 | l[1] = l[1][:-2] |

35 | l[0] = "%s\t%s\n" % (l[0][:-2], ad) | |
|
36 | l[1] = "%s\t%s\n" % (l[1][:-2], bd) | |
|
37 | 37 | |
|
38 | 38 | for ln in xrange(len(l)): |
|
39 | 39 | if l[ln][-1] != '\n': |
@@ -7,7 +7,7 b' This software may be used and distribute' | |||
|
7 | 7 | of the GNU General Public License, incorporated herein by reference. |
|
8 | 8 | """ |
|
9 | 9 | |
|
10 | import |

10 | import binascii | |
|
11 | 11 | |
|
12 | 12 | nullid = "\0" * 20 |
|
13 | 13 |
@@ -5,11 +5,11 b'' | |||
|
5 | 5 | # This software may be used and distributed according to the terms |
|
6 | 6 | # of the GNU General Public License, incorporated herein by reference. |
|
7 | 7 | |
|
8 | class remoterepository: | |
|
8 | class remoterepository(object): | |
|
9 | 9 | def local(self): |
|
10 | 10 | return False |
|
11 | 11 | |
|
12 | class remotelock: | |
|
12 | class remotelock(object): | |
|
13 | 13 | def __init__(self, repo): |
|
14 | 14 | self.repo = repo |
|
15 | 15 | def release(self): |
@@ -31,15 +31,15 b' def hash(text, p1, p2):' | |||
|
31 | 31 | |
|
32 | 32 | def compress(text): |
|
33 | 33 | """ generate a possibly-compressed representation of text """ |
|
34 | if not text: return text | |
|
34 | if not text: return ("", text) | |
|
35 | 35 | if len(text) < 44: |
|
36 | if text[0] == '\0': return text | |
|
37 | return 'u' |

36 | if text[0] == '\0': return ("", text) | |
|
37 | return ('u', text) | |
|
38 | 38 | bin = zlib.compress(text) |
|
39 | 39 | if len(bin) > len(text): |
|
40 | if text[0] == '\0': return text | |
|
41 | return 'u' |

42 | return bin | |
|
40 | if text[0] == '\0': return ("", text) | |
|
41 | return ('u', text) | |
|
42 | return ("", bin) | |
|
43 | 43 | |
|
44 | 44 | def decompress(bin): |
|
45 | 45 | """ decompress the given input """ |
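compress() now hands back a (prefix, data) pair: an empty prefix for zlib output or data that already starts with NUL, and 'u' for text stored uncompressed. Callers can measure and write the two pieces without first concatenating them into a new string. A simplified sketch of the scheme (it drops the small-text special case the real function keeps, and the helper names are made up):

    import zlib

    def compress(text):
        """Return (prefix, data); writing prefix then data stores the blob."""
        if not text or text[:1] == b"\0":
            return (b"", text)
        out = zlib.compress(text)
        if len(out) >= len(text):
            return (b"u", text)            # 'u' marks uncompressed storage
        return (b"", out)

    def stored_length(pair):
        prefix, data = pair
        return len(prefix) + len(data)

    # usage sketch
    pair = compress(b"x" * 1000)
    assert stored_length(pair) < 1000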
@@ -52,7 +52,7 b' def decompress(bin):' | |||
|
52 | 52 | |
|
53 | 53 | indexformat = ">4l20s20s20s" |
|
54 | 54 | |
|
55 | class lazyparser: | |
|
55 | class lazyparser(object): | |
|
56 | 56 | """ |
|
57 | 57 | this class avoids the need to parse the entirety of large indices |
|
58 | 58 | |
@@ -71,6 +71,9 b' class lazyparser:' | |||
|
71 | 71 | self.all = 0 |
|
72 | 72 | self.revlog = revlog |
|
73 | 73 | |
|
74 | def trunc(self, pos): | |
|
75 | self.l = pos/self.s | |
|
76 | ||
|
74 | 77 | def load(self, pos=None): |
|
75 | 78 | if self.all: return |
|
76 | 79 | if pos is not None: |
@@ -91,7 +94,7 b' class lazyparser:' | |||
|
91 | 94 | self.map[e[6]] = i |
|
92 | 95 | i += 1 |
|
93 | 96 | |
|
94 | class lazyindex: | |
|
97 | class lazyindex(object): | |
|
95 | 98 | """a lazy version of the index array""" |
|
96 | 99 | def __init__(self, parser): |
|
97 | 100 | self.p = parser |
@@ -104,10 +107,14 b' class lazyindex:' | |||
|
104 | 107 | return self.p.index[pos] |
|
105 | 108 | def __getitem__(self, pos): |
|
106 | 109 | return self.p.index[pos] or self.load(pos) |
|
110 | def __delitem__(self, pos): | |
|
111 | del self.p.index[pos] | |
|
107 | 112 | def append(self, e): |
|
108 | 113 | self.p.index.append(e) |
|
114 | def trunc(self, pos): | |
|
115 | self.p.trunc(pos) | |
|
109 | 116 | |
|
110 | class lazymap: | |
|
117 | class lazymap(object): | |
|
111 | 118 | """a lazy version of the node map""" |
|
112 | 119 | def __init__(self, parser): |
|
113 | 120 | self.p = parser |
@@ -140,10 +147,12 b' class lazymap:' | |||
|
140 | 147 | raise KeyError("node " + hex(key)) |
|
141 | 148 | def __setitem__(self, key, val): |
|
142 | 149 | self.p.map[key] = val |
|
150 | def __delitem__(self, key): | |
|
151 | del self.p.map[key] | |
|
143 | 152 | |
|
144 | 153 | class RevlogError(Exception): pass |
|
145 | 154 | |
|
146 | class revlog: | |
|
155 | class revlog(object): | |
|
147 | 156 | """ |
|
148 | 157 | the underlying revision storage object |
|
149 | 158 | |
@@ -400,25 +409,28 b' class revlog:' | |||
|
400 | 409 | assert heads |
|
401 | 410 | return (orderedout, roots, heads) |
|
402 | 411 | |
|
403 | def heads(self, stop=None): |

404 | """return the list of all nodes that have no children |

405 | p = {} | |
|
406 | h = [] | |
|
407 | stoprev = 0 | |
|
408 | if stop and stop in self.nodemap: | |
|
409 | stoprev = self.rev(stop) | |
|
412 | def heads(self, start=None): | |
|
413 | """return the list of all nodes that have no children | |
|
414 | ||
|
415 | if start is specified, only heads that are descendants of | |
|
416 | start will be returned | |
|
410 | 417 |
|
|
411 | for r in range(self.count() - 1, -1, -1): | |
|
418 | """ | |
|
419 | if start is None: | |
|
420 | start = nullid | |
|
421 | reachable = {start: 1} | |
|
422 | heads = {start: 1} | |
|
423 | startrev = self.rev(start) | |
|
424 | ||
|
425 | for r in xrange(startrev + 1, self.count()): | |
|
412 | 426 | n = self.node(r) |
|
413 | if n not in p: | |
|
414 | h.append(n) | |
|
415 | if n == stop: | |
|
416 | break | |
|
417 | if r < stoprev: | |
|
418 | break | |
|
419 | 427 | for pn in self.parents(n): |
|
420 |
|
|
|
421 | return h | |
|
428 | if pn in reachable: | |
|
429 | reachable[n] = 1 | |
|
430 | heads[n] = 1 | |
|
431 | if pn in heads: | |
|
432 | del heads[pn] | |
|
433 | return heads.keys() | |
|
422 | 434 | |
|
423 | 435 | def children(self, node): |
|
424 | 436 | """find the children of a given node""" |
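The rewritten revlog.heads(start) walks revisions newer than start once, marking a node reachable when one of its parents is reachable and dropping that parent from the candidate head set. A small sketch of the same walk over a plain parent map; nodes must be listed in revision order, as the revlog guarantees:

    def heads(nodes, parents, start):
        """Childless nodes among the descendants of start."""
        reachable = {start: True}
        hds = {start: True}
        for n in nodes:                    # nodes with rev > rev(start), in order
            for p in parents.get(n, ()):
                if p in reachable:
                    reachable[n] = True
                    hds[n] = True
                    if p in hds:
                        del hds[p]         # a parent with a child is not a head
        return list(hds)

    # usage sketch: start -> a -> b, plus an unrelated node u
    order = ["a", "b", "u"]
    par = {"a": ["start"], "b": ["a"], "u": []}
    assert heads(order, par, "start") == ["b"]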
@@ -543,14 +555,16 b' class revlog:' | |||
|
543 | 555 | end = self.end(t) |
|
544 | 556 | if not d: |
|
545 | 557 | prev = self.revision(self.tip()) |
|
546 | d = self.diff(prev, text) | |
|
558 | d = self.diff(prev, str(text)) | |
|
547 | 559 | data = compress(d) |
|
548 |
|
|
|
560 | l = len(data[1]) + len(data[0]) | |
|
561 | dist = end - start + l | |
|
549 | 562 | |
|
550 | 563 | # full versions are inserted when the needed deltas |
|
551 | 564 | # become comparable to the uncompressed text |
|
552 | 565 | if not n or dist > len(text) * 2: |
|
553 | 566 | data = compress(text) |
|
567 | l = len(data[1]) + len(data[0]) | |
|
554 | 568 | base = n |
|
555 | 569 | else: |
|
556 | 570 | base = self.base(t) |
@@ -559,14 +573,17 b' class revlog:' | |||
|
559 | 573 | if t >= 0: |
|
560 | 574 | offset = self.end(t) |
|
561 | 575 | |
|
562 | e = (offset, l |

576 | e = (offset, l, base, link, p1, p2, node) | |
|
563 | 577 | |
|
564 | 578 | self.index.append(e) |
|
565 | 579 | self.nodemap[node] = n |
|
566 | 580 | entry = struct.pack(indexformat, *e) |
|
567 | 581 | |
|
568 | 582 | transaction.add(self.datafile, e[0]) |
|
569 | self.opener(self.datafile, "a") |

583 | f = self.opener(self.datafile, "a") | |
|
584 | if data[0]: | |
|
585 | f.write(data[0]) | |
|
586 | f.write(data[1]) | |
|
570 | 587 | transaction.add(self.indexfile, n * len(entry)) |
|
571 | 588 | self.opener(self.indexfile, "a").write(entry) |
|
572 | 589 | |
@@ -784,6 +801,10 b' class revlog:' | |||
|
784 | 801 | continue |
|
785 | 802 | delta = chunk[80:] |
|
786 | 803 | |
|
804 | for p in (p1, p2): | |
|
805 | if not p in self.nodemap: | |
|
806 | raise RevlogError(_("unknown parent %s") % short(p1)) | |
|
807 | ||
|
787 | 808 | if not chain: |
|
788 | 809 | # retrieve the parent revision of the delta chain |
|
789 | 810 | chain = p1 |
@@ -797,7 +818,8 b' class revlog:' | |||
|
797 | 818 | # current size. |
|
798 | 819 | |
|
799 | 820 | if chain == prev: |
|
800 |
|
|
|
821 | tempd = compress(delta) | |
|
822 | cdelta = tempd[0] + tempd[1] | |
|
801 | 823 | |
|
802 | 824 | if chain != prev or (end - start + len(cdelta)) > measure * 2: |
|
803 | 825 | # flush our writes here so we can read it in revision |
@@ -824,6 +846,36 b' class revlog:' | |||
|
824 | 846 | ifh.close() |
|
825 | 847 | return node |
|
826 | 848 | |
|
849 | def strip(self, rev, minlink): | |
|
850 | if self.count() == 0 or rev >= self.count(): | |
|
851 | return | |
|
852 | ||
|
853 | # When stripping away a revision, we need to make sure it | |
|
854 | # does not actually belong to an older changeset. | |
|
855 | # The minlink parameter defines the oldest revision | |
|
856 | # we're allowed to strip away. | |
|
857 | while minlink > self.index[rev][3]: | |
|
858 | rev += 1 | |
|
859 | if rev >= self.count(): | |
|
860 | return | |
|
861 | ||
|
862 | # first truncate the files on disk | |
|
863 | end = self.start(rev) | |
|
864 | self.opener(self.datafile, "a").truncate(end) | |
|
865 | end = rev * struct.calcsize(indexformat) | |
|
866 | self.opener(self.indexfile, "a").truncate(end) | |
|
867 | ||
|
868 | # then reset internal state in memory to forget those revisions | |
|
869 | self.cache = None | |
|
870 | for p in self.index[rev:]: | |
|
871 | del self.nodemap[p[6]] | |
|
872 | del self.index[rev:] | |
|
873 | ||
|
874 | # truncating the lazyindex also truncates the lazymap. | |
|
875 | if isinstance(self.index, lazyindex): | |
|
876 | self.index.trunc(end) | |
|
877 | ||
|
878 | ||
|
827 | 879 | def checksize(self): |
|
828 | 880 | expected = 0 |
|
829 | 881 | if self.count(): |
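strip() truncates the .d and .i files at the first revision being removed and then forgets those entries in memory; the while loop first skips revisions whose linkrev is older than minlink, since those still belong to changesets that are being kept. The in-memory half in isolation (a sketch: index entries are reduced to (node, linkrev) pairs and the on-disk truncation is left out):

    def strip_index(index, nodemap, rev, minlink):
        """Forget revisions >= rev, but never ones linked before minlink."""
        while rev < len(index) and index[rev][1] < minlink:
            rev += 1
        if rev >= len(index):
            return None                    # nothing to strip
        for node, linkrev in index[rev:]:
            nodemap.pop(node, None)
        del index[rev:]
        return rev                         # first revision actually removed

    # usage sketch
    idx = [("n0", 0), ("n1", 1), ("n2", 2)]
    nmap = {"n0": 0, "n1": 1, "n2": 2}
    assert strip_index(idx, nmap, 1, minlink=2) == 2 and len(idx) == 2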
@@ -12,10 +12,9 b'' | |||
|
12 | 12 | # of the GNU General Public License, incorporated herein by reference. |
|
13 | 13 | |
|
14 | 14 | import os |
|
15 | import util | |
|
16 | 15 | from i18n import gettext as _ |
|
17 | 16 | |
|
18 | class transaction: | |
|
17 | class transaction(object): | |
|
19 | 18 | def __init__(self, report, opener, journal, after=None): |
|
20 | 19 | self.journal = None |
|
21 | 20 |
@@ -10,7 +10,7 b' from i18n import gettext as _' | |||
|
10 | 10 | from demandload import * |
|
11 | 11 | demandload(globals(), "re socket sys util") |
|
12 | 12 | |
|
13 | class ui: | |
|
13 | class ui(object): | |
|
14 | 14 | def __init__(self, verbose=False, debug=False, quiet=False, |
|
15 | 15 | interactive=True): |
|
16 | 16 | self.overlay = {} |
@@ -106,6 +106,13 b' class Abort(Exception):' | |||
|
106 | 106 | def always(fn): return True |
|
107 | 107 | def never(fn): return False |
|
108 | 108 | |
|
109 | def patkind(name, dflt_pat='glob'): | |
|
110 | """Split a string into an optional pattern kind prefix and the | |
|
111 | actual pattern.""" | |
|
112 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': | |
|
113 | if name.startswith(prefix + ':'): return name.split(':', 1) | |
|
114 | return dflt_pat, name | |
|
115 | ||
|
109 | 116 | def globre(pat, head='^', tail='$'): |
|
110 | 117 | "convert a glob pattern into a regexp" |
|
111 | 118 | i, n = 0, len(pat) |
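patkind() becomes a module-level helper (the copy nested inside _matcher is removed in a later hunk): it peels an optional "kind:" prefix off a pattern and defaults to glob. A quick usage sketch; this version returns a tuple, while the original returns the raw split result:

    def patkind(name, dflt_pat='glob'):
        """Split an optional pattern-kind prefix from a pattern string."""
        for prefix in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'):
            if name.startswith(prefix + ':'):
                return tuple(name.split(':', 1))
        return dflt_pat, name

    # usage sketch
    assert patkind('re:.*\\.py$') == ('re', '.*\\.py$')
    assert patkind('src/module') == ('glob', 'src/module')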
@@ -158,15 +165,20 b' def pathto(n1, n2):' | |||
|
158 | 165 | this returns a path in the form used by the local filesystem, not hg.''' |
|
159 | 166 | if not n1: return localpath(n2) |
|
160 | 167 | a, b = n1.split('/'), n2.split('/') |
|
161 | a.reverse() |

168 | a.reverse() | |
|
169 | b.reverse() | |
|
162 | 170 | while a and b and a[-1] == b[-1]: |
|
163 | a.pop() |

171 | a.pop() | |
|
172 | b.pop() | |
|
164 | 173 | b.reverse() |
|
165 | 174 | return os.sep.join((['..'] * len(a)) + b) |
|
166 | 175 | |
|
167 | 176 | def canonpath(root, cwd, myname): |
|
168 | 177 | """return the canonical path of myname, given cwd and root""" |
|
169 |
|
|
|
178 | if root == os.sep: | |
|
179 | rootsep = os.sep | |
|
180 | else: | |
|
181 | rootsep = root + os.sep | |
|
170 | 182 | name = myname |
|
171 | 183 | if not name.startswith(os.sep): |
|
172 | 184 | name = os.path.join(root, cwd, name) |
@@ -218,11 +230,6 b' def _matcher(canonroot, cwd, names, inc,' | |||
|
218 | 230 | make head regex a rooted bool |
|
219 | 231 | """ |
|
220 | 232 | |
|
221 | def patkind(name, dflt_pat='glob'): | |
|
222 | for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre': | |
|
223 | if name.startswith(prefix + ':'): return name.split(':', 1) | |
|
224 | return dflt_pat, name | |
|
225 | ||
|
226 | 233 | def contains_glob(name): |
|
227 | 234 | for c in name: |
|
228 | 235 | if c in _globchars: return True |
@@ -253,7 +260,7 b' def _matcher(canonroot, cwd, names, inc,' | |||
|
253 | 260 | try: |
|
254 | 261 | pat = '(?:%s)' % regex(k, p, tail) |
|
255 | 262 | matches.append(re.compile(pat).match) |
|
256 | except re.error |

263 | except re.error: | |
|
257 | 264 | raise Abort("invalid pattern: %s:%s" % (k, p)) |
|
258 | 265 | |
|
259 | 266 | def buildfn(text): |
@@ -362,7 +369,36 b' def opener(base):' | |||
|
362 | 369 | remote file access from higher level code. |
|
363 | 370 | """ |
|
364 | 371 | p = base |
|
365 | def o(path, mode="r", text=False): | |
|
372 | ||
|
373 | def mktempcopy(name): | |
|
374 | d, fn = os.path.split(name) | |
|
375 | fd, temp = tempfile.mkstemp(prefix=fn, dir=d) | |
|
376 | fp = os.fdopen(fd, "wb") | |
|
377 | try: | |
|
378 | fp.write(file(name, "rb").read()) | |
|
379 | except: | |
|
380 | try: os.unlink(temp) | |
|
381 | except: pass | |
|
382 | raise | |
|
383 | fp.close() | |
|
384 | st = os.lstat(name) | |
|
385 | os.chmod(temp, st.st_mode) | |
|
386 | return temp | |
|
387 | ||
|
388 | class atomicfile(file): | |
|
389 | """the file will only be copied on close""" | |
|
390 | def __init__(self, name, mode, atomic=False): | |
|
391 | self.__name = name | |
|
392 | self.temp = mktempcopy(name) | |
|
393 | file.__init__(self, self.temp, mode) | |
|
394 | def close(self): | |
|
395 | if not self.closed: | |
|
396 | file.close(self) | |
|
397 | rename(self.temp, self.__name) | |
|
398 | def __del__(self): | |
|
399 | self.close() | |
|
400 | ||
|
401 | def o(path, mode="r", text=False, atomic=False): | |
|
366 | 402 | f = os.path.join(p, path) |
|
367 | 403 | |
|
368 | 404 | if not text: |
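The duplicated copy-before-write logic moves into mktempcopy(), and the new atomicfile class reuses it so that opener(..., atomic=True) writes into a temporary file and only renames it over the target on close. The essence of that pattern as a standalone helper (a sketch; it uses os.rename directly, while the original goes through util.rename):

    import os, tempfile

    def atomic_write(path, data):
        """Write data next to path, then rename it into place.

        Readers see either the old contents or the new ones, never a
        half-written file.
        """
        d, fn = os.path.split(path)
        fd, temp = tempfile.mkstemp(prefix=fn, dir=d or ".")
        try:
            with os.fdopen(fd, "wb") as fp:
                fp.write(data)
            os.rename(temp, path)
        except Exception:
            try:
                os.unlink(temp)
            except OSError:
                pass
            raise

    # usage sketch
    atomic_write("demo.txt", b"hello\n")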
@@ -376,19 +412,10 b' def opener(base):' | |||
|
376 | 412 | if not os.path.isdir(d): |
|
377 | 413 | os.makedirs(d) |
|
378 | 414 | else: |
|
415 | if atomic: | |
|
416 | return atomicfile(f, mode) | |
|
379 | 417 | if nlink > 1: |
|
380 | d, fn = os.path.split(f) | |
|
381 | fd, temp = tempfile.mkstemp(prefix=fn, dir=d) | |
|
382 | fp = os.fdopen(fd, "wb") | |
|
383 | try: | |
|
384 | fp.write(file(f, "rb").read()) | |
|
385 | except: | |
|
386 | try: os.unlink(temp) | |
|
387 | except: pass | |
|
388 | raise | |
|
389 | fp.close() | |
|
390 | rename(temp, f) | |
|
391 | ||
|
418 | rename(mktempcopy(f), f) | |
|
392 | 419 | return file(f, mode) |
|
393 | 420 | |
|
394 | 421 | return o |
@@ -484,6 +511,7 b' else:' | |||
|
484 | 511 | nulldev = '/dev/null' |
|
485 | 512 | |
|
486 | 513 | def rcfiles(path): |
|
514 | print 'checking', path | |
|
487 | 515 | rcs = [os.path.join(path, 'hgrc')] |
|
488 | 516 | rcdir = os.path.join(path, 'hgrc.d') |
|
489 | 517 | try: |
@@ -72,8 +72,10 b' class install_package_data(install_data)' | |||
|
72 | 72 | try: |
|
73 | 73 | mercurial.version.remember_version(version) |
|
74 | 74 | cmdclass = {'install_data': install_package_data} |
|
75 | py2exe_opts = {} | |
|
75 | 76 | if py2exe_for_demandload is not None: |
|
76 | 77 | cmdclass['py2exe'] = py2exe_for_demandload |
|
78 | py2exe_opts['console'] = ['hg'] | |
|
77 | 79 | setup(name='mercurial', |
|
78 | 80 | version=mercurial.version.get_version(), |
|
79 | 81 | author='Matt Mackall', |
@@ -90,6 +92,6 b' try:' | |||
|
90 | 92 | glob.glob('templates/*.tmpl'))], |
|
91 | 93 | cmdclass=cmdclass, |
|
92 | 94 | scripts=['hg', 'hgmerge'], |
|
93 | console = ['hg']) | |
|
95 | **py2exe_opts) | |
|
94 | 96 | finally: |
|
95 | 97 | mercurial.version.forget_version() |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | <item> |
|
2 | <title>#desc|strip|firstline| |

2 | <title>#desc|strip|firstline|strip|escape#</title> | |
|
3 | 3 | <link>#url#?cs=#node|short#</link> |
|
4 | 4 | <description><![CDATA[#desc|strip|escape|addbreaks#]]></description> |
|
5 | 5 | <author>#author|obfuscate#</author> |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | <item> |
|
2 | <title>#desc|strip|firstline| |

2 | <title>#desc|strip|firstline|strip|escape#</title> | |
|
3 | 3 | <link>#url#?f=#filenode|short#;file=#file#</link> |
|
4 | 4 | <description><![CDATA[#desc|strip|escape|addbreaks#]]></description> |
|
5 | 5 | <author>#author|obfuscate#</author> |
@@ -39,3 +39,4 b' indexentry = "<tr class="parity#parity#"' | |||
|
39 | 39 | index = index.tmpl |
|
40 | 40 | archiveentry = "<a href="?ca=#node|short#;type=#type#">#type#</a> " |
|
41 | 41 | notfound = notfound.tmpl |
|
42 | error = error.tmpl |
@@ -5,7 +5,7 b'' | |||
|
5 | 5 | |
|
6 | 6 | <h2>Mercurial Repositories</h2> |
|
7 | 7 | |
|
8 | The specified repository "#repo#" is unknown, sorry. | |
|
8 | The specified repository "#repo|escape#" is unknown, sorry. | |
|
9 | 9 | |
|
10 | 10 | Please go back to the main repository list page. |
|
11 | 11 |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | #header# |
|
2 | <title>#repo#: tags</title> | |
|
2 | <title>#repo|escape#: tags</title> | |
|
3 | 3 | </head> |
|
4 | 4 | <body> |
|
5 | 5 |
@@ -40,16 +40,11 b' HGTMP="${TMPDIR-/tmp}/hgtests.$RANDOM.$R' | |||
|
40 | 40 | } |
|
41 | 41 | |
|
42 | 42 | TESTDIR="$PWD" |
|
43 | ||
|
44 | if [ -d /usr/lib64 ]; then | |
|
45 | lib=lib64 | |
|
46 | else | |
|
47 | lib=lib | |
|
48 | fi | |
|
49 | ||
|
50 | 43 | INST="$HGTMP/install" |
|
44 | PYTHONDIR="$INST/lib/python" | |
|
51 | 45 | cd .. |
|
52 | if ${PYTHON-python} setup.py install --home="$INST" |

46 | if ${PYTHON-python} setup.py install --home="$INST" \ | |
|
47 | --install-lib="$PYTHONDIR" > tests/install.err 2>&1 | |
|
53 | 48 | then |
|
54 | 49 | rm tests/install.err |
|
55 | 50 | else |
@@ -59,8 +54,7 b' fi' | |||
|
59 | 54 | cd "$TESTDIR" |
|
60 | 55 | |
|
61 | 56 | PATH="$INST/bin:$PATH"; export PATH |
|
62 | PYTHONPATH="$ |

63 | ||
|
57 | PYTHONPATH="$PYTHONDIR"; export PYTHONPATH | |
|
64 | 58 | |
|
65 | 59 | run_one() { |
|
66 | 60 | rm -f "$1.err" |
@@ -14,7 +14,7 b" echo 'import/export' >> port" | |||
|
14 | 14 | hg commit -m 2 -u spam -d '2 0' |
|
15 | 15 | echo 'import/export' >> port |
|
16 | 16 | hg commit -m 3 -u eggs -d '3 0' |
|
17 | head -3 port > port1 | |
|
17 | head -n 3 port > port1 | |
|
18 | 18 | mv port1 port |
|
19 | 19 | hg commit -m 4 -u spam -d '4 0' |
|
20 | 20 | hg grep port port |
@@ -6,7 +6,7 b' basic commands (use "hg help" for the fu' | |||
|
6 | 6 | annotate show changeset information per file line |
|
7 | 7 | clone make a copy of an existing repository |
|
8 | 8 | commit commit the specified files or all outstanding changes |
|
9 | diff diff |

9 | diff diff repository (or selected files) | |
|
10 | 10 | export dump the header and diffs for one or more changesets |
|
11 | 11 | init create a new repository in the given directory |
|
12 | 12 | log show revision history of entire repository or files |
@@ -22,7 +22,7 b' basic commands (use "hg help" for the fu' | |||
|
22 | 22 | annotate show changeset information per file line |
|
23 | 23 | clone make a copy of an existing repository |
|
24 | 24 | commit commit the specified files or all outstanding changes |
|
25 | diff diff |

25 | diff diff repository (or selected files) | |
|
26 | 26 | export dump the header and diffs for one or more changesets |
|
27 | 27 | init create a new repository in the given directory |
|
28 | 28 | log show revision history of entire repository or files |
@@ -46,7 +46,7 b' list of commands (use "hg help -v" to sh' | |||
|
46 | 46 | clone make a copy of an existing repository |
|
47 | 47 | commit commit the specified files or all outstanding changes |
|
48 | 48 | copy mark files as copied for the next commit |
|
49 | diff diff |

49 | diff diff repository (or selected files) | |
|
50 | 50 | export dump the header and diffs for one or more changesets |
|
51 | 51 | forget don't add the specified files on the next commit |
|
52 | 52 | grep search for a pattern in specified files and revisions |
@@ -88,7 +88,7 b' list of commands (use "hg help -v" to sh' | |||
|
88 | 88 | clone make a copy of an existing repository |
|
89 | 89 | commit commit the specified files or all outstanding changes |
|
90 | 90 | copy mark files as copied for the next commit |
|
91 | diff diff |

91 | diff diff repository (or selected files) | |
|
92 | 92 | export dump the header and diffs for one or more changesets |
|
93 | 93 | forget don't add the specified files on the next commit |
|
94 | 94 | grep search for a pattern in specified files and revisions |
@@ -130,8 +130,7 b' add the specified files on the next comm' | |||
|
130 | 130 | |
|
131 | 131 | The files will be added to the repository at the next commit. |
|
132 | 132 | |
|
133 | If no names are given, add all files in the |

134 | its subdirectories. | |
|
133 | If no names are given, add all files in the repository. | |
|
135 | 134 | |
|
136 | 135 | options: |
|
137 | 136 | |
@@ -146,8 +145,7 b' add the specified files on the next comm' | |||
|
146 | 145 | |
|
147 | 146 | The files will be added to the repository at the next commit. |
|
148 | 147 | |
|
149 | If no names are given, add all files in the |

150 | its subdirectories. | |
|
148 | If no names are given, add all files in the repository. | |
|
151 | 149 | |
|
152 | 150 | options: |
|
153 | 151 | |
@@ -155,7 +153,7 b' options:' | |||
|
155 | 153 | -X --exclude exclude names matching the given patterns |
|
156 | 154 | hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]... |
|
157 | 155 | |
|
158 | diff |

156 | diff repository (or selected files) | |
|
159 | 157 | |
|
160 | 158 | Show differences between revisions for the specified files. |
|
161 | 159 | |
@@ -181,9 +179,8 b' hg status [OPTION]... [FILE]...' | |||
|
181 | 179 | |
|
182 | 180 | show changed files in the working directory |
|
183 | 181 | |
|
184 | Show changed files in the |

185 | given, all files are shown. Otherwise, only files matching the | |
|
186 | given names are shown. | |
|
182 | Show changed files in the repository. If names are | |
|
183 | given, only files that match are shown. | |
|
187 | 184 | |
|
188 | 185 | The codes used to show the status of files are: |
|
189 | 186 | M = modified |
@@ -191,6 +188,8 b' show changed files in the working direct' | |||
|
191 | 188 | R = removed |
|
192 | 189 | ? = not tracked |
|
193 | 190 | |
|
191 | aliases: st | |
|
192 | ||
|
194 | 193 | options: |
|
195 | 194 | |
|
196 | 195 | -m --modified show only modified files |
@@ -213,7 +212,7 b' basic commands (use "hg help" for the fu' | |||
|
213 | 212 | annotate show changeset information per file line |
|
214 | 213 | clone make a copy of an existing repository |
|
215 | 214 | commit commit the specified files or all outstanding changes |
|
216 | diff diff |

215 | diff diff repository (or selected files) | |
|
217 | 216 | export dump the header and diffs for one or more changesets |
|
218 | 217 | init create a new repository in the given directory |
|
219 | 218 | log show revision history of entire repository or files |
@@ -234,7 +233,7 b' basic commands (use "hg help" for the fu' | |||
|
234 | 233 | annotate show changeset information per file line |
|
235 | 234 | clone make a copy of an existing repository |
|
236 | 235 | commit commit the specified files or all outstanding changes |
|
237 | diff diff |

236 | diff diff repository (or selected files) | |
|
238 | 237 | export dump the header and diffs for one or more changesets |
|
239 | 238 | init create a new repository in the given directory |
|
240 | 239 | log show revision history of entire repository or files |
@@ -42,4 +42,4 b' echo "relglob:*" > .hgignore' | |||
|
42 | 42 | echo "--" ; hg status |
|
43 | 43 | |
|
44 | 44 | cd dir |
|
45 | echo "--" ; hg status | |
|
45 | echo "--" ; hg status . |
@@ -39,3 +39,4 b' ln -sf nonexist dir/b.o' | |||
|
39 | 39 | mkfifo a.c |
|
40 | 40 | # it should show a.c, dir/a.o and dir/b.o removed |
|
41 | 41 | hg status |
|
42 | hg status a.c |
@@ -1,15 +1,11 b'' | |||
|
1 | bar: unsupported file type (type is symbolic link) | |
|
2 | 1 | adding foo |
|
3 | bar: unsupported file type (type is symbolic link) | |
|
4 | bar: unsupported file type (type is symbolic link) | |
|
5 | 2 | adding bomb |
|
6 | bar: unsupported file type (type is symbolic link) | |
|
7 | 3 | adding a.c |
|
8 | 4 | adding dir/a.o |
|
9 | 5 | adding dir/b.o |
|
10 | a.c: unsupported file type (type is fifo) | |
|
11 | dir/b.o: unsupported file type (type is symbolic link) | |
|
12 | 6 | R a.c |
|
13 | 7 | R dir/a.o |
|
14 | 8 | R dir/b.o |
|
15 | 9 | ? .hgignore |
|
10 | a.c: unsupported file type (type is fifo) | |
|
11 | R a.c |
@@ -11,3 +11,7 b' hg history' | |||
|
11 | 11 | echo foo >> .hgtags |
|
12 | 12 | hg tag -d "0 0" "bleah2" || echo "failed" |
|
13 | 13 | |
|
14 | hg tag -l 'xx | |
|
15 | newline' | |
|
16 | hg tag -l 'xx:xx' | |
|
17 | true |
@@ -18,3 +18,5 b' summary: test' | |||
|
18 | 18 | |
|
19 | 19 | abort: working copy of .hgtags is changed (please commit .hgtags manually) |
|
20 | 20 | failed |
|
21 | abort: '\n' cannot be used in a tag name | |
|
22 | abort: ':' cannot be used in a tag name |
@@ -20,14 +20,14 b' hg addremove' | |||
|
20 | 20 | hg commit -m "commit #0" -d "0 0" |
|
21 | 21 | hg debugwalk |
|
22 | 22 | cd mammals |
|
23 | hg debugwalk | |
|
23 | hg debugwalk . | |
|
24 | 24 | hg debugwalk Procyonidae |
|
25 | 25 | cd Procyonidae |
|
26 | hg debugwalk | |
|
26 | hg debugwalk . | |
|
27 | 27 | hg debugwalk .. |
|
28 | 28 | cd .. |
|
29 | 29 | hg debugwalk ../beans |
|
30 | hg debugwalk | |
|
30 | hg debugwalk . | |
|
31 | 31 | cd .. |
|
32 | 32 | hg debugwalk -Ibeans |
|
33 | 33 | hg debugwalk 'glob:mammals/../beans/b*' |