graphlog, win32mbcs: capitalize ASCII
Martin Geisler
r8667:59450775 default
@@ -1,416 +1,416 @@
 # ASCII graph log extension for Mercurial
 #
 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
 '''show revision graphs in terminal windows
 
 This extension adds a --graph option to the incoming, outgoing and log
-commands. When this options is given, an ascii representation of the
+commands. When this options is given, an ASCII representation of the
 revision graph is also shown.
 '''
 
 import os
 from mercurial.cmdutil import revrange, show_changeset
 from mercurial.commands import templateopts
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
 from mercurial import hg, url, util
 
 def revisions(repo, start, stop):
     """cset DAG generator yielding (rev, node, [parents]) tuples
 
     This generator function walks through the revision history from revision
     start to revision stop (which must be less than or equal to start).
     """
     assert start >= stop
     cur = start
     while cur >= stop:
         ctx = repo[cur]
         parents = [p.rev() for p in ctx.parents() if p.rev() != nullrev]
         parents.sort()
         yield (ctx, parents)
         cur -= 1
 
 def filerevs(repo, path, start, stop):
     """file cset DAG generator yielding (rev, node, [parents]) tuples
 
     This generator function walks through the revision history of a single
     file from revision start to revision stop (which must be less than or
     equal to start).
     """
     assert start >= stop
     filerev = len(repo.file(path)) - 1
     while filerev >= 0:
         fctx = repo.filectx(path, fileid=filerev)
         parents = [f.linkrev() for f in fctx.parents() if f.path() == path]
         parents.sort()
         if fctx.rev() <= start:
             yield (fctx, parents)
             if fctx.rev() <= stop:
                 break
         filerev -= 1
 
 def grapher(nodes):
     """grapher for asciigraph on a list of nodes and their parents
 
     nodes must generate tuples (node, parents, char, lines) where
      - parents must generate the parents of node, in sorted order,
        and max length 2,
      - char is the char to print as the node symbol, and
      - lines are the lines to display next to the node.
     """
     seen = []
     for node, parents, char, lines in nodes:
         if node not in seen:
             seen.append(node)
         nodeidx = seen.index(node)
 
         knownparents = []
         newparents = []
         for parent in parents:
             if parent in seen:
                 knownparents.append(parent)
             else:
                 newparents.append(parent)
 
         ncols = len(seen)
         nextseen = seen[:]
         nextseen[nodeidx:nodeidx + 1] = newparents
         edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
 
         if len(newparents) > 0:
             edges.append((nodeidx, nodeidx))
         if len(newparents) > 1:
             edges.append((nodeidx, nodeidx + 1))
         nmorecols = len(nextseen) - ncols
         seen = nextseen
         yield (char, lines, nodeidx, edges, ncols, nmorecols)
 
 def fix_long_right_edges(edges):
     for (i, (start, end)) in enumerate(edges):
         if end > start:
             edges[i] = (start, end + 1)
 
 def get_nodeline_edges_tail(
         node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
     if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
         # Still going in the same non-vertical direction.
         if n_columns_diff == -1:
             start = max(node_index + 1, p_node_index)
             tail = ["|", " "] * (start - node_index - 1)
             tail.extend(["/", " "] * (n_columns - start))
             return tail
         else:
             return ["\\", " "] * (n_columns - node_index - 1)
     else:
         return ["|", " "] * (n_columns - node_index - 1)
 
 def draw_edges(edges, nodeline, interline):
     for (start, end) in edges:
         if start == end + 1:
             interline[2 * end + 1] = "/"
         elif start == end - 1:
             interline[2 * start + 1] = "\\"
         elif start == end:
             interline[2 * start] = "|"
         else:
             nodeline[2 * end] = "+"
             if start > end:
                 (start, end) = (end,start)
             for i in range(2 * start + 1, 2 * end):
                 if nodeline[i] != "+":
                     nodeline[i] = "-"
 
 def get_padding_line(ni, n_columns, edges):
     line = []
     line.extend(["|", " "] * ni)
     if (ni, ni - 1) in edges or (ni, ni) in edges:
         # (ni, ni - 1)      (ni, ni)
         # | | | |           | | | |
         # +---o |           | o---+
         # | | c |           | c | |
         # | |/ /            | |/ /
         # | | |             | | |
         c = "|"
     else:
         c = " "
     line.extend([c, " "])
     line.extend(["|", " "] * (n_columns - ni - 1))
     return line
 
 def ascii(ui, grapher):
     """prints an ASCII graph of the DAG returned by the grapher
 
     grapher is a generator that emits tuples with the following elements:
 
       - Character to use as node's symbol.
       - List of lines to display as the node's text.
       - Column of the current node in the set of ongoing edges.
       - Edges; a list of (col, next_col) indicating the edges between
         the current node and its parents.
       - Number of columns (ongoing edges) in the current revision.
       - The difference between the number of columns (ongoing edges)
         in the next revision and the number of columns (ongoing edges)
         in the current revision. That is: -1 means one column removed;
         0 means no columns added or removed; 1 means one column added.
     """
     prev_n_columns_diff = 0
     prev_node_index = 0
     for (node_ch, node_lines, node_index, edges, n_columns, n_columns_diff) in grapher:
 
         assert -2 < n_columns_diff < 2
         if n_columns_diff == -1:
             # Transform
             #
             #     | | |        | | |
             #     o | |  into  o---+
             #     |X /         |/ /
             #     | |          | |
             fix_long_right_edges(edges)
 
         # add_padding_line says whether to rewrite
         #
         #     | | | |        | | | |
         #     | o---+  into  | o---+
         #     |  / /         |   | |  # <--- padding line
         #     o | |          |  / /
         #                    o | |
         add_padding_line = (len(node_lines) > 2 and
                             n_columns_diff == -1 and
                             [x for (x, y) in edges if x + 1 < y])
 
         # fix_nodeline_tail says whether to rewrite
         #
         #     | | o | |        | | o | |
         #     | | |/ /         | | |/ /
         #     | o | |    into  | o / /   # <--- fixed nodeline tail
         #     | |/ /           | |/ /
         #     o | |            o | |
         fix_nodeline_tail = len(node_lines) <= 2 and not add_padding_line
 
         # nodeline is the line containing the node character (typically o)
         nodeline = ["|", " "] * node_index
         nodeline.extend([node_ch, " "])
 
         nodeline.extend(
             get_nodeline_edges_tail(
                 node_index, prev_node_index, n_columns, n_columns_diff,
                 prev_n_columns_diff, fix_nodeline_tail))
 
         # shift_interline is the line containing the non-vertical
         # edges between this entry and the next
         shift_interline = ["|", " "] * node_index
         if n_columns_diff == -1:
             n_spaces = 1
             edge_ch = "/"
         elif n_columns_diff == 0:
             n_spaces = 2
             edge_ch = "|"
         else:
             n_spaces = 3
             edge_ch = "\\"
         shift_interline.extend(n_spaces * [" "])
         shift_interline.extend([edge_ch, " "] * (n_columns - node_index - 1))
 
         # draw edges from the current node to its parents
         draw_edges(edges, nodeline, shift_interline)
 
         # lines is the list of all graph lines to print
         lines = [nodeline]
         if add_padding_line:
             lines.append(get_padding_line(node_index, n_columns, edges))
         lines.append(shift_interline)
 
         # make sure that there are as many graph lines as there are
         # log strings
         while len(node_lines) < len(lines):
             node_lines.append("")
         if len(lines) < len(node_lines):
             extra_interline = ["|", " "] * (n_columns + n_columns_diff)
             while len(lines) < len(node_lines):
                 lines.append(extra_interline)
 
         # print lines
         indentation_level = max(n_columns, n_columns + n_columns_diff)
         for (line, logstr) in zip(lines, node_lines):
             ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
             ui.write(ln.rstrip() + '\n')
 
         # ... and start over
         prev_node_index = node_index
         prev_n_columns_diff = n_columns_diff
 
 def get_revs(repo, rev_opt):
     if rev_opt:
         revs = revrange(repo, rev_opt)
         return (max(revs), min(revs))
     else:
         return (len(repo) - 1, 0)
 
 def check_unsupported_flags(opts):
     for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
                "only_merges", "user", "only_branch", "prune", "newest_first",
                "no_merges", "include", "exclude"]:
         if op in opts and opts[op]:
             raise util.Abort(_("--graph option is incompatible with --%s") % op)
 
 def graphlog(ui, repo, path=None, **opts):
     """show revision history alongside an ASCII revision graph
 
     Print a revision history alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
 
     check_unsupported_flags(opts)
     limit = cmdutil.loglimit(opts)
     start, stop = get_revs(repo, opts["rev"])
     stop = max(stop, start - limit + 1)
     if start == nullrev:
         return
 
     if path:
         path = util.canonpath(repo.root, os.getcwd(), path)
     if path: # could be reset in canonpath
         revdag = filerevs(repo, path, start, stop)
     else:
         revdag = revisions(repo, start, stop)
 
     graphdag = graphabledag(ui, repo, revdag, opts)
     ascii(ui, grapher(graphdag))
 
 def graphrevs(repo, nodes, opts):
     include = set(nodes)
     limit = cmdutil.loglimit(opts)
     count = 0
     for node in reversed(nodes):
         if count >= limit:
             break
         ctx = repo[node]
         parents = [p.rev() for p in ctx.parents() if p.node() in include]
         parents.sort()
         yield (ctx, parents)
         count += 1
 
 def graphabledag(ui, repo, revdag, opts):
     showparents = [ctx.node() for ctx in repo[None].parents()]
     displayer = show_changeset(ui, repo, opts, buffered=True)
     for (ctx, parents) in revdag:
         displayer.show(ctx)
         lines = displayer.hunk.pop(ctx.rev()).split('\n')[:-1]
         char = ctx.node() in showparents and '@' or 'o'
         yield (ctx.rev(), parents, char, lines)
 
 def goutgoing(ui, repo, dest=None, **opts):
     """show the outgoing changesets alongside an ASCII revision graph
 
     Print the outgoing changesets alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
 
     check_unsupported_flags(opts)
     dest, revs, checkout = hg.parseurl(
         ui.expandpath(dest or 'default-push', dest or 'default'),
         opts.get('rev'))
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
     other = hg.repository(cmdutil.remoteui(ui, opts), dest)
     ui.status(_('comparing with %s\n') % url.hidepassword(dest))
     o = repo.findoutgoing(other, force=opts.get('force'))
     if not o:
         ui.status(_("no changes found\n"))
         return
 
     o = repo.changelog.nodesbetween(o, revs)[0]
     revdag = graphrevs(repo, o, opts)
     graphdag = graphabledag(ui, repo, revdag, opts)
     ascii(ui, grapher(graphdag))
 
 def gincoming(ui, repo, source="default", **opts):
     """show the incoming changesets alongside an ASCII revision graph
 
     Print the incoming changesets alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
 
     check_unsupported_flags(opts)
     source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
     other = hg.repository(cmdutil.remoteui(repo, opts), source)
     ui.status(_('comparing with %s\n') % url.hidepassword(source))
     if revs:
         revs = [other.lookup(rev) for rev in revs]
     incoming = repo.findincoming(other, heads=revs, force=opts["force"])
     if not incoming:
         try:
             os.unlink(opts["bundle"])
         except:
             pass
         ui.status(_("no changes found\n"))
         return
 
     cleanup = None
     try:
 
         fname = opts["bundle"]
         if fname or not other.local():
             # create a bundle (uncompressed if other repo is not local)
             if revs is None:
                 cg = other.changegroup(incoming, "incoming")
             else:
                 cg = other.changegroupsubset(incoming, revs, 'incoming')
             bundletype = other.local() and "HG10BZ" or "HG10UN"
             fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
             # keep written bundle?
             if opts["bundle"]:
                 cleanup = None
         if not other.local():
             # use the created uncompressed bundlerepo
             other = bundlerepo.bundlerepository(ui, repo.root, fname)
 
         chlist = other.changelog.nodesbetween(incoming, revs)[0]
         revdag = graphrevs(other, chlist, opts)
         graphdag = graphabledag(ui, repo, revdag, opts)
         ascii(ui, grapher(graphdag))
 
     finally:
         if hasattr(other, 'close'):
             other.close()
         if cleanup:
             os.unlink(cleanup)
 
 def uisetup(ui):
     '''Initialize the extension.'''
     _wrapcmd(ui, 'log', commands.table, graphlog)
     _wrapcmd(ui, 'incoming', commands.table, gincoming)
     _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
 
 def _wrapcmd(ui, cmd, table, wrapfn):
     '''wrap the command'''
     def graph(orig, *args, **kwargs):
         if kwargs['graph']:
             return wrapfn(*args, **kwargs)
         return orig(*args, **kwargs)
     entry = extensions.wrapcommand(table, cmd, graph)
     entry[1].append(('G', 'graph', None, _("show the revision DAG")))
 
 cmdtable = {
     "glog":
         (graphlog,
          [('l', 'limit', '', _('limit number of changes displayed')),
           ('p', 'patch', False, _('show patch')),
           ('r', 'rev', [], _('show the specified revision or range')),
          ] + templateopts,
          _('hg glog [OPTION]... [FILE]')),
 }
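
The grapher() and ascii() docstrings in the file above describe a small two-stage protocol: a source generator yields (node, parents, char, lines) tuples newest-first, grapher() turns each one into a (char, lines, column, edges, n_columns, n_columns_diff) tuple, and ascii() renders those as the graph that hg glog prints. Below is a minimal standalone sketch of that protocol, assuming a Mercurial of this vintage is importable so that hgext.graphlog and mercurial.ui resolve; the toy DAG and log lines are invented for illustration.

    # Feed grapher()/ascii() a hand-written DAG instead of a repository.
    from mercurial import ui as uimod
    from hgext.graphlog import ascii, grapher

    # (node, sorted parents (max two), node symbol, log lines), newest first.
    toydag = [
        (4, [2, 3], 'o', ['rev 4: merge']),
        (3, [1],    'o', ['rev 3: feature head']),
        (2, [1],    '@', ['rev 2: working dir parent']),
        (1, [0],    'o', ['rev 1']),
        (0, [],     'o', ['rev 0: root']),
    ]

    # grapher() computes columns and edges; ascii() draws them via ui.write().
    ascii(uimod.ui(), grapher(toydag))

Run as a script, this should print a five-row graph with the merge at the top and the @ marking the pretend working-directory parents, the same layout hg glog produces for a real repository.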
@@ -1,126 +1,126 @@
 # win32mbcs.py -- MBCS filename support for Mercurial
 #
 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
 #
 # Version: 0.2
 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 #
 
 """allow to use MBCS path with problematic encoding.
 
 Some MBCS encodings are not good for some path operations (i.e.
 splitting path, case conversion, etc.) with its encoded bytes. We call
 such a encoding (i.e. shift_jis and big5) as "problematic encoding".
 This extension can be used to fix the issue with those encodings by
 wrapping some functions to convert to Unicode string before path
 operation.
 
 This extension is usefull for:
  * Japanese Windows users using shift_jis encoding.
  * Chinese Windows users using big5 encoding.
  * All users who use a repository with one of problematic encodings on
    case-insensitive file system.
 
 This extension is not needed for:
- * Any user who use only ascii chars in path.
+ * Any user who use only ASCII chars in path.
  * Any user who do not use any of problematic encodings.
 
 Note that there are some limitations on using this extension:
  * You should use single encoding in one repository.
  * You should set same encoding for the repository by locale or
    HGENCODING.
 
 To use this extension, enable the extension in .hg/hgrc or ~/.hgrc:
 
   [extensions]
   hgext.win32mbcs =
 
 Path encoding conversion are done between Unicode and
 encoding.encoding which is decided by mercurial from current locale
 setting or HGENCODING.
 
 """
 
 import os
 from mercurial.i18n import _
 from mercurial import util, encoding
 
 def decode(arg):
     if isinstance(arg, str):
         uarg = arg.decode(encoding.encoding)
         if arg == uarg.encode(encoding.encoding):
             return uarg
         raise UnicodeError("Not local encoding")
     elif isinstance(arg, tuple):
         return tuple(map(decode, arg))
     elif isinstance(arg, list):
         return map(decode, arg)
     return arg
 
 def encode(arg):
     if isinstance(arg, unicode):
         return arg.encode(encoding.encoding)
     elif isinstance(arg, tuple):
         return tuple(map(encode, arg))
     elif isinstance(arg, list):
         return map(encode, arg)
     return arg
 
 def wrapper(func, args):
     # check argument is unicode, then call original
     for arg in args:
         if isinstance(arg, unicode):
             return func(*args)
 
     try:
         # convert arguments to unicode, call func, then convert back
         return encode(func(*decode(args)))
     except UnicodeError:
         # If not encoded with encoding.encoding, report it then
         # continue with calling original function.
         raise util.Abort(_("[win32mbcs] filename conversion fail with"
                            " %s encoding\n") % (encoding.encoding))
 
 def wrapname(name):
     idx = name.rfind('.')
     module = name[:idx]
     name = name[idx+1:]
     module = globals()[module]
     func = getattr(module, name)
     def f(*args):
         return wrapper(func, args)
     try:
         f.__name__ = func.__name__ # fail with python23
     except Exception:
         pass
     setattr(module, name, f)
 
 # List of functions to be wrapped.
 # NOTE: os.path.dirname() and os.path.basename() are safe because
 # they use result of os.path.split()
 funcs = '''os.path.join os.path.split os.path.splitext
  os.path.splitunc os.path.normpath os.path.normcase os.makedirs
  util.endswithsep util.splitpath util.checkcase util.fspath'''
 
 # codec and alias names of sjis and big5 to be faked.
 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
  hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
  sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
  shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213'''
 
 def reposetup(ui, repo):
     # TODO: decide use of config section for this extension
     if not os.path.supports_unicode_filenames:
         ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
         return
 
     # fake is only for relevant environment.
     if encoding.encoding.lower() in problematic_encodings.split():
         for f in funcs.split():
             wrapname(f)
         ui.debug(_("[win32mbcs] activated with encoding: %s\n")
                  % encoding.encoding)
 
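
The wrapper() above is the core of this extension: it decodes byte-string arguments into Unicode, calls the original path function, and encodes the result back, because problematic encodings such as shift_jis reuse the backslash byte 0x5c inside multi-byte characters. Below is a small standalone sketch of that failure mode, with an example path invented for illustration, written in Python 2 like the extension itself and using ntpath so it also runs off Windows.

    # The katakana letter "so" is 0x83 0x5c in shift_jis; 0x5c is also '\'.
    import ntpath

    upath = u'a\\\u30bdb.txt'             # u'a\<katakana so>b.txt'
    bpath = upath.encode('shift_jis')     # 'a\\\x83\\b.txt' -- stray 0x5c byte

    print ntpath.split(bpath)   # ('a\\\x83', 'b.txt')  -- character torn apart
    print ntpath.split(upath)   # (u'a', u'\u30bdb.txt') -- correct

    # win32mbcs sidesteps this by wrapping os.path.split and friends to do the
    # decode-operate-encode round trip shown in decode()/encode()/wrapper().

Once the wrapped functions see Unicode arguments the underlying operations behave correctly, which is all the extension needs.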