##// END OF EJS Templates
graphlog: cleanup before code move...
Patrick Mezard -
r16412:1a10bee8 default
parent child Browse files
Show More
@@ -1,553 +1,552 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell
8 '''command to view revision graphs from a shell
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this options is given, an ASCII representation of the
11 commands. When this options is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 from mercurial.cmdutil import show_changeset
15 from mercurial.cmdutil import show_changeset
16 from mercurial.commands import templateopts
16 from mercurial.commands import templateopts
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import nullrev
18 from mercurial.node import nullrev
19 from mercurial import cmdutil, commands, extensions, scmutil
19 from mercurial import cmdutil, commands, extensions, scmutil
20 from mercurial import hg, util, graphmod, templatekw
20 from mercurial import hg, util, graphmod, templatekw, revset
21 from mercurial import revset as revsetmod
22
21
# Command table for this extension; populated by the @command decorator.
cmdtable = {}
command = cmdutil.command(cmdtable)

# Type tag used for ASCII graph payloads in DAG walks.
ASCIIDATA = 'ASC'
27
26
def asciiedges(type, char, lines, seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()

    Yields (type, char, lines, (nodeidx, edges, ncols, nmorecols))
    tuples. 'seen' is the mutable list of active columns and is updated
    in place to reflect the columns of the next row.
    """
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # Split parents into those already occupying a column and new ones.
    knownparents = []
    newparents = []
    for parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)

    ncols = len(seen)
    nextseen = seen[:]
    nextseen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, nextseen.index(p)) for p in knownparents]

    while len(newparents) > 2:
        # ascii() only knows how to add or remove a single column between two
        # calls. Nodes with more than two parents break this constraint so we
        # introduce intermediate expansion lines to grow the active node list
        # slowly.
        edges.append((nodeidx, nodeidx))
        edges.append((nodeidx, nodeidx + 1))
        nmorecols = 1
        yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
        char = '\\'
        lines = []
        nodeidx += 1
        ncols += 1
        edges = []
        del newparents[0]

    if len(newparents) > 0:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))
    nmorecols = len(nextseen) - ncols
    seen[:] = nextseen
    yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
70
69
def fix_long_right_edges(edges):
    """Widen every right-going edge by one column, in place.

    Used when a column is being removed (coldiff == -1) so that
    right-going edges still land on the correct shifted column.
    """
    for (i, (start, end)) in enumerate(edges):
        if end > start:
            edges[i] = (start, end + 1)
75
74
def get_nodeline_edges_tail(
        node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Return the list of edge characters to the right of the node symbol.

    When fix_tail is set and the graph keeps moving in the same
    non-vertical direction, slanted edges ('/' or '\\') are emitted so
    the diagonal stays continuous; otherwise plain vertical bars.
    """
    if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
        # Still going in the same non-vertical direction.
        if n_columns_diff == -1:
            start = max(node_index + 1, p_node_index)
            tail = ["|", " "] * (start - node_index - 1)
            tail.extend(["/", " "] * (n_columns - start))
            return tail
        else:
            return ["\\", " "] * (n_columns - node_index - 1)
    else:
        return ["|", " "] * (n_columns - node_index - 1)
89
88
def draw_edges(edges, nodeline, interline):
    """Render parent edges into nodeline/interline character lists, in place.

    Adjacent-column edges become '/' or '\\' in the interline, vertical
    edges become '|', and edges spanning several columns are drawn as a
    horizontal '-' run terminated by '+' on the node line.
    """
    for (start, end) in edges:
        if start == end + 1:
            interline[2 * end + 1] = "/"
        elif start == end - 1:
            interline[2 * start + 1] = "\\"
        elif start == end:
            interline[2 * start] = "|"
        else:
            if 2 * end >= len(nodeline):
                # target column not drawn on this line; nothing to mark
                continue
            nodeline[2 * end] = "+"
            if start > end:
                (start, end) = (end, start)
            for i in range(2 * start + 1, 2 * end):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
107
106
def get_padding_line(ni, n_columns, edges):
    """Return an extra interline inserted below a multi-column merge row.

    'ni' is the node's column index; the padding keeps a '|' in the
    node's own column only when an edge actually passes through it.
    """
    line = []
    line.extend(["|", " "] * ni)
    if (ni, ni - 1) in edges or (ni, ni) in edges:
        # (ni, ni - 1)      (ni, ni)
        # | | | |           | | | |
        # +---o |           | o---+
        # | | c |           | c | |
        # | |/ /            | |/ /
        # | | |             | | |
        c = "|"
    else:
        c = " "
    line.extend([c, " "])
    line.extend(["|", " "] * (n_columns - ni - 1))
    return line
124
123
def asciistate():
    """returns the initial value for the "state" argument to ascii()

    state[0] is the previous coldiff, state[1] the previous node column.
    """
    return [0, 0]
128
127
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data == ASCIIDATA.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        fix_long_right_edges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    nodeline.extend(
        get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    draw_edges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(get_padding_line(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over
    state[0] = coldiff
    state[1] = idx
230
229
def get_revs(repo, rev_opt):
    """Return (max, min) revision numbers selected by rev_opt.

    Falls back to the whole repo range (tip, 0) when no revisions are
    requested, and (nullrev, nullrev) when the requested range is empty.
    """
    if rev_opt:
        revs = scmutil.revrange(repo, rev_opt)
        if len(revs) == 0:
            return (nullrev, nullrev)
        return (max(revs), min(revs))
    else:
        return (len(repo) - 1, 0)
239
238
def check_unsupported_flags(pats, opts):
    """Abort when an option incompatible with -G/--graph was given."""
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
245
244
def _makefilematcher(repo, pats, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']
    wctx = repo[None]

    def populate():
        # Map each ancestor linkrev to the set of file names it touches.
        for fn in pats:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.match(wctx, fcache.get(rev, []), default='path')

    return filematcher
273
272
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option to (revset template, join operator for list values).
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    followfirst = opts.get('follow_first')
    # --follow with FILE behaviour depends on revs...
    startrev = revs[0]
    followdescendants = len(revs) > 1 and revs[0] < revs[1]

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfile() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in pctx:
                raise util.Abort(_('cannot follow file not in parent '
                                   'revision: "%s"') % f)
            filelog = repo.file(f)
            if not len(filelog):
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True
    if slowpath:
        # See cmdutil.walkchangerevs() slow path.
        #
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
    else:
        if follow:
            if followfirst:
                if pats:
                    opts['_patsfollowfirst'] = list(pats)
                else:
                    if followdescendants:
                        opts['_fdescendants'] = str(startrev)
                    else:
                        opts['_fancestors'] = str(startrev)
            else:
                if pats:
                    opts['_patsfollow'] = list(pats)
                else:
                    if followdescendants:
                        opts['_descendants'] = str(startrev)
                    else:
                        opts['_ancestors'] = str(startrev)
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        if follow:
            filematcher = _makefilematcher(repo, pats, followfirst)
        else:
            filematcher = lambda rev: match

    # Translate the remaining options into revset terms, 'and'-ed together.
    expr = []
    for op, val in opts.iteritems():
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
401
400
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is a list of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    if not len(repo):
        return [], None, None
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    else:
        if follow and len(repo) > 0:
            revs = scmutil.revrange(repo, ['.:0'])
        else:
            revs = range(len(repo) - 1, -1, -1)
    if not revs:
        return [], None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Evaluate revisions in changelog order for performance
        # reasons but preserve the original sequence order in the
        # filtered result.
        matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
        revs = [r for r in revs if r in matched]
    return revs, expr, filematcher
432
431
def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
             filematcher=None):
    """Walk 'dag' and print each node with its ASCII graph decoration.

    'showparents' lists nodes drawn as '@' (working directory parents);
    'edgefn' converts each DAG entry into ascii() column data;
    'getrenamed' and 'filematcher' optionally enable copy tracking and
    per-revision file filtering.
    """
    seen, state = [], asciistate()
    for rev, type, ctx, parents in dag:
        char = ctx.node() in showparents and '@' or 'o'
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')[:-1]
        displayer.flush(rev)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            ascii(ui, state, type, char, lines, coldata)
    displayer.close()
455
454
@command('glog',
    [('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('p', 'patch', False, _('show patch')),
    ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
    ] + templateopts,
    _('hg glog [OPTION]... [FILE]'))
def graphlog(ui, repo, *pats, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags(pats, opts)

    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    revs = sorted(revs, reverse=1)
    limit = cmdutil.loglimit(opts)
    if limit is not None:
        revs = revs[:limit]
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            # resolve copies up to the highest requested revision
            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
             filematcher)
492
491
def graphrevs(repo, nodes, opts):
    """Return a graphmod DAG walker over ``nodes``, newest first.

    ``nodes`` is a list of changeset nodes in oldest-first order; the
    result yields them newest-first, truncated to --limit if given.
    """
    limit = cmdutil.loglimit(opts)
    # Work on a reversed copy: the original called nodes.reverse(),
    # mutating the caller's list in place as a side effect.
    nodes = list(reversed(nodes))
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)
499
498
def goutgoing(ui, repo, dest=None, **opts):
    """show the outgoing changesets alongside an ASCII revision graph

    Print the outgoing changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags([], opts)
    outgoing = hg._outgoing(ui, repo, dest, opts)
    if outgoing is None:
        # Nothing outgoing (hg._outgoing already reported); stop here.
        return

    # Build the DAG walker and render it, marking working-directory
    # parents with '@'.
    parents = [ctx.node() for ctx in repo[None].parents()]
    displayer = show_changeset(ui, repo, opts, buffered=True)
    generate(ui, graphrevs(repo, outgoing, opts), displayer, parents,
             asciiedges)
519
518
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    check_unsupported_flags([], opts)

    def subreporecurse():
        # Recurse into subrepositories unconditionally.
        return 1

    def display(other, chlist, displayer):
        # Callback invoked by hg._incoming with the remote repo and the
        # incoming changesets; draw them as a graph.
        parents = [ctx.node() for ctx in repo[None].parents()]
        dag = graphrevs(other, chlist, opts)
        generate(ui, dag, displayer, parents, asciiedges)

    hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
539
538
def uisetup(ui):
    '''Initialize the extension.'''
    # Attach the --graph flag and graph-aware wrappers to the three
    # core commands that can render a DAG.
    for cmd, wrapper in (('log', graphlog),
                         ('incoming', gincoming),
                         ('outgoing', goutgoing)):
        _wrapcmd(cmd, commands.table, wrapper)
545
544
def _wrapcmd(cmd, table, wrapfn):
    '''wrap the command'''
    def graph(orig, *args, **kwargs):
        # Without --graph, fall straight through to the wrapped command.
        if not kwargs['graph']:
            return orig(*args, **kwargs)
        return wrapfn(*args, **kwargs)
    wrapped = extensions.wrapcommand(table, cmd, graph)
    # Advertise the new flag in the command's option table.
    wrapped[1].append(('G', 'graph', None, _("show the revision DAG")))
General Comments 0
You need to be logged in to leave comments. Login now