graphlog: pass changesets to revset.match() in changelog order...
Patrick Mezard
r16406:4aa4f50c default
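
A minimal, self-contained sketch of the idea behind this change (plain Python, not the Mercurial API; the helper name and example values below are illustrative only): the revset predicate is evaluated against the input revisions sorted into ascending changelog order, which this changeset assumes is cheaper, while the caller's original ordering is preserved in the filtered result.

def filter_preserving_order(revs, predicate):
    # Evaluate the predicate in ascending (changelog-like) order...
    matched = set(r for r in sorted(revs) if predicate(r))
    # ...but return the surviving revisions in the caller's original order.
    return [r for r in revs if r in matched]

# Example: a newest-first input keeps its ordering after filtering.
print(filter_preserving_order([9, 7, 5, 3, 1], lambda r: r > 4))  # [9, 7, 5]
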
@@ -1,535 +1,539 @@
 # ASCII graph log extension for Mercurial
 #
 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 '''command to view revision graphs from a shell
 
 This extension adds a --graph option to the incoming, outgoing and log
 commands. When this options is given, an ASCII representation of the
 revision graph is also shown.
 '''
 
 from mercurial.cmdutil import show_changeset
 from mercurial.commands import templateopts
 from mercurial.i18n import _
 from mercurial.node import nullrev
 from mercurial import cmdutil, commands, extensions, scmutil
 from mercurial import hg, util, graphmod, templatekw
 from mercurial import revset as revsetmod
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
 
 ASCIIDATA = 'ASC'
 
 def asciiedges(type, char, lines, seen, rev, parents):
     """adds edge info to changelog DAG walk suitable for ascii()"""
     if rev not in seen:
         seen.append(rev)
     nodeidx = seen.index(rev)
 
     knownparents = []
     newparents = []
     for parent in parents:
         if parent in seen:
             knownparents.append(parent)
         else:
             newparents.append(parent)
 
     ncols = len(seen)
     nextseen = seen[:]
     nextseen[nodeidx:nodeidx + 1] = newparents
     edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
 
     while len(newparents) > 2:
         # ascii() only knows how to add or remove a single column between two
         # calls. Nodes with more than two parents break this constraint so we
         # introduce intermediate expansion lines to grow the active node list
         # slowly.
         edges.append((nodeidx, nodeidx))
         edges.append((nodeidx, nodeidx + 1))
         nmorecols = 1
         yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
         char = '\\'
         lines = []
         nodeidx += 1
         ncols += 1
         edges = []
         del newparents[0]
 
     if len(newparents) > 0:
         edges.append((nodeidx, nodeidx))
     if len(newparents) > 1:
         edges.append((nodeidx, nodeidx + 1))
     nmorecols = len(nextseen) - ncols
     seen[:] = nextseen
     yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
 
 def fix_long_right_edges(edges):
     for (i, (start, end)) in enumerate(edges):
         if end > start:
             edges[i] = (start, end + 1)
 
 def get_nodeline_edges_tail(
         node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
     if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
         # Still going in the same non-vertical direction.
         if n_columns_diff == -1:
             start = max(node_index + 1, p_node_index)
             tail = ["|", " "] * (start - node_index - 1)
             tail.extend(["/", " "] * (n_columns - start))
             return tail
         else:
             return ["\\", " "] * (n_columns - node_index - 1)
     else:
         return ["|", " "] * (n_columns - node_index - 1)
 
 def draw_edges(edges, nodeline, interline):
     for (start, end) in edges:
         if start == end + 1:
             interline[2 * end + 1] = "/"
         elif start == end - 1:
             interline[2 * start + 1] = "\\"
         elif start == end:
             interline[2 * start] = "|"
         else:
             if 2 * end >= len(nodeline):
                 continue
             nodeline[2 * end] = "+"
             if start > end:
                 (start, end) = (end, start)
             for i in range(2 * start + 1, 2 * end):
                 if nodeline[i] != "+":
                     nodeline[i] = "-"
 
 def get_padding_line(ni, n_columns, edges):
     line = []
     line.extend(["|", " "] * ni)
     if (ni, ni - 1) in edges or (ni, ni) in edges:
         # (ni, ni - 1)      (ni, ni)
         # | | | |           | | | |
         # +---o |           | o---+
         # | | c |           | c | |
         # | |/ /            | |/ /
         # | | |             | | |
         c = "|"
     else:
         c = " "
     line.extend([c, " "])
     line.extend(["|", " "] * (n_columns - ni - 1))
     return line
 
 def asciistate():
     """returns the initial value for the "state" argument to ascii()"""
     return [0, 0]
 
 def ascii(ui, state, type, char, text, coldata):
     """prints an ASCII graph of the DAG
 
     takes the following arguments (one call per node in the graph):
 
       - ui to write to
       - Somewhere to keep the needed state in (init to asciistate())
       - Column of the current node in the set of ongoing edges.
       - Type indicator of node data == ASCIIDATA.
       - Payload: (char, lines):
         - Character to use as node's symbol.
         - List of lines to display as the node's text.
       - Edges; a list of (col, next_col) indicating the edges between
         the current node and its parents.
       - Number of columns (ongoing edges) in the current revision.
       - The difference between the number of columns (ongoing edges)
         in the next revision and the number of columns (ongoing edges)
         in the current revision. That is: -1 means one column removed;
         0 means no columns added or removed; 1 means one column added.
     """
 
     idx, edges, ncols, coldiff = coldata
     assert -2 < coldiff < 2
     if coldiff == -1:
         # Transform
         #
         #     | | |        | | |
         #     o | |  into  o---+
         #      |X /         |/ /
         #      | |          | |
         fix_long_right_edges(edges)
 
     # add_padding_line says whether to rewrite
     #
     #     | | | |        | | | |
     #     | o---+  into  | o---+
     #     |  / /         |   | |  # <--- padding line
     #     o | |          |  / /
     #                    o | |
     add_padding_line = (len(text) > 2 and coldiff == -1 and
                         [x for (x, y) in edges if x + 1 < y])
 
     # fix_nodeline_tail says whether to rewrite
     #
     #     | | o | |        | | o | |
     #     | | |/ /         | | |/ /
     #     | o | |    into  | o / /   # <--- fixed nodeline tail
     #     | |/ /           | |/ /
     #     o | |            o | |
     fix_nodeline_tail = len(text) <= 2 and not add_padding_line
 
     # nodeline is the line containing the node character (typically o)
     nodeline = ["|", " "] * idx
     nodeline.extend([char, " "])
 
     nodeline.extend(
         get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                 state[0], fix_nodeline_tail))
 
     # shift_interline is the line containing the non-vertical
     # edges between this entry and the next
     shift_interline = ["|", " "] * idx
     if coldiff == -1:
         n_spaces = 1
         edge_ch = "/"
     elif coldiff == 0:
         n_spaces = 2
         edge_ch = "|"
     else:
         n_spaces = 3
         edge_ch = "\\"
     shift_interline.extend(n_spaces * [" "])
     shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
 
     # draw edges from the current node to its parents
     draw_edges(edges, nodeline, shift_interline)
 
     # lines is the list of all graph lines to print
     lines = [nodeline]
     if add_padding_line:
         lines.append(get_padding_line(idx, ncols, edges))
     lines.append(shift_interline)
 
     # make sure that there are as many graph lines as there are
     # log strings
     while len(text) < len(lines):
         text.append("")
     if len(lines) < len(text):
         extra_interline = ["|", " "] * (ncols + coldiff)
         while len(lines) < len(text):
             lines.append(extra_interline)
 
     # print lines
     indentation_level = max(ncols, ncols + coldiff)
     for (line, logstr) in zip(lines, text):
         ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
         ui.write(ln.rstrip() + '\n')
 
     # ... and start over
     state[0] = coldiff
     state[1] = idx
 
 def get_revs(repo, rev_opt):
     if rev_opt:
         revs = scmutil.revrange(repo, rev_opt)
         if len(revs) == 0:
             return (nullrev, nullrev)
         return (max(revs), min(revs))
     else:
         return (len(repo) - 1, 0)
 
 def check_unsupported_flags(pats, opts):
     for op in ["newest_first"]:
         if op in opts and opts[op]:
             raise util.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
 
 def makefilematcher(repo, pats, followfirst):
     # When displaying a revision with --patch --follow FILE, we have
     # to know which file of the revision must be diffed. With
     # --follow, we want the names of the ancestors of FILE in the
     # revision, stored in "fcache". "fcache" is populated by
     # reproducing the graph traversal already done by --follow revset
     # and relating linkrevs to file names (which is not "correct" but
     # good enough).
     fcache = {}
     fcacheready = [False]
     pctx = repo['.']
     wctx = repo[None]
 
     def populate():
         for fn in pats:
             for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                 for c in i:
                     fcache.setdefault(c.linkrev(), set()).add(c.path())
 
     def filematcher(rev):
         if not fcacheready[0]:
             # Lazy initialization
             fcacheready[0] = True
             populate()
         return scmutil.match(wctx, fcache.get(rev, []), default='path')
 
     return filematcher
 
 def _makelogrevset(repo, pats, opts, revs):
     """Return (expr, filematcher) where expr is a revset string built
     from log options and file patterns or None. If --stat or --patch
     are not passed filematcher is None. Otherwise it is a callable
     taking a revision number and returning a match objects filtering
     the files to be detailed when displaying the revision.
     """
     opt2revset = {
         'follow': ('follow()', None),
         'follow_first': ('_followfirst()', None),
         'no_merges': ('not merge()', None),
         'only_merges': ('merge()', None),
         '_matchfiles': ('_matchfiles(%(val)s)', None),
         'date': ('date(%(val)r)', None),
         'branch': ('branch(%(val)r)', ' or '),
         '_patslog': ('filelog(%(val)r)', ' or '),
         '_patsfollow': ('follow(%(val)r)', ' or '),
         '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
         'keyword': ('keyword(%(val)r)', ' or '),
         'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
         'user': ('user(%(val)r)', ' or '),
         }
 
     opts = dict(opts)
     # follow or not follow?
     follow = opts.get('follow') or opts.get('follow_first')
     followfirst = opts.get('follow_first')
     if 'follow' in opts:
         del opts['follow']
     if 'follow_first' in opts:
         del opts['follow_first']
 
     # branch and only_branch are really aliases and must be handled at
     # the same time
     opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
     # pats/include/exclude are passed to match.match() directly in
     # _matchfile() revset but walkchangerevs() builds its matcher with
     # scmutil.match(). The difference is input pats are globbed on
     # platforms without shell expansion (windows).
     pctx = repo[None]
     match, pats = scmutil.matchandpats(pctx, pats, opts)
     slowpath = match.anypats() or (match.files() and opts.get('removed'))
     if not slowpath:
         for f in match.files():
             if follow and f not in pctx:
                 raise util.Abort(_('cannot follow file not in parent '
                                    'revision: "%s"') % f)
             filelog = repo.file(f)
             if not len(filelog):
                 # A zero count may be a directory or deleted file, so
                 # try to find matching entries on the slow path.
                 if follow:
                     raise util.Abort(
                         _('cannot follow nonexistent file: "%s"') % f)
                 slowpath = True
     if slowpath:
         # See cmdutil.walkchangerevs() slow path.
         #
         if follow:
             raise util.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))
         # pats/include/exclude cannot be represented as separate
         # revset expressions as their filtering logic applies at file
         # level. For instance "-I a -X a" matches a revision touching
         # "a" and "b" while "file(a) and not file(b)" does
         # not. Besides, filesets are evaluated against the working
         # directory.
         matchargs = ['r:']
         for p in pats:
             matchargs.append('p:' + p)
         for p in opts.get('include', []):
             matchargs.append('i:' + p)
         for p in opts.get('exclude', []):
             matchargs.append('x:' + p)
         matchargs = ','.join(('%r' % p) for p in matchargs)
         opts['_matchfiles'] = matchargs
     else:
         if follow:
             if followfirst:
                 if pats:
                     opts['_patsfollowfirst'] = list(pats)
                 else:
                     opts['follow_first'] = True
             else:
                 if pats:
                     opts['_patsfollow'] = list(pats)
                 else:
                     opts['follow'] = True
         else:
             opts['_patslog'] = list(pats)
 
     filematcher = None
     if opts.get('patch') or opts.get('stat'):
         if follow:
             filematcher = makefilematcher(repo, pats, followfirst)
         else:
             filematcher = lambda rev: match
 
     revset = []
     for op, val in opts.iteritems():
         if not val:
             continue
         if op not in opt2revset:
             continue
         revop, andor = opt2revset[op]
         if '%(val)' not in revop:
             revset.append(revop)
         else:
             if not isinstance(val, list):
                 expr = revop % {'val': val}
             else:
                 expr = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
             revset.append(expr)
 
     if revset:
         revset = '(' + ' and '.join(revset) + ')'
     else:
         revset = None
     return revset, filematcher
 
 def getlogrevs(repo, pats, opts):
     """Return (revs, expr, filematcher) where revs is a list of
     revision numbers, expr is a revset string built from log options
     and file patterns or None, and used to filter 'revs'. If --stat or
     --patch are not passed filematcher is None. Otherwise it is a
     callable taking a revision number and returning a match objects
     filtering the files to be detailed when displaying the revision.
     """
     if not len(repo):
         return [], None, None
     if opts.get('rev'):
         revs = scmutil.revrange(repo, opts['rev'])
     else:
         revs = range(len(repo))
     if not revs:
         return [], None, None
     expr, filematcher = _makelogrevset(repo, pats, opts, revs)
     if expr:
-        revs = revsetmod.match(repo.ui, expr)(repo, revs)
+        # Evaluate revisions in changelog order for performance
+        # reasons but preserve the original sequence order in the
+        # filtered result.
+        matched = set(revsetmod.match(repo.ui, expr)(repo, sorted(revs)))
+        revs = [r for r in revs if r in matched]
     return revs, expr, filematcher
 
 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
              filematcher=None):
     seen, state = [], asciistate()
     for rev, type, ctx, parents in dag:
         char = ctx.node() in showparents and '@' or 'o'
         copies = None
         if getrenamed and ctx.rev():
             copies = []
             for fn in ctx.files():
                 rename = getrenamed(fn, ctx.rev())
                 if rename:
                     copies.append((fn, rename[0]))
         revmatchfn = None
         if filematcher is not None:
             revmatchfn = filematcher(ctx.rev())
         displayer.show(ctx, copies=copies, matchfn=revmatchfn)
         lines = displayer.hunk.pop(rev).split('\n')[:-1]
         displayer.flush(rev)
         edges = edgefn(type, char, lines, seen, rev, parents)
         for type, char, lines, coldata in edges:
             ascii(ui, state, type, char, lines, coldata)
     displayer.close()
 
 @command('glog',
     [('l', 'limit', '',
      _('limit number of changes displayed'), _('NUM')),
     ('p', 'patch', False, _('show patch')),
     ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
     ] + templateopts,
     _('hg glog [OPTION]... [FILE]'))
 def graphlog(ui, repo, *pats, **opts):
     """show revision history alongside an ASCII revision graph
 
     Print a revision history alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
 
     check_unsupported_flags(pats, opts)
 
     revs, expr, filematcher = getlogrevs(repo, pats, opts)
     revs = sorted(revs, reverse=1)
     limit = cmdutil.loglimit(opts)
     if limit is not None:
         revs = revs[:limit]
     revdag = graphmod.dagwalker(repo, revs)
 
     getrenamed = None
     if opts.get('copies'):
         endrev = None
         if opts.get('rev'):
             endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
         getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
     displayer = show_changeset(ui, repo, opts, buffered=True)
     showparents = [ctx.node() for ctx in repo[None].parents()]
     generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
              filematcher)
 
 def graphrevs(repo, nodes, opts):
     limit = cmdutil.loglimit(opts)
     nodes.reverse()
     if limit is not None:
         nodes = nodes[:limit]
     return graphmod.nodes(repo, nodes)
 
 def goutgoing(ui, repo, dest=None, **opts):
     """show the outgoing changesets alongside an ASCII revision graph
 
     Print the outgoing changesets alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
 
     check_unsupported_flags([], opts)
     o = hg._outgoing(ui, repo, dest, opts)
     if o is None:
         return
 
     revdag = graphrevs(repo, o, opts)
     displayer = show_changeset(ui, repo, opts, buffered=True)
     showparents = [ctx.node() for ctx in repo[None].parents()]
     generate(ui, revdag, displayer, showparents, asciiedges)
 
 def gincoming(ui, repo, source="default", **opts):
     """show the incoming changesets alongside an ASCII revision graph
 
     Print the incoming changesets alongside a revision graph drawn with
     ASCII characters.
 
     Nodes printed as an @ character are parents of the working
     directory.
     """
     def subreporecurse():
         return 1
 
     check_unsupported_flags([], opts)
     def display(other, chlist, displayer):
         revdag = graphrevs(other, chlist, opts)
         showparents = [ctx.node() for ctx in repo[None].parents()]
         generate(ui, revdag, displayer, showparents, asciiedges)
 
     hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
 
 def uisetup(ui):
     '''Initialize the extension.'''
     _wrapcmd('log', commands.table, graphlog)
     _wrapcmd('incoming', commands.table, gincoming)
     _wrapcmd('outgoing', commands.table, goutgoing)
 
 def _wrapcmd(cmd, table, wrapfn):
     '''wrap the command'''
     def graph(orig, *args, **kwargs):
         if kwargs['graph']:
             return wrapfn(*args, **kwargs)
         return orig(*args, **kwargs)
     entry = extensions.wrapcommand(table, cmd, graph)
     entry[1].append(('G', 'graph', None, _("show the revision DAG")))