##// END OF EJS Templates
graphlog: add all log options to glog command...
Patrick Mezard -
r16432:365bb0fa default
parent child Browse files
Show More
@@ -1,556 +1,569 b''
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell
8 '''command to view revision graphs from a shell
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this option is given, an ASCII representation of the
11 commands. When this option is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 from mercurial.cmdutil import show_changeset
15 from mercurial.cmdutil import show_changeset
16 from mercurial.commands import templateopts
17 from mercurial.i18n import _
16 from mercurial.i18n import _
18 from mercurial.node import nullrev
17 from mercurial.node import nullrev
19 from mercurial import cmdutil, commands, extensions, scmutil
18 from mercurial import cmdutil, commands, extensions, scmutil
20 from mercurial import hg, util, graphmod, templatekw, revset
19 from mercurial import hg, util, graphmod, templatekw, revset
21
20
22 cmdtable = {}
21 cmdtable = {}
23 command = cmdutil.command(cmdtable)
22 command = cmdutil.command(cmdtable)
24
23
25 ASCIIDATA = 'ASC'
24 ASCIIDATA = 'ASC'
26
25
27 def asciiedges(type, char, lines, seen, rev, parents):
26 def asciiedges(type, char, lines, seen, rev, parents):
28 """adds edge info to changelog DAG walk suitable for ascii()"""
27 """adds edge info to changelog DAG walk suitable for ascii()"""
29 if rev not in seen:
28 if rev not in seen:
30 seen.append(rev)
29 seen.append(rev)
31 nodeidx = seen.index(rev)
30 nodeidx = seen.index(rev)
32
31
33 knownparents = []
32 knownparents = []
34 newparents = []
33 newparents = []
35 for parent in parents:
34 for parent in parents:
36 if parent in seen:
35 if parent in seen:
37 knownparents.append(parent)
36 knownparents.append(parent)
38 else:
37 else:
39 newparents.append(parent)
38 newparents.append(parent)
40
39
41 ncols = len(seen)
40 ncols = len(seen)
42 nextseen = seen[:]
41 nextseen = seen[:]
43 nextseen[nodeidx:nodeidx + 1] = newparents
42 nextseen[nodeidx:nodeidx + 1] = newparents
44 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
43 edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
45
44
46 while len(newparents) > 2:
45 while len(newparents) > 2:
47 # ascii() only knows how to add or remove a single column between two
46 # ascii() only knows how to add or remove a single column between two
48 # calls. Nodes with more than two parents break this constraint so we
47 # calls. Nodes with more than two parents break this constraint so we
49 # introduce intermediate expansion lines to grow the active node list
48 # introduce intermediate expansion lines to grow the active node list
50 # slowly.
49 # slowly.
51 edges.append((nodeidx, nodeidx))
50 edges.append((nodeidx, nodeidx))
52 edges.append((nodeidx, nodeidx + 1))
51 edges.append((nodeidx, nodeidx + 1))
53 nmorecols = 1
52 nmorecols = 1
54 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
53 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
55 char = '\\'
54 char = '\\'
56 lines = []
55 lines = []
57 nodeidx += 1
56 nodeidx += 1
58 ncols += 1
57 ncols += 1
59 edges = []
58 edges = []
60 del newparents[0]
59 del newparents[0]
61
60
62 if len(newparents) > 0:
61 if len(newparents) > 0:
63 edges.append((nodeidx, nodeidx))
62 edges.append((nodeidx, nodeidx))
64 if len(newparents) > 1:
63 if len(newparents) > 1:
65 edges.append((nodeidx, nodeidx + 1))
64 edges.append((nodeidx, nodeidx + 1))
66 nmorecols = len(nextseen) - ncols
65 nmorecols = len(nextseen) - ncols
67 seen[:] = nextseen
66 seen[:] = nextseen
68 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
67 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
69
68
70 def fix_long_right_edges(edges):
69 def fix_long_right_edges(edges):
71 for (i, (start, end)) in enumerate(edges):
70 for (i, (start, end)) in enumerate(edges):
72 if end > start:
71 if end > start:
73 edges[i] = (start, end + 1)
72 edges[i] = (start, end + 1)
74
73
75 def get_nodeline_edges_tail(
74 def get_nodeline_edges_tail(
76 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
75 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
77 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
76 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
78 # Still going in the same non-vertical direction.
77 # Still going in the same non-vertical direction.
79 if n_columns_diff == -1:
78 if n_columns_diff == -1:
80 start = max(node_index + 1, p_node_index)
79 start = max(node_index + 1, p_node_index)
81 tail = ["|", " "] * (start - node_index - 1)
80 tail = ["|", " "] * (start - node_index - 1)
82 tail.extend(["/", " "] * (n_columns - start))
81 tail.extend(["/", " "] * (n_columns - start))
83 return tail
82 return tail
84 else:
83 else:
85 return ["\\", " "] * (n_columns - node_index - 1)
84 return ["\\", " "] * (n_columns - node_index - 1)
86 else:
85 else:
87 return ["|", " "] * (n_columns - node_index - 1)
86 return ["|", " "] * (n_columns - node_index - 1)
88
87
89 def draw_edges(edges, nodeline, interline):
88 def draw_edges(edges, nodeline, interline):
90 for (start, end) in edges:
89 for (start, end) in edges:
91 if start == end + 1:
90 if start == end + 1:
92 interline[2 * end + 1] = "/"
91 interline[2 * end + 1] = "/"
93 elif start == end - 1:
92 elif start == end - 1:
94 interline[2 * start + 1] = "\\"
93 interline[2 * start + 1] = "\\"
95 elif start == end:
94 elif start == end:
96 interline[2 * start] = "|"
95 interline[2 * start] = "|"
97 else:
96 else:
98 if 2 * end >= len(nodeline):
97 if 2 * end >= len(nodeline):
99 continue
98 continue
100 nodeline[2 * end] = "+"
99 nodeline[2 * end] = "+"
101 if start > end:
100 if start > end:
102 (start, end) = (end, start)
101 (start, end) = (end, start)
103 for i in range(2 * start + 1, 2 * end):
102 for i in range(2 * start + 1, 2 * end):
104 if nodeline[i] != "+":
103 if nodeline[i] != "+":
105 nodeline[i] = "-"
104 nodeline[i] = "-"
106
105
107 def get_padding_line(ni, n_columns, edges):
106 def get_padding_line(ni, n_columns, edges):
108 line = []
107 line = []
109 line.extend(["|", " "] * ni)
108 line.extend(["|", " "] * ni)
110 if (ni, ni - 1) in edges or (ni, ni) in edges:
109 if (ni, ni - 1) in edges or (ni, ni) in edges:
111 # (ni, ni - 1) (ni, ni)
110 # (ni, ni - 1) (ni, ni)
112 # | | | | | | | |
111 # | | | | | | | |
113 # +---o | | o---+
112 # +---o | | o---+
114 # | | c | | c | |
113 # | | c | | c | |
115 # | |/ / | |/ /
114 # | |/ / | |/ /
116 # | | | | | |
115 # | | | | | |
117 c = "|"
116 c = "|"
118 else:
117 else:
119 c = " "
118 c = " "
120 line.extend([c, " "])
119 line.extend([c, " "])
121 line.extend(["|", " "] * (n_columns - ni - 1))
120 line.extend(["|", " "] * (n_columns - ni - 1))
122 return line
121 return line
123
122
124 def asciistate():
123 def asciistate():
125 """returns the initial value for the "state" argument to ascii()"""
124 """returns the initial value for the "state" argument to ascii()"""
126 return [0, 0]
125 return [0, 0]
127
126
128 def ascii(ui, state, type, char, text, coldata):
127 def ascii(ui, state, type, char, text, coldata):
129 """prints an ASCII graph of the DAG
128 """prints an ASCII graph of the DAG
130
129
131 takes the following arguments (one call per node in the graph):
130 takes the following arguments (one call per node in the graph):
132
131
133 - ui to write to
132 - ui to write to
134 - Somewhere to keep the needed state in (init to asciistate())
133 - Somewhere to keep the needed state in (init to asciistate())
135 - Column of the current node in the set of ongoing edges.
134 - Column of the current node in the set of ongoing edges.
136 - Type indicator of node data == ASCIIDATA.
135 - Type indicator of node data == ASCIIDATA.
137 - Payload: (char, lines):
136 - Payload: (char, lines):
138 - Character to use as node's symbol.
137 - Character to use as node's symbol.
139 - List of lines to display as the node's text.
138 - List of lines to display as the node's text.
140 - Edges; a list of (col, next_col) indicating the edges between
139 - Edges; a list of (col, next_col) indicating the edges between
141 the current node and its parents.
140 the current node and its parents.
142 - Number of columns (ongoing edges) in the current revision.
141 - Number of columns (ongoing edges) in the current revision.
143 - The difference between the number of columns (ongoing edges)
142 - The difference between the number of columns (ongoing edges)
144 in the next revision and the number of columns (ongoing edges)
143 in the next revision and the number of columns (ongoing edges)
145 in the current revision. That is: -1 means one column removed;
144 in the current revision. That is: -1 means one column removed;
146 0 means no columns added or removed; 1 means one column added.
145 0 means no columns added or removed; 1 means one column added.
147 """
146 """
148
147
149 idx, edges, ncols, coldiff = coldata
148 idx, edges, ncols, coldiff = coldata
150 assert -2 < coldiff < 2
149 assert -2 < coldiff < 2
151 if coldiff == -1:
150 if coldiff == -1:
152 # Transform
151 # Transform
153 #
152 #
154 # | | | | | |
153 # | | | | | |
155 # o | | into o---+
154 # o | | into o---+
156 # |X / |/ /
155 # |X / |/ /
157 # | | | |
156 # | | | |
158 fix_long_right_edges(edges)
157 fix_long_right_edges(edges)
159
158
160 # add_padding_line says whether to rewrite
159 # add_padding_line says whether to rewrite
161 #
160 #
162 # | | | | | | | |
161 # | | | | | | | |
163 # | o---+ into | o---+
162 # | o---+ into | o---+
164 # | / / | | | # <--- padding line
163 # | / / | | | # <--- padding line
165 # o | | | / /
164 # o | | | / /
166 # o | |
165 # o | |
167 add_padding_line = (len(text) > 2 and coldiff == -1 and
166 add_padding_line = (len(text) > 2 and coldiff == -1 and
168 [x for (x, y) in edges if x + 1 < y])
167 [x for (x, y) in edges if x + 1 < y])
169
168
170 # fix_nodeline_tail says whether to rewrite
169 # fix_nodeline_tail says whether to rewrite
171 #
170 #
172 # | | o | | | | o | |
171 # | | o | | | | o | |
173 # | | |/ / | | |/ /
172 # | | |/ / | | |/ /
174 # | o | | into | o / / # <--- fixed nodeline tail
173 # | o | | into | o / / # <--- fixed nodeline tail
175 # | |/ / | |/ /
174 # | |/ / | |/ /
176 # o | | o | |
175 # o | | o | |
177 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
176 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
178
177
179 # nodeline is the line containing the node character (typically o)
178 # nodeline is the line containing the node character (typically o)
180 nodeline = ["|", " "] * idx
179 nodeline = ["|", " "] * idx
181 nodeline.extend([char, " "])
180 nodeline.extend([char, " "])
182
181
183 nodeline.extend(
182 nodeline.extend(
184 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
183 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
185 state[0], fix_nodeline_tail))
184 state[0], fix_nodeline_tail))
186
185
187 # shift_interline is the line containing the non-vertical
186 # shift_interline is the line containing the non-vertical
188 # edges between this entry and the next
187 # edges between this entry and the next
189 shift_interline = ["|", " "] * idx
188 shift_interline = ["|", " "] * idx
190 if coldiff == -1:
189 if coldiff == -1:
191 n_spaces = 1
190 n_spaces = 1
192 edge_ch = "/"
191 edge_ch = "/"
193 elif coldiff == 0:
192 elif coldiff == 0:
194 n_spaces = 2
193 n_spaces = 2
195 edge_ch = "|"
194 edge_ch = "|"
196 else:
195 else:
197 n_spaces = 3
196 n_spaces = 3
198 edge_ch = "\\"
197 edge_ch = "\\"
199 shift_interline.extend(n_spaces * [" "])
198 shift_interline.extend(n_spaces * [" "])
200 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
199 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
201
200
202 # draw edges from the current node to its parents
201 # draw edges from the current node to its parents
203 draw_edges(edges, nodeline, shift_interline)
202 draw_edges(edges, nodeline, shift_interline)
204
203
205 # lines is the list of all graph lines to print
204 # lines is the list of all graph lines to print
206 lines = [nodeline]
205 lines = [nodeline]
207 if add_padding_line:
206 if add_padding_line:
208 lines.append(get_padding_line(idx, ncols, edges))
207 lines.append(get_padding_line(idx, ncols, edges))
209 lines.append(shift_interline)
208 lines.append(shift_interline)
210
209
211 # make sure that there are as many graph lines as there are
210 # make sure that there are as many graph lines as there are
212 # log strings
211 # log strings
213 while len(text) < len(lines):
212 while len(text) < len(lines):
214 text.append("")
213 text.append("")
215 if len(lines) < len(text):
214 if len(lines) < len(text):
216 extra_interline = ["|", " "] * (ncols + coldiff)
215 extra_interline = ["|", " "] * (ncols + coldiff)
217 while len(lines) < len(text):
216 while len(lines) < len(text):
218 lines.append(extra_interline)
217 lines.append(extra_interline)
219
218
220 # print lines
219 # print lines
221 indentation_level = max(ncols, ncols + coldiff)
220 indentation_level = max(ncols, ncols + coldiff)
222 for (line, logstr) in zip(lines, text):
221 for (line, logstr) in zip(lines, text):
223 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
222 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
224 ui.write(ln.rstrip() + '\n')
223 ui.write(ln.rstrip() + '\n')
225
224
226 # ... and start over
225 # ... and start over
227 state[0] = coldiff
226 state[0] = coldiff
228 state[1] = idx
227 state[1] = idx
229
228
230 def get_revs(repo, rev_opt):
229 def get_revs(repo, rev_opt):
231 if rev_opt:
230 if rev_opt:
232 revs = scmutil.revrange(repo, rev_opt)
231 revs = scmutil.revrange(repo, rev_opt)
233 if len(revs) == 0:
232 if len(revs) == 0:
234 return (nullrev, nullrev)
233 return (nullrev, nullrev)
235 return (max(revs), min(revs))
234 return (max(revs), min(revs))
236 else:
235 else:
237 return (len(repo) - 1, 0)
236 return (len(repo) - 1, 0)
238
237
239 def check_unsupported_flags(pats, opts):
238 def check_unsupported_flags(pats, opts):
240 for op in ["newest_first"]:
239 for op in ["newest_first"]:
241 if op in opts and opts[op]:
240 if op in opts and opts[op]:
242 raise util.Abort(_("-G/--graph option is incompatible with --%s")
241 raise util.Abort(_("-G/--graph option is incompatible with --%s")
243 % op.replace("_", "-"))
242 % op.replace("_", "-"))
244
243
245 def _makefilematcher(repo, pats, followfirst):
244 def _makefilematcher(repo, pats, followfirst):
246 # When displaying a revision with --patch --follow FILE, we have
245 # When displaying a revision with --patch --follow FILE, we have
247 # to know which file of the revision must be diffed. With
246 # to know which file of the revision must be diffed. With
248 # --follow, we want the names of the ancestors of FILE in the
247 # --follow, we want the names of the ancestors of FILE in the
249 # revision, stored in "fcache". "fcache" is populated by
248 # revision, stored in "fcache". "fcache" is populated by
250 # reproducing the graph traversal already done by --follow revset
249 # reproducing the graph traversal already done by --follow revset
251 # and relating linkrevs to file names (which is not "correct" but
250 # and relating linkrevs to file names (which is not "correct" but
252 # good enough).
251 # good enough).
253 fcache = {}
252 fcache = {}
254 fcacheready = [False]
253 fcacheready = [False]
255 pctx = repo['.']
254 pctx = repo['.']
256 wctx = repo[None]
255 wctx = repo[None]
257
256
258 def populate():
257 def populate():
259 for fn in pats:
258 for fn in pats:
260 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
259 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
261 for c in i:
260 for c in i:
262 fcache.setdefault(c.linkrev(), set()).add(c.path())
261 fcache.setdefault(c.linkrev(), set()).add(c.path())
263
262
264 def filematcher(rev):
263 def filematcher(rev):
265 if not fcacheready[0]:
264 if not fcacheready[0]:
266 # Lazy initialization
265 # Lazy initialization
267 fcacheready[0] = True
266 fcacheready[0] = True
268 populate()
267 populate()
269 return scmutil.match(wctx, fcache.get(rev, []), default='path')
268 return scmutil.match(wctx, fcache.get(rev, []), default='path')
270
269
271 return filematcher
270 return filematcher
272
271
273 def _makelogrevset(repo, pats, opts, revs):
272 def _makelogrevset(repo, pats, opts, revs):
274 """Return (expr, filematcher) where expr is a revset string built
273 """Return (expr, filematcher) where expr is a revset string built
275 from log options and file patterns or None. If --stat or --patch
274 from log options and file patterns or None. If --stat or --patch
276 are not passed filematcher is None. Otherwise it is a callable
275 are not passed filematcher is None. Otherwise it is a callable
277 taking a revision number and returning a match object
276 taking a revision number and returning a match object
278 the files to be detailed when displaying the revision.
277 the files to be detailed when displaying the revision.
279 """
278 """
280 opt2revset = {
279 opt2revset = {
281 'no_merges': ('not merge()', None),
280 'no_merges': ('not merge()', None),
282 'only_merges': ('merge()', None),
281 'only_merges': ('merge()', None),
283 '_ancestors': ('ancestors(%(val)s)', None),
282 '_ancestors': ('ancestors(%(val)s)', None),
284 '_fancestors': ('_firstancestors(%(val)s)', None),
283 '_fancestors': ('_firstancestors(%(val)s)', None),
285 '_descendants': ('descendants(%(val)s)', None),
284 '_descendants': ('descendants(%(val)s)', None),
286 '_fdescendants': ('_firstdescendants(%(val)s)', None),
285 '_fdescendants': ('_firstdescendants(%(val)s)', None),
287 '_matchfiles': ('_matchfiles(%(val)s)', None),
286 '_matchfiles': ('_matchfiles(%(val)s)', None),
288 'date': ('date(%(val)r)', None),
287 'date': ('date(%(val)r)', None),
289 'branch': ('branch(%(val)r)', ' or '),
288 'branch': ('branch(%(val)r)', ' or '),
290 '_patslog': ('filelog(%(val)r)', ' or '),
289 '_patslog': ('filelog(%(val)r)', ' or '),
291 '_patsfollow': ('follow(%(val)r)', ' or '),
290 '_patsfollow': ('follow(%(val)r)', ' or '),
292 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
291 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
293 'keyword': ('keyword(%(val)r)', ' or '),
292 'keyword': ('keyword(%(val)r)', ' or '),
294 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
293 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
295 'user': ('user(%(val)r)', ' or '),
294 'user': ('user(%(val)r)', ' or '),
296 }
295 }
297
296
298 opts = dict(opts)
297 opts = dict(opts)
299 # follow or not follow?
298 # follow or not follow?
300 follow = opts.get('follow') or opts.get('follow_first')
299 follow = opts.get('follow') or opts.get('follow_first')
301 followfirst = opts.get('follow_first')
300 followfirst = opts.get('follow_first')
302 # --follow with FILE behaviour depends on revs...
301 # --follow with FILE behaviour depends on revs...
303 startrev = revs[0]
302 startrev = revs[0]
304 followdescendants = len(revs) > 1 and revs[0] < revs[1]
303 followdescendants = len(revs) > 1 and revs[0] < revs[1]
305
304
306 # branch and only_branch are really aliases and must be handled at
305 # branch and only_branch are really aliases and must be handled at
307 # the same time
306 # the same time
308 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
307 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
309 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
308 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
310 # pats/include/exclude are passed to match.match() directly in
309 # pats/include/exclude are passed to match.match() directly in
311 # _matchfile() revset but walkchangerevs() builds its matcher with
310 # _matchfile() revset but walkchangerevs() builds its matcher with
312 # scmutil.match(). The difference is input pats are globbed on
311 # scmutil.match(). The difference is input pats are globbed on
313 # platforms without shell expansion (windows).
312 # platforms without shell expansion (windows).
314 pctx = repo[None]
313 pctx = repo[None]
315 match, pats = scmutil.matchandpats(pctx, pats, opts)
314 match, pats = scmutil.matchandpats(pctx, pats, opts)
316 slowpath = match.anypats() or (match.files() and opts.get('removed'))
315 slowpath = match.anypats() or (match.files() and opts.get('removed'))
317 if not slowpath:
316 if not slowpath:
318 for f in match.files():
317 for f in match.files():
319 if follow and f not in pctx:
318 if follow and f not in pctx:
320 raise util.Abort(_('cannot follow file not in parent '
319 raise util.Abort(_('cannot follow file not in parent '
321 'revision: "%s"') % f)
320 'revision: "%s"') % f)
322 filelog = repo.file(f)
321 filelog = repo.file(f)
323 if not len(filelog):
322 if not len(filelog):
324 # A zero count may be a directory or deleted file, so
323 # A zero count may be a directory or deleted file, so
325 # try to find matching entries on the slow path.
324 # try to find matching entries on the slow path.
326 if follow:
325 if follow:
327 raise util.Abort(
326 raise util.Abort(
328 _('cannot follow nonexistent file: "%s"') % f)
327 _('cannot follow nonexistent file: "%s"') % f)
329 slowpath = True
328 slowpath = True
330 if slowpath:
329 if slowpath:
331 # See cmdutil.walkchangerevs() slow path.
330 # See cmdutil.walkchangerevs() slow path.
332 #
331 #
333 if follow:
332 if follow:
334 raise util.Abort(_('can only follow copies/renames for explicit '
333 raise util.Abort(_('can only follow copies/renames for explicit '
335 'filenames'))
334 'filenames'))
336 # pats/include/exclude cannot be represented as separate
335 # pats/include/exclude cannot be represented as separate
337 # revset expressions as their filtering logic applies at file
336 # revset expressions as their filtering logic applies at file
338 # level. For instance "-I a -X a" matches a revision touching
337 # level. For instance "-I a -X a" matches a revision touching
339 # "a" and "b" while "file(a) and not file(b)" does
338 # "a" and "b" while "file(a) and not file(b)" does
340 # not. Besides, filesets are evaluated against the working
339 # not. Besides, filesets are evaluated against the working
341 # directory.
340 # directory.
342 matchargs = ['r:', 'd:relpath']
341 matchargs = ['r:', 'd:relpath']
343 for p in pats:
342 for p in pats:
344 matchargs.append('p:' + p)
343 matchargs.append('p:' + p)
345 for p in opts.get('include', []):
344 for p in opts.get('include', []):
346 matchargs.append('i:' + p)
345 matchargs.append('i:' + p)
347 for p in opts.get('exclude', []):
346 for p in opts.get('exclude', []):
348 matchargs.append('x:' + p)
347 matchargs.append('x:' + p)
349 matchargs = ','.join(('%r' % p) for p in matchargs)
348 matchargs = ','.join(('%r' % p) for p in matchargs)
350 opts['_matchfiles'] = matchargs
349 opts['_matchfiles'] = matchargs
351 else:
350 else:
352 if follow:
351 if follow:
353 if followfirst:
352 if followfirst:
354 if pats:
353 if pats:
355 opts['_patsfollowfirst'] = list(pats)
354 opts['_patsfollowfirst'] = list(pats)
356 else:
355 else:
357 if followdescendants:
356 if followdescendants:
358 opts['_fdescendants'] = str(startrev)
357 opts['_fdescendants'] = str(startrev)
359 else:
358 else:
360 opts['_fancestors'] = str(startrev)
359 opts['_fancestors'] = str(startrev)
361 else:
360 else:
362 if pats:
361 if pats:
363 opts['_patsfollow'] = list(pats)
362 opts['_patsfollow'] = list(pats)
364 else:
363 else:
365 if followdescendants:
364 if followdescendants:
366 opts['_descendants'] = str(startrev)
365 opts['_descendants'] = str(startrev)
367 else:
366 else:
368 opts['_ancestors'] = str(startrev)
367 opts['_ancestors'] = str(startrev)
369 else:
368 else:
370 opts['_patslog'] = list(pats)
369 opts['_patslog'] = list(pats)
371
370
372 filematcher = None
371 filematcher = None
373 if opts.get('patch') or opts.get('stat'):
372 if opts.get('patch') or opts.get('stat'):
374 if follow:
373 if follow:
375 filematcher = _makefilematcher(repo, pats, followfirst)
374 filematcher = _makefilematcher(repo, pats, followfirst)
376 else:
375 else:
377 filematcher = lambda rev: match
376 filematcher = lambda rev: match
378
377
379 expr = []
378 expr = []
380 for op, val in opts.iteritems():
379 for op, val in opts.iteritems():
381 if not val:
380 if not val:
382 continue
381 continue
383 if op not in opt2revset:
382 if op not in opt2revset:
384 continue
383 continue
385 revop, andor = opt2revset[op]
384 revop, andor = opt2revset[op]
386 if '%(val)' not in revop:
385 if '%(val)' not in revop:
387 expr.append(revop)
386 expr.append(revop)
388 else:
387 else:
389 if not isinstance(val, list):
388 if not isinstance(val, list):
390 e = revop % {'val': val}
389 e = revop % {'val': val}
391 else:
390 else:
392 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
391 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
393 expr.append(e)
392 expr.append(e)
394
393
395 if expr:
394 if expr:
396 expr = '(' + ' and '.join(expr) + ')'
395 expr = '(' + ' and '.join(expr) + ')'
397 else:
396 else:
398 expr = None
397 expr = None
399 return expr, filematcher
398 return expr, filematcher
400
399
401 def getlogrevs(repo, pats, opts):
400 def getlogrevs(repo, pats, opts):
402 """Return (revs, expr, filematcher) where revs is a list of
401 """Return (revs, expr, filematcher) where revs is a list of
403 revision numbers, expr is a revset string built from log options
402 revision numbers, expr is a revset string built from log options
404 and file patterns or None, and used to filter 'revs'. If --stat or
403 and file patterns or None, and used to filter 'revs'. If --stat or
405 --patch are not passed filematcher is None. Otherwise it is a
404 --patch are not passed filematcher is None. Otherwise it is a
406 callable taking a revision number and returning a match object
405 callable taking a revision number and returning a match object
407 filtering the files to be detailed when displaying the revision.
406 filtering the files to be detailed when displaying the revision.
408 """
407 """
409 if not len(repo):
408 if not len(repo):
410 return [], None, None
409 return [], None, None
411 # Default --rev value depends on --follow but --follow behaviour
410 # Default --rev value depends on --follow but --follow behaviour
412 # depends on revisions resolved from --rev...
411 # depends on revisions resolved from --rev...
413 follow = opts.get('follow') or opts.get('follow_first')
412 follow = opts.get('follow') or opts.get('follow_first')
414 if opts.get('rev'):
413 if opts.get('rev'):
415 revs = scmutil.revrange(repo, opts['rev'])
414 revs = scmutil.revrange(repo, opts['rev'])
416 else:
415 else:
417 if follow and len(repo) > 0:
416 if follow and len(repo) > 0:
418 revs = scmutil.revrange(repo, ['.:0'])
417 revs = scmutil.revrange(repo, ['.:0'])
419 else:
418 else:
420 revs = range(len(repo) - 1, -1, -1)
419 revs = range(len(repo) - 1, -1, -1)
421 if not revs:
420 if not revs:
422 return [], None, None
421 return [], None, None
423 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
422 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
424 if expr:
423 if expr:
425 # Evaluate revisions in changelog order for performance
424 # Evaluate revisions in changelog order for performance
426 # reasons but preserve the original sequence order in the
425 # reasons but preserve the original sequence order in the
427 # filtered result.
426 # filtered result.
428 matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
427 matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
429 revs = [r for r in revs if r in matched]
428 revs = [r for r in revs if r in matched]
430 if not opts.get('hidden'):
429 if not opts.get('hidden'):
431 # --hidden is still experimental and not worth a dedicated revset
430 # --hidden is still experimental and not worth a dedicated revset
432 # yet. Fortunately, filtering revision number is fast.
431 # yet. Fortunately, filtering revision number is fast.
433 revs = [r for r in revs if r not in repo.changelog.hiddenrevs]
432 revs = [r for r in revs if r not in repo.changelog.hiddenrevs]
434 return revs, expr, filematcher
433 return revs, expr, filematcher
435
434
436 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
435 def generate(ui, dag, displayer, showparents, edgefn, getrenamed=None,
437 filematcher=None):
436 filematcher=None):
438 seen, state = [], asciistate()
437 seen, state = [], asciistate()
439 for rev, type, ctx, parents in dag:
438 for rev, type, ctx, parents in dag:
440 char = ctx.node() in showparents and '@' or 'o'
439 char = ctx.node() in showparents and '@' or 'o'
441 copies = None
440 copies = None
442 if getrenamed and ctx.rev():
441 if getrenamed and ctx.rev():
443 copies = []
442 copies = []
444 for fn in ctx.files():
443 for fn in ctx.files():
445 rename = getrenamed(fn, ctx.rev())
444 rename = getrenamed(fn, ctx.rev())
446 if rename:
445 if rename:
447 copies.append((fn, rename[0]))
446 copies.append((fn, rename[0]))
448 revmatchfn = None
447 revmatchfn = None
449 if filematcher is not None:
448 if filematcher is not None:
450 revmatchfn = filematcher(ctx.rev())
449 revmatchfn = filematcher(ctx.rev())
451 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
450 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
452 lines = displayer.hunk.pop(rev).split('\n')[:-1]
451 lines = displayer.hunk.pop(rev).split('\n')[:-1]
453 displayer.flush(rev)
452 displayer.flush(rev)
454 edges = edgefn(type, char, lines, seen, rev, parents)
453 edges = edgefn(type, char, lines, seen, rev, parents)
455 for type, char, lines, coldata in edges:
454 for type, char, lines, coldata in edges:
456 ascii(ui, state, type, char, lines, coldata)
455 ascii(ui, state, type, char, lines, coldata)
457 displayer.close()
456 displayer.close()
458
457
@command('glog',
    [('f', 'follow', None,
      _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
    ] + commands.logopts + commands.walkopts,
    _('[OPTION]... [FILE]'))
def graphlog(ui, repo, *pats, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    # Resolve the log options/patterns into a revision set plus a
    # per-revision file matcher (used for --patch/--stat filtering).
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    # Graph output runs newest-first; apply --limit after ordering.
    revs = sorted(revs, reverse=True)
    if limit is not None:
        revs = revs[:limit]
    revdag = graphmod.dagwalker(repo, revs)

    # Rename tracking is only paid for when --copies was requested.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            # Stop rename walks at the newest requested revision.
            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    # Working-directory parents are drawn with '@' instead of 'o'.
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
             filematcher)
495
508
def graphrevs(repo, nodes, opts):
    # Build a DAG walk over *nodes* for graph display, honoring --limit.
    # NOTE: reverses *nodes* in place (newest first), which callers can
    # observe on the list they passed in.
    limit = cmdutil.loglimit(opts)
    nodes.reverse()
    if limit is None:
        wanted = nodes
    else:
        wanted = nodes[:limit]
    return graphmod.nodes(repo, wanted)
514 return graphmod.nodes(repo, nodes)
502
515
def goutgoing(ui, repo, dest=None, **opts):
    """show the outgoing changesets alongside an ASCII revision graph

    Print the outgoing changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags([], opts)
    # hg._outgoing reports/handles "no changes" itself and returns None.
    o = hg._outgoing(ui, repo, dest, opts)
    if o is None:
        return

    displayer = show_changeset(ui, repo, opts, buffered=True)
    # Working-directory parents are drawn with '@' instead of 'o'.
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, graphrevs(repo, o, opts), displayer, showparents,
             asciiedges)
534 generate(ui, revdag, displayer, showparents, asciiedges)
522
535
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags([], opts)

    def subreporecurse():
        # Non-zero tells hg._incoming to recurse into subrepositories.
        return 1

    def display(other, chlist, displayer):
        # Callback invoked by hg._incoming with the remote repo and the
        # list of incoming changesets; renders them as an ASCII graph.
        revdag = graphrevs(other, chlist, opts)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        generate(ui, revdag, displayer, showparents, asciiedges)

    hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
542
555
def uisetup(ui):
    '''Initialize the extension.'''
    # Give log/incoming/outgoing a -G/--graph flag that reroutes to the
    # corresponding graph-drawing implementation.
    for name, wrapper in [('log', graphlog),
                          ('incoming', gincoming),
                          ('outgoing', goutgoing)]:
        _wrapcmd(name, commands.table, wrapper)
560 _wrapcmd('outgoing', commands.table, goutgoing)
548
561
def _wrapcmd(cmd, table, wrapfn):
    '''wrap the command'''
    def graph(orig, *args, **kwargs):
        # Dispatch to the graph implementation only when --graph was
        # given; otherwise fall through to the original command.
        target = wrapfn if kwargs['graph'] else orig
        return target(*args, **kwargs)
    entry = extensions.wrapcommand(table, cmd, graph)
    entry[1].append(('G', 'graph', None, _("show the revision DAG")))
General Comments 0
You need to be logged in to leave comments. Login now