##// END OF EJS Templates
incoming: unify code for incoming and graphlog.incoming
Nicolas Dumazet -
r12730:33e1fd2a default
parent child Browse files
Show More
@@ -1,384 +1,346
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell
8 '''command to view revision graphs from a shell
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this options is given, an ASCII representation of the
11 commands. When this options is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 import os
15 import os
16 from mercurial.cmdutil import revrange, show_changeset
16 from mercurial.cmdutil import revrange, show_changeset
17 from mercurial.commands import templateopts
17 from mercurial.commands import templateopts
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.node import nullrev
19 from mercurial.node import nullrev
20 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
20 from mercurial import cmdutil, commands, extensions
21 from mercurial import hg, url, util, graphmod, discovery
21 from mercurial import hg, url, util, graphmod, discovery
22
22
23 ASCIIDATA = 'ASC'
23 ASCIIDATA = 'ASC'
24
24
def asciiedges(seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()

    seen is the mutable list of columns (revs with ongoing edges); it is
    updated in place: rev's column is replaced by its not-yet-seen parents.
    Returns (nodeidx, edges, ncols, nmorecols) where edges is a list of
    (col, parentcol) pairs, ncols the column count before the update and
    nmorecols the column-count delta (-1, 0 or +1).
    """
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # split parents into those already tracked as columns and new ones
    knownparents = []
    newparents = []
    for parent in parents:
        if parent in seen:
            knownparents.append(parent)
        else:
            newparents.append(parent)

    ncols = len(seen)
    # rev's column is taken over by its new parents
    seen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, seen.index(p)) for p in knownparents]

    if len(newparents) > 0:
        # edge to the first new parent stays in this column
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        # second new parent opens a fresh column to the right
        edges.append((nodeidx, nodeidx + 1))

    nmorecols = len(seen) - ncols
    return nodeidx, edges, ncols, nmorecols
50
50
def fix_long_right_edges(edges):
    """Stretch rightward edges one extra column (modifies edges in place).

    Used when a column is being removed: edges that travel right by more
    than their own column must reach one column further.
    """
    for (i, (start, end)) in enumerate(edges):
        if end > start:
            edges[i] = (start, end + 1)
55
55
def get_nodeline_edges_tail(
    node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Return the list of characters drawn to the right of the node symbol.

    node_index/p_node_index are the current and previous node columns,
    n_columns_diff/p_diff the current and previous column-count deltas.
    When fix_tail is set and the graph keeps moving in the same
    non-vertical direction, slanted edges are drawn instead of pipes.
    """
    if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
        # Still going in the same non-vertical direction.
        if n_columns_diff == -1:
            start = max(node_index + 1, p_node_index)
            tail = ["|", " "] * (start - node_index - 1)
            tail.extend(["/", " "] * (n_columns - start))
            return tail
        else:
            return ["\\", " "] * (n_columns - node_index - 1)
    else:
        return ["|", " "] * (n_columns - node_index - 1)
69
69
def draw_edges(edges, nodeline, interline):
    """Render the (col, parentcol) edges into the two character buffers.

    nodeline and interline are lists of single characters, two slots per
    column; both are modified in place.
    """
    for (start, end) in edges:
        if start == end + 1:
            # parent is one column to the left
            interline[2 * end + 1] = "/"
        elif start == end - 1:
            # parent is one column to the right
            interline[2 * start + 1] = "\\"
        elif start == end:
            # straight vertical edge
            interline[2 * start] = "|"
        else:
            # distant parent: draw a horizontal connector on the node line
            nodeline[2 * end] = "+"
            if start > end:
                (start, end) = (end, start)
            for i in range(2 * start + 1, 2 * end):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
85
85
def get_padding_line(ni, n_columns, edges):
    """Return a padding line inserted between nodeline and interline.

    ni is the node's column, n_columns the total column count. The slot
    at the node's column carries a "|" only when an edge actually passes
    through it, as shown below.
    """
    line = []
    line.extend(["|", " "] * ni)
    if (ni, ni - 1) in edges or (ni, ni) in edges:
        # (ni, ni - 1)      (ni, ni)
        # | | | |           | | | |
        # +---o |           | o---+
        # | | c |           | c | |
        # | |/ /            | |/ /
        # | | |             | | |
        c = "|"
    else:
        c = " "
    line.extend([c, " "])
    line.extend(["|", " "] * (n_columns - ni - 1))
    return line
102
102
def asciistate():
    """returns the initial value for the "state" argument to ascii()

    state[0] holds the previous column-count delta, state[1] the previous
    node column; a fresh list is returned on every call.
    """
    return [0, 0]
106
106
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data == ASCIIDATA.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        fix_long_right_edges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    nodeline.extend(
        get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    draw_edges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(get_padding_line(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over
    state[0] = coldiff
    state[1] = idx
208
208
def get_revs(repo, rev_opt):
    """Return (start, stop) revision numbers for the graph walk.

    start >= stop (the walk runs newest-first). With no --rev option the
    whole repository is covered; an empty revrange yields (nullrev, nullrev).
    """
    if rev_opt:
        revs = revrange(repo, rev_opt)
        if len(revs) == 0:
            return (nullrev, nullrev)
        return (max(revs), min(revs))
    else:
        return (len(repo) - 1, 0)
217
217
def check_unsupported_flags(opts):
    """Abort if --graph is combined with a log option it cannot honor."""
    for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
               "only_merges", "user", "branch", "only_branch", "prune",
               "newest_first", "no_merges", "include", "exclude"]:
        if op in opts and opts[op]:
            raise util.Abort(_("--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
225
225
def generate(ui, dag, displayer, showparents, edgefn):
    """Walk dag and print one ASCII graph entry per node.

    Nodes whose hash is in showparents are drawn as '@', others as 'o'.
    edgefn (e.g. asciiedges) computes the column data for each node.
    """
    seen, state = [], asciistate()
    for rev, type, ctx, parents in dag:
        char = ctx.node() in showparents and '@' or 'o'
        # render through the buffered displayer, then recover the lines
        displayer.show(ctx)
        lines = displayer.hunk.pop(rev).split('\n')[:-1]
        displayer.flush(rev)
        ascii(ui, state, type, char, lines, edgefn(seen, rev, parents))
    displayer.close()
235
235
def graphlog(ui, repo, path=None, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags(opts)
    limit = cmdutil.loglimit(opts)
    start, stop = get_revs(repo, opts["rev"])
    if start == nullrev:
        return

    if path:
        path = util.canonpath(repo.root, os.getcwd(), path)
    if path: # could be reset in canonpath
        revdag = graphmod.filerevs(repo, path, start, stop, limit)
    else:
        if limit is not None:
            # clamp the walk so at most `limit` revisions are generated
            stop = max(stop, start - limit + 1)
        revdag = graphmod.revisions(repo, start, stop)

    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges)
264
264
def graphrevs(repo, nodes, opts):
    """Return a DAG generator over nodes, newest first, honoring --limit."""
    limit = cmdutil.loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)
271
271
def goutgoing(ui, repo, dest=None, **opts):
    """show the outgoing changesets alongside an ASCII revision graph

    Print the outgoing changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags(opts)
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.repository(hg.remoteui(ui, opts), dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return

    o = repo.changelog.nodesbetween(o, revs)[0]
    revdag = graphrevs(repo, o, opts)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges)
300
300
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    # the bundle/lookup/cleanup machinery now lives in hg._incoming; this
    # command only supplies the recursion policy and the display callback
    def subreporecurse():
        return 1

    check_unsupported_flags(opts)
    def display(other, chlist, displayer):
        revdag = graphrevs(other, chlist, opts)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        generate(ui, revdag, displayer, showparents, asciiedges)

    hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
358
320
def uisetup(ui):
    '''Initialize the extension: add --graph to log, incoming, outgoing.'''
    _wrapcmd(ui, 'log', commands.table, graphlog)
    _wrapcmd(ui, 'incoming', commands.table, gincoming)
    _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
364
326
def _wrapcmd(ui, cmd, table, wrapfn):
    '''wrap the command so -G/--graph dispatches to wrapfn'''
    def graph(orig, *args, **kwargs):
        if kwargs['graph']:
            return wrapfn(*args, **kwargs)
        return orig(*args, **kwargs)
    entry = extensions.wrapcommand(table, cmd, graph)
    entry[1].append(('G', 'graph', None, _("show the revision DAG")))
373
335
# command table: expose graphlog as the standalone "glog" command
cmdtable = {
    "glog":
        (graphlog,
         [('l', 'limit', '',
           _('limit number of changes displayed'), _('NUM')),
          ('p', 'patch', False, _('show patch')),
          ('r', 'rev', [],
           _('show the specified revision or range'), _('REV')),
         ] + templateopts,
         _('hg glog [OPTION]... [FILE]')),
}
@@ -1,557 +1,573
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid, nullrev, short
11 from node import hex, nullid, nullrev, short
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import lock, util, extensions, error, encoding, node
13 import lock, util, extensions, error, encoding, node
14 import cmdutil, discovery, url, changegroup
14 import cmdutil, discovery, url, changegroup
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
def _local(path):
    """Return bundlerepo for a bundle file path, localrepo otherwise."""
    path = util.expandpath(util.drop_scheme('file', path))
    return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
def addbranchrevs(lrepo, repo, branches, revs):
    """Resolve url#branch fragments against repo's branchmap.

    branches is the (hashbranch, branches) pair from parseurl; lrepo is
    the local repo used to resolve the '.' (dirstate) branch. Returns
    (revs, checkout) where checkout is the first resolved rev or None.
    """
    hashbranch, branches = branches
    if not hashbranch and not branches:
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        # no branchmap capability: pass the fragment through as a rev
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(butf8):
        # resolve one UTF-8 branch name; extend revs and report success
        if butf8 == '.':
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            butf8 = lrepo.dirstate.branch()
        if butf8 in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
            return True
        else:
            return False

    for branch in branches:
        butf8 = encoding.fromlocal(branch)
        if not primary(butf8):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        butf8 = encoding.fromlocal(hashbranch)
        if not primary(butf8):
            # not a branch name: treat the fragment as a plain revision
            revs.append(hashbranch)
    return revs, revs[0]
55
55
def parseurl(url, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''

    if '#' not in url:
        return url, (None, branches or [])
    url, branch = url.split('#', 1)
    return url, (branch, branches or [])
63
63
# map URL scheme -> repository module (or factory callable) implementing it;
# consulted by _lookup() below
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}
72
72
def _lookup(path):
    """Return the repository module handling path's URL scheme."""
    scheme = 'file'
    if path:
        c = path.find(':')
        if c > 0:
            scheme = path[:c]
    thing = schemes.get(scheme) or schemes['file']
    try:
        # entries like _local are factories taking the path
        return thing(path)
    except TypeError:
        # plain module entries are returned as-is
        return thing
84
84
def islocal(repo):
    '''return true if repo or path is local'''
    if isinstance(repo, str):
        try:
            return _lookup(repo).islocal(repo)
        except AttributeError:
            return False
    return repo.local()
93
93
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    # prefer the repo's own ui (it may carry repo-level configuration)
    ui = getattr(repo, "ui", ui)
    # give every loaded extension a chance to set up the repo
    for name, module in extensions.extensions():
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, repo)
    return repo
103
103
def defaultdest(source):
    '''return default destination of clone if none is given'''
    return os.path.basename(os.path.normpath(source))
107
107
def localpath(path):
    """Strip a file: URL prefix, returning a filesystem path.

    NOTE: the localhost slice is [16:] (prefix is 17 chars) so the
    leading '/' of the path is kept.
    """
    if path.startswith('file://localhost/'):
        return path[16:]
    if path.startswith('file://'):
        return path[7:]
    if path.startswith('file:'):
        return path[5:]
    return path
116
116
117 def share(ui, source, dest=None, update=True):
117 def share(ui, source, dest=None, update=True):
118 '''create a shared repository'''
118 '''create a shared repository'''
119
119
120 if not islocal(source):
120 if not islocal(source):
121 raise util.Abort(_('can only share local repositories'))
121 raise util.Abort(_('can only share local repositories'))
122
122
123 if not dest:
123 if not dest:
124 dest = defaultdest(source)
124 dest = defaultdest(source)
125 else:
125 else:
126 dest = ui.expandpath(dest)
126 dest = ui.expandpath(dest)
127
127
128 if isinstance(source, str):
128 if isinstance(source, str):
129 origsource = ui.expandpath(source)
129 origsource = ui.expandpath(source)
130 source, branches = parseurl(origsource)
130 source, branches = parseurl(origsource)
131 srcrepo = repository(ui, source)
131 srcrepo = repository(ui, source)
132 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
132 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
133 else:
133 else:
134 srcrepo = source
134 srcrepo = source
135 origsource = source = srcrepo.url()
135 origsource = source = srcrepo.url()
136 checkout = None
136 checkout = None
137
137
138 sharedpath = srcrepo.sharedpath # if our source is already sharing
138 sharedpath = srcrepo.sharedpath # if our source is already sharing
139
139
140 root = os.path.realpath(dest)
140 root = os.path.realpath(dest)
141 roothg = os.path.join(root, '.hg')
141 roothg = os.path.join(root, '.hg')
142
142
143 if os.path.exists(roothg):
143 if os.path.exists(roothg):
144 raise util.Abort(_('destination already exists'))
144 raise util.Abort(_('destination already exists'))
145
145
146 if not os.path.isdir(root):
146 if not os.path.isdir(root):
147 os.mkdir(root)
147 os.mkdir(root)
148 os.mkdir(roothg)
148 os.mkdir(roothg)
149
149
150 requirements = ''
150 requirements = ''
151 try:
151 try:
152 requirements = srcrepo.opener('requires').read()
152 requirements = srcrepo.opener('requires').read()
153 except IOError, inst:
153 except IOError, inst:
154 if inst.errno != errno.ENOENT:
154 if inst.errno != errno.ENOENT:
155 raise
155 raise
156
156
157 requirements += 'shared\n'
157 requirements += 'shared\n'
158 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
158 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
159 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
159 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
160
160
161 default = srcrepo.ui.config('paths', 'default')
161 default = srcrepo.ui.config('paths', 'default')
162 if default:
162 if default:
163 f = file(os.path.join(roothg, 'hgrc'), 'w')
163 f = file(os.path.join(roothg, 'hgrc'), 'w')
164 f.write('[paths]\ndefault = %s\n' % default)
164 f.write('[paths]\ndefault = %s\n' % default)
165 f.close()
165 f.close()
166
166
167 r = repository(ui, root)
167 r = repository(ui, root)
168
168
169 if update:
169 if update:
170 r.ui.status(_("updating working directory\n"))
170 r.ui.status(_("updating working directory\n"))
171 if update is not True:
171 if update is not True:
172 checkout = update
172 checkout = update
173 for test in (checkout, 'default', 'tip'):
173 for test in (checkout, 'default', 'tip'):
174 if test is None:
174 if test is None:
175 continue
175 continue
176 try:
176 try:
177 uprev = r.lookup(test)
177 uprev = r.lookup(test)
178 break
178 break
179 except error.RepoLookupError:
179 except error.RepoLookupError:
180 continue
180 continue
181 _update(r, uprev)
181 _update(r, uprev)
182
182
183 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
183 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
184 stream=False, branch=None):
184 stream=False, branch=None):
185 """Make a copy of an existing repository.
185 """Make a copy of an existing repository.
186
186
187 Create a copy of an existing repository in a new directory. The
187 Create a copy of an existing repository in a new directory. The
188 source and destination are URLs, as passed to the repository
188 source and destination are URLs, as passed to the repository
189 function. Returns a pair of repository objects, the source and
189 function. Returns a pair of repository objects, the source and
190 newly created destination.
190 newly created destination.
191
191
192 The location of the source is added to the new repository's
192 The location of the source is added to the new repository's
193 .hg/hgrc file, as the default to be used for future pulls and
193 .hg/hgrc file, as the default to be used for future pulls and
194 pushes.
194 pushes.
195
195
196 If an exception is raised, the partly cloned/updated destination
196 If an exception is raised, the partly cloned/updated destination
197 repository will be deleted.
197 repository will be deleted.
198
198
199 Arguments:
199 Arguments:
200
200
201 source: repository object or URL
201 source: repository object or URL
202
202
203 dest: URL of destination repository to create (defaults to base
203 dest: URL of destination repository to create (defaults to base
204 name of source repository)
204 name of source repository)
205
205
206 pull: always pull from source repository, even in local case
206 pull: always pull from source repository, even in local case
207
207
208 stream: stream raw data uncompressed from repository (fast over
208 stream: stream raw data uncompressed from repository (fast over
209 LAN, slow over WAN)
209 LAN, slow over WAN)
210
210
211 rev: revision to clone up to (implies pull=True)
211 rev: revision to clone up to (implies pull=True)
212
212
213 update: update working directory after clone completes, if
213 update: update working directory after clone completes, if
214 destination is local repository (True means update to default rev,
214 destination is local repository (True means update to default rev,
215 anything else is treated as a revision)
215 anything else is treated as a revision)
216
216
217 branch: branches to clone
217 branch: branches to clone
218 """
218 """
219
219
220 if isinstance(source, str):
220 if isinstance(source, str):
221 origsource = ui.expandpath(source)
221 origsource = ui.expandpath(source)
222 source, branch = parseurl(origsource, branch)
222 source, branch = parseurl(origsource, branch)
223 src_repo = repository(ui, source)
223 src_repo = repository(ui, source)
224 else:
224 else:
225 src_repo = source
225 src_repo = source
226 branch = (None, branch or [])
226 branch = (None, branch or [])
227 origsource = source = src_repo.url()
227 origsource = source = src_repo.url()
228 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
228 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
229
229
230 if dest is None:
230 if dest is None:
231 dest = defaultdest(source)
231 dest = defaultdest(source)
232 ui.status(_("destination directory: %s\n") % dest)
232 ui.status(_("destination directory: %s\n") % dest)
233 else:
233 else:
234 dest = ui.expandpath(dest)
234 dest = ui.expandpath(dest)
235
235
236 dest = localpath(dest)
236 dest = localpath(dest)
237 source = localpath(source)
237 source = localpath(source)
238
238
239 if os.path.exists(dest):
239 if os.path.exists(dest):
240 if not os.path.isdir(dest):
240 if not os.path.isdir(dest):
241 raise util.Abort(_("destination '%s' already exists") % dest)
241 raise util.Abort(_("destination '%s' already exists") % dest)
242 elif os.listdir(dest):
242 elif os.listdir(dest):
243 raise util.Abort(_("destination '%s' is not empty") % dest)
243 raise util.Abort(_("destination '%s' is not empty") % dest)
244
244
245 class DirCleanup(object):
245 class DirCleanup(object):
246 def __init__(self, dir_):
246 def __init__(self, dir_):
247 self.rmtree = shutil.rmtree
247 self.rmtree = shutil.rmtree
248 self.dir_ = dir_
248 self.dir_ = dir_
249 def close(self):
249 def close(self):
250 self.dir_ = None
250 self.dir_ = None
251 def cleanup(self):
251 def cleanup(self):
252 if self.dir_:
252 if self.dir_:
253 self.rmtree(self.dir_, True)
253 self.rmtree(self.dir_, True)
254
254
255 src_lock = dest_lock = dir_cleanup = None
255 src_lock = dest_lock = dir_cleanup = None
256 try:
256 try:
257 if islocal(dest):
257 if islocal(dest):
258 dir_cleanup = DirCleanup(dest)
258 dir_cleanup = DirCleanup(dest)
259
259
260 abspath = origsource
260 abspath = origsource
261 copy = False
261 copy = False
262 if src_repo.cancopy() and islocal(dest):
262 if src_repo.cancopy() and islocal(dest):
263 abspath = os.path.abspath(util.drop_scheme('file', origsource))
263 abspath = os.path.abspath(util.drop_scheme('file', origsource))
264 copy = not pull and not rev
264 copy = not pull and not rev
265
265
266 if copy:
266 if copy:
267 try:
267 try:
268 # we use a lock here because if we race with commit, we
268 # we use a lock here because if we race with commit, we
269 # can end up with extra data in the cloned revlogs that's
269 # can end up with extra data in the cloned revlogs that's
270 # not pointed to by changesets, thus causing verify to
270 # not pointed to by changesets, thus causing verify to
271 # fail
271 # fail
272 src_lock = src_repo.lock(wait=False)
272 src_lock = src_repo.lock(wait=False)
273 except error.LockError:
273 except error.LockError:
274 copy = False
274 copy = False
275
275
276 if copy:
276 if copy:
277 src_repo.hook('preoutgoing', throw=True, source='clone')
277 src_repo.hook('preoutgoing', throw=True, source='clone')
278 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
278 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
279 if not os.path.exists(dest):
279 if not os.path.exists(dest):
280 os.mkdir(dest)
280 os.mkdir(dest)
281 else:
281 else:
282 # only clean up directories we create ourselves
282 # only clean up directories we create ourselves
283 dir_cleanup.dir_ = hgdir
283 dir_cleanup.dir_ = hgdir
284 try:
284 try:
285 dest_path = hgdir
285 dest_path = hgdir
286 os.mkdir(dest_path)
286 os.mkdir(dest_path)
287 except OSError, inst:
287 except OSError, inst:
288 if inst.errno == errno.EEXIST:
288 if inst.errno == errno.EEXIST:
289 dir_cleanup.close()
289 dir_cleanup.close()
290 raise util.Abort(_("destination '%s' already exists")
290 raise util.Abort(_("destination '%s' already exists")
291 % dest)
291 % dest)
292 raise
292 raise
293
293
294 hardlink = None
294 hardlink = None
295 num = 0
295 num = 0
296 for f in src_repo.store.copylist():
296 for f in src_repo.store.copylist():
297 src = os.path.join(src_repo.sharedpath, f)
297 src = os.path.join(src_repo.sharedpath, f)
298 dst = os.path.join(dest_path, f)
298 dst = os.path.join(dest_path, f)
299 dstbase = os.path.dirname(dst)
299 dstbase = os.path.dirname(dst)
300 if dstbase and not os.path.exists(dstbase):
300 if dstbase and not os.path.exists(dstbase):
301 os.mkdir(dstbase)
301 os.mkdir(dstbase)
302 if os.path.exists(src):
302 if os.path.exists(src):
303 if dst.endswith('data'):
303 if dst.endswith('data'):
304 # lock to avoid premature writing to the target
304 # lock to avoid premature writing to the target
305 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
305 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
306 hardlink, n = util.copyfiles(src, dst, hardlink)
306 hardlink, n = util.copyfiles(src, dst, hardlink)
307 num += n
307 num += n
308 if hardlink:
308 if hardlink:
309 ui.debug("linked %d files\n" % num)
309 ui.debug("linked %d files\n" % num)
310 else:
310 else:
311 ui.debug("copied %d files\n" % num)
311 ui.debug("copied %d files\n" % num)
312
312
313 # we need to re-init the repo after manually copying the data
313 # we need to re-init the repo after manually copying the data
314 # into it
314 # into it
315 dest_repo = repository(ui, dest)
315 dest_repo = repository(ui, dest)
316 src_repo.hook('outgoing', source='clone',
316 src_repo.hook('outgoing', source='clone',
317 node=node.hex(node.nullid))
317 node=node.hex(node.nullid))
318 else:
318 else:
319 try:
319 try:
320 dest_repo = repository(ui, dest, create=True)
320 dest_repo = repository(ui, dest, create=True)
321 except OSError, inst:
321 except OSError, inst:
322 if inst.errno == errno.EEXIST:
322 if inst.errno == errno.EEXIST:
323 dir_cleanup.close()
323 dir_cleanup.close()
324 raise util.Abort(_("destination '%s' already exists")
324 raise util.Abort(_("destination '%s' already exists")
325 % dest)
325 % dest)
326 raise
326 raise
327
327
328 revs = None
328 revs = None
329 if rev:
329 if rev:
330 if 'lookup' not in src_repo.capabilities:
330 if 'lookup' not in src_repo.capabilities:
331 raise util.Abort(_("src repository does not support "
331 raise util.Abort(_("src repository does not support "
332 "revision lookup and so doesn't "
332 "revision lookup and so doesn't "
333 "support clone by revision"))
333 "support clone by revision"))
334 revs = [src_repo.lookup(r) for r in rev]
334 revs = [src_repo.lookup(r) for r in rev]
335 checkout = revs[0]
335 checkout = revs[0]
336 if dest_repo.local():
336 if dest_repo.local():
337 dest_repo.clone(src_repo, heads=revs, stream=stream)
337 dest_repo.clone(src_repo, heads=revs, stream=stream)
338 elif src_repo.local():
338 elif src_repo.local():
339 src_repo.push(dest_repo, revs=revs)
339 src_repo.push(dest_repo, revs=revs)
340 else:
340 else:
341 raise util.Abort(_("clone from remote to remote not supported"))
341 raise util.Abort(_("clone from remote to remote not supported"))
342
342
343 if dir_cleanup:
343 if dir_cleanup:
344 dir_cleanup.close()
344 dir_cleanup.close()
345
345
346 if dest_repo.local():
346 if dest_repo.local():
347 fp = dest_repo.opener("hgrc", "w", text=True)
347 fp = dest_repo.opener("hgrc", "w", text=True)
348 fp.write("[paths]\n")
348 fp.write("[paths]\n")
349 fp.write("default = %s\n" % abspath)
349 fp.write("default = %s\n" % abspath)
350 fp.close()
350 fp.close()
351
351
352 dest_repo.ui.setconfig('paths', 'default', abspath)
352 dest_repo.ui.setconfig('paths', 'default', abspath)
353
353
354 if update:
354 if update:
355 if update is not True:
355 if update is not True:
356 checkout = update
356 checkout = update
357 if src_repo.local():
357 if src_repo.local():
358 checkout = src_repo.lookup(update)
358 checkout = src_repo.lookup(update)
359 for test in (checkout, 'default', 'tip'):
359 for test in (checkout, 'default', 'tip'):
360 if test is None:
360 if test is None:
361 continue
361 continue
362 try:
362 try:
363 uprev = dest_repo.lookup(test)
363 uprev = dest_repo.lookup(test)
364 break
364 break
365 except error.RepoLookupError:
365 except error.RepoLookupError:
366 continue
366 continue
367 bn = dest_repo[uprev].branch()
367 bn = dest_repo[uprev].branch()
368 dest_repo.ui.status(_("updating to branch %s\n")
368 dest_repo.ui.status(_("updating to branch %s\n")
369 % encoding.tolocal(bn))
369 % encoding.tolocal(bn))
370 _update(dest_repo, uprev)
370 _update(dest_repo, uprev)
371
371
372 return src_repo, dest_repo
372 return src_repo, dest_repo
373 finally:
373 finally:
374 release(src_lock, dest_lock)
374 release(src_lock, dest_lock)
375 if dir_cleanup is not None:
375 if dir_cleanup is not None:
376 dir_cleanup.cleanup()
376 dir_cleanup.cleanup()
377
377
378 def _showstats(repo, stats):
378 def _showstats(repo, stats):
379 repo.ui.status(_("%d files updated, %d files merged, "
379 repo.ui.status(_("%d files updated, %d files merged, "
380 "%d files removed, %d files unresolved\n") % stats)
380 "%d files removed, %d files unresolved\n") % stats)
381
381
382 def update(repo, node):
382 def update(repo, node):
383 """update the working directory to node, merging linear changes"""
383 """update the working directory to node, merging linear changes"""
384 stats = mergemod.update(repo, node, False, False, None)
384 stats = mergemod.update(repo, node, False, False, None)
385 _showstats(repo, stats)
385 _showstats(repo, stats)
386 if stats[3]:
386 if stats[3]:
387 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
387 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
388 return stats[3] > 0
388 return stats[3] > 0
389
389
390 # naming conflict in clone()
390 # naming conflict in clone()
391 _update = update
391 _update = update
392
392
393 def clean(repo, node, show_stats=True):
393 def clean(repo, node, show_stats=True):
394 """forcibly switch the working directory to node, clobbering changes"""
394 """forcibly switch the working directory to node, clobbering changes"""
395 stats = mergemod.update(repo, node, False, True, None)
395 stats = mergemod.update(repo, node, False, True, None)
396 if show_stats:
396 if show_stats:
397 _showstats(repo, stats)
397 _showstats(repo, stats)
398 return stats[3] > 0
398 return stats[3] > 0
399
399
400 def merge(repo, node, force=None, remind=True):
400 def merge(repo, node, force=None, remind=True):
401 """branch merge with node, resolving changes"""
401 """branch merge with node, resolving changes"""
402 stats = mergemod.update(repo, node, True, force, False)
402 stats = mergemod.update(repo, node, True, force, False)
403 _showstats(repo, stats)
403 _showstats(repo, stats)
404 if stats[3]:
404 if stats[3]:
405 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
405 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
406 "or 'hg update -C .' to abandon\n"))
406 "or 'hg update -C .' to abandon\n"))
407 elif remind:
407 elif remind:
408 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
408 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
409 return stats[3] > 0
409 return stats[3] > 0
410
410
411 def _incoming(displaychlist, subreporecurse, ui, repo, source,
412 opts, buffered=False):
413 """
414 Helper for incoming / gincoming.
415 displaychlist gets called with
416 (remoterepo, incomingchangesetlist, displayer) parameters,
417 and is supposed to contain only code that can't be unified.
418 """
419 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
420 other = repository(remoteui(repo, opts), source)
421 ui.status(_('comparing with %s\n') % url.hidepassword(source))
422 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
423
424 if revs:
425 revs = [other.lookup(rev) for rev in revs]
426 bundlename = opts["bundle"]
427 force = opts["force"]
428 tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force)
429 common, incoming, rheads = tmp
430 if not incoming:
431 try:
432 os.unlink(bundlename)
433 except:
434 pass
435 ui.status(_("no changes found\n"))
436 return subreporecurse()
437
438 bundle = None
439 if bundlename or not other.local():
440 # create a bundle (uncompressed if other repo is not local)
441
442 if revs is None and other.capable('changegroupsubset'):
443 revs = rheads
444
445 if revs is None:
446 cg = other.changegroup(incoming, "incoming")
447 else:
448 cg = other.changegroupsubset(incoming, revs, 'incoming')
449 bundletype = other.local() and "HG10BZ" or "HG10UN"
450 fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
451 # keep written bundle?
452 if bundlename:
453 bundle = None
454 if not other.local():
455 # use the created uncompressed bundlerepo
456 other = bundlerepo.bundlerepository(ui, repo.root, fname)
457
458 try:
459 chlist = other.changelog.nodesbetween(incoming, revs)[0]
460 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
461
462 # XXX once graphlog extension makes it into core,
463 # should be replaced by a if graph/else
464 displaychlist(other, chlist, displayer)
465
466 displayer.close()
467 finally:
468 if hasattr(other, 'close'):
469 other.close()
470 if bundle:
471 os.unlink(bundle)
472 subreporecurse()
473 return 0 # exit code is zero since we found incoming changes
474
411 def incoming(ui, repo, source, opts):
475 def incoming(ui, repo, source, opts):
412 def recurse():
476 def subreporecurse():
413 ret = 1
477 ret = 1
414 if opts.get('subrepos'):
478 if opts.get('subrepos'):
415 ctx = repo[None]
479 ctx = repo[None]
416 for subpath in sorted(ctx.substate):
480 for subpath in sorted(ctx.substate):
417 sub = ctx.sub(subpath)
481 sub = ctx.sub(subpath)
418 ret = min(ret, sub.incoming(ui, source, opts))
482 ret = min(ret, sub.incoming(ui, source, opts))
419 return ret
483 return ret
420
484
421 limit = cmdutil.loglimit(opts)
485 def display(other, chlist, displayer):
422 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
486 limit = cmdutil.loglimit(opts)
423 other = repository(remoteui(repo, opts), source)
424 ui.status(_('comparing with %s\n') % url.hidepassword(source))
425 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
426 if revs:
427 revs = [other.lookup(rev) for rev in revs]
428
429 tmp = discovery.findcommonincoming(repo, other, heads=revs,
430 force=opts.get('force'))
431 common, incoming, rheads = tmp
432 if not incoming:
433 try:
434 os.unlink(opts["bundle"])
435 except:
436 pass
437 ui.status(_("no changes found\n"))
438 return recurse()
439
440 cleanup = None
441 try:
442 fname = opts["bundle"]
443 if fname or not other.local():
444 # create a bundle (uncompressed if other repo is not local)
445
446 if revs is None and other.capable('changegroupsubset'):
447 revs = rheads
448
449 if revs is None:
450 cg = other.changegroup(incoming, "incoming")
451 else:
452 cg = other.changegroupsubset(incoming, revs, 'incoming')
453 bundletype = other.local() and "HG10BZ" or "HG10UN"
454 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
455 # keep written bundle?
456 if opts["bundle"]:
457 cleanup = None
458 if not other.local():
459 # use the created uncompressed bundlerepo
460 other = bundlerepo.bundlerepository(ui, repo.root, fname)
461
462 chlist = other.changelog.nodesbetween(incoming, revs)[0]
463 if opts.get('newest_first'):
487 if opts.get('newest_first'):
464 chlist.reverse()
488 chlist.reverse()
465 displayer = cmdutil.show_changeset(ui, other, opts)
466 count = 0
489 count = 0
467 for n in chlist:
490 for n in chlist:
468 if limit is not None and count >= limit:
491 if limit is not None and count >= limit:
469 break
492 break
470 parents = [p for p in other.changelog.parents(n) if p != nullid]
493 parents = [p for p in other.changelog.parents(n) if p != nullid]
471 if opts.get('no_merges') and len(parents) == 2:
494 if opts.get('no_merges') and len(parents) == 2:
472 continue
495 continue
473 count += 1
496 count += 1
474 displayer.show(other[n])
497 displayer.show(other[n])
475 displayer.close()
498 return _incoming(display, subreporecurse, ui, repo, source, opts)
476 finally:
477 if hasattr(other, 'close'):
478 other.close()
479 if cleanup:
480 os.unlink(cleanup)
481 recurse()
482 return 0 # exit code is zero since we found incoming changes
483
499
484 def outgoing(ui, repo, dest, opts):
500 def outgoing(ui, repo, dest, opts):
485 def recurse():
501 def recurse():
486 ret = 1
502 ret = 1
487 if opts.get('subrepos'):
503 if opts.get('subrepos'):
488 ctx = repo[None]
504 ctx = repo[None]
489 for subpath in sorted(ctx.substate):
505 for subpath in sorted(ctx.substate):
490 sub = ctx.sub(subpath)
506 sub = ctx.sub(subpath)
491 ret = min(ret, sub.outgoing(ui, dest, opts))
507 ret = min(ret, sub.outgoing(ui, dest, opts))
492 return ret
508 return ret
493
509
494 limit = cmdutil.loglimit(opts)
510 limit = cmdutil.loglimit(opts)
495 dest = ui.expandpath(dest or 'default-push', dest or 'default')
511 dest = ui.expandpath(dest or 'default-push', dest or 'default')
496 dest, branches = parseurl(dest, opts.get('branch'))
512 dest, branches = parseurl(dest, opts.get('branch'))
497 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
513 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
498 if revs:
514 if revs:
499 revs = [repo.lookup(rev) for rev in revs]
515 revs = [repo.lookup(rev) for rev in revs]
500
516
501 other = repository(remoteui(repo, opts), dest)
517 other = repository(remoteui(repo, opts), dest)
502 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
518 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
503 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
519 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
504 if not o:
520 if not o:
505 ui.status(_("no changes found\n"))
521 ui.status(_("no changes found\n"))
506 return recurse()
522 return recurse()
507
523
508 o = repo.changelog.nodesbetween(o, revs)[0]
524 o = repo.changelog.nodesbetween(o, revs)[0]
509 if opts.get('newest_first'):
525 if opts.get('newest_first'):
510 o.reverse()
526 o.reverse()
511 displayer = cmdutil.show_changeset(ui, repo, opts)
527 displayer = cmdutil.show_changeset(ui, repo, opts)
512 count = 0
528 count = 0
513 for n in o:
529 for n in o:
514 if limit is not None and count >= limit:
530 if limit is not None and count >= limit:
515 break
531 break
516 parents = [p for p in repo.changelog.parents(n) if p != nullid]
532 parents = [p for p in repo.changelog.parents(n) if p != nullid]
517 if opts.get('no_merges') and len(parents) == 2:
533 if opts.get('no_merges') and len(parents) == 2:
518 continue
534 continue
519 count += 1
535 count += 1
520 displayer.show(repo[n])
536 displayer.show(repo[n])
521 displayer.close()
537 displayer.close()
522 recurse()
538 recurse()
523 return 0 # exit code is zero since we found outgoing changes
539 return 0 # exit code is zero since we found outgoing changes
524
540
525 def revert(repo, node, choose):
541 def revert(repo, node, choose):
526 """revert changes to revision in node without updating dirstate"""
542 """revert changes to revision in node without updating dirstate"""
527 return mergemod.update(repo, node, False, True, choose)[3] > 0
543 return mergemod.update(repo, node, False, True, choose)[3] > 0
528
544
529 def verify(repo):
545 def verify(repo):
530 """verify the consistency of a repository"""
546 """verify the consistency of a repository"""
531 return verifymod.verify(repo)
547 return verifymod.verify(repo)
532
548
533 def remoteui(src, opts):
549 def remoteui(src, opts):
534 'build a remote ui from ui or repo and opts'
550 'build a remote ui from ui or repo and opts'
535 if hasattr(src, 'baseui'): # looks like a repository
551 if hasattr(src, 'baseui'): # looks like a repository
536 dst = src.baseui.copy() # drop repo-specific config
552 dst = src.baseui.copy() # drop repo-specific config
537 src = src.ui # copy target options from repo
553 src = src.ui # copy target options from repo
538 else: # assume it's a global ui object
554 else: # assume it's a global ui object
539 dst = src.copy() # keep all global options
555 dst = src.copy() # keep all global options
540
556
541 # copy ssh-specific options
557 # copy ssh-specific options
542 for o in 'ssh', 'remotecmd':
558 for o in 'ssh', 'remotecmd':
543 v = opts.get(o) or src.config('ui', o)
559 v = opts.get(o) or src.config('ui', o)
544 if v:
560 if v:
545 dst.setconfig("ui", o, v)
561 dst.setconfig("ui", o, v)
546
562
547 # copy bundle-specific options
563 # copy bundle-specific options
548 r = src.config('bundle', 'mainreporoot')
564 r = src.config('bundle', 'mainreporoot')
549 if r:
565 if r:
550 dst.setconfig('bundle', 'mainreporoot', r)
566 dst.setconfig('bundle', 'mainreporoot', r)
551
567
552 # copy auth and http_proxy section settings
568 # copy auth and http_proxy section settings
553 for sect in ('auth', 'http_proxy'):
569 for sect in ('auth', 'http_proxy'):
554 for key, val in src.configitems(sect):
570 for key, val in src.configitems(sect):
555 dst.setconfig(sect, key, val)
571 dst.setconfig(sect, key, val)
556
572
557 return dst
573 return dst
General Comments 0
You need to be logged in to leave comments. Login now