outgoing: unify common graphlog.outgoing and hg.outgoing code
Nicolas Dumazet
r12735:8888e56a default
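This changeset pulls the destination lookup and outgoing-changeset discovery that graphlog's goutgoing() and hg.outgoing() used to duplicate into a single helper, hg._outgoing(). A condensed sketch of the resulting call flow, paraphrasing the hunks below rather than reproducing them verbatim (the "..." parts stand for the display code each caller keeps):

    # hg._outgoing() resolves dest, looks up any requested revs and returns
    # either None (nothing to push) or the list of outgoing changelog nodes.
    def _outgoing(ui, repo, dest, opts):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = parseurl(dest, opts.get('branch'))
        revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        other = repository(remoteui(repo, opts), dest)
        ui.status(_('comparing with %s\n') % url.hidepassword(dest))
        o = discovery.findoutgoing(repo, other, force=opts.get('force'))
        if not o:
            ui.status(_("no changes found\n"))
            return None
        return repo.changelog.nodesbetween(o, revs)[0]

    # both callers now reduce to a None check plus their own display logic
    def outgoing(ui, repo, dest, opts):          # mercurial/hg.py
        o = _outgoing(ui, repo, dest, opts)
        if o is None:
            return recurse()                     # subrepo recursion, as before
        ...                                      # print the changesets as before

    def goutgoing(ui, repo, dest=None, **opts):  # hgext/graphlog.py
        o = hg._outgoing(ui, repo, dest, opts)
        if o is None:
            return
        revdag = graphrevs(repo, o, opts)
        ...                                      # draw the ASCII graph as before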
hgext/graphlog.py
@@ -1,346 +1,337 @@
1 # ASCII graph log extension for Mercurial
1 # ASCII graph log extension for Mercurial
2 #
2 #
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to view revision graphs from a shell
8 '''command to view revision graphs from a shell
9
9
10 This extension adds a --graph option to the incoming, outgoing and log
10 This extension adds a --graph option to the incoming, outgoing and log
11 commands. When this options is given, an ASCII representation of the
11 commands. When this options is given, an ASCII representation of the
12 revision graph is also shown.
12 revision graph is also shown.
13 '''
13 '''
14
14
15 import os
15 import os
16 from mercurial.cmdutil import revrange, show_changeset
16 from mercurial.cmdutil import revrange, show_changeset
17 from mercurial.commands import templateopts
17 from mercurial.commands import templateopts
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.node import nullrev
19 from mercurial.node import nullrev
20 from mercurial import cmdutil, commands, extensions
20 from mercurial import cmdutil, commands, extensions
21 - from mercurial import hg, url, util, graphmod, discovery
21 + from mercurial import hg, util, graphmod
22
22
23 ASCIIDATA = 'ASC'
23 ASCIIDATA = 'ASC'
24
24
25 def asciiedges(seen, rev, parents):
25 def asciiedges(seen, rev, parents):
26 """adds edge info to changelog DAG walk suitable for ascii()"""
26 """adds edge info to changelog DAG walk suitable for ascii()"""
27 if rev not in seen:
27 if rev not in seen:
28 seen.append(rev)
28 seen.append(rev)
29 nodeidx = seen.index(rev)
29 nodeidx = seen.index(rev)
30
30
31 knownparents = []
31 knownparents = []
32 newparents = []
32 newparents = []
33 for parent in parents:
33 for parent in parents:
34 if parent in seen:
34 if parent in seen:
35 knownparents.append(parent)
35 knownparents.append(parent)
36 else:
36 else:
37 newparents.append(parent)
37 newparents.append(parent)
38
38
39 ncols = len(seen)
39 ncols = len(seen)
40 seen[nodeidx:nodeidx + 1] = newparents
40 seen[nodeidx:nodeidx + 1] = newparents
41 edges = [(nodeidx, seen.index(p)) for p in knownparents]
41 edges = [(nodeidx, seen.index(p)) for p in knownparents]
42
42
43 if len(newparents) > 0:
43 if len(newparents) > 0:
44 edges.append((nodeidx, nodeidx))
44 edges.append((nodeidx, nodeidx))
45 if len(newparents) > 1:
45 if len(newparents) > 1:
46 edges.append((nodeidx, nodeidx + 1))
46 edges.append((nodeidx, nodeidx + 1))
47
47
48 nmorecols = len(seen) - ncols
48 nmorecols = len(seen) - ncols
49 return nodeidx, edges, ncols, nmorecols
49 return nodeidx, edges, ncols, nmorecols
50
50
51 def fix_long_right_edges(edges):
51 def fix_long_right_edges(edges):
52 for (i, (start, end)) in enumerate(edges):
52 for (i, (start, end)) in enumerate(edges):
53 if end > start:
53 if end > start:
54 edges[i] = (start, end + 1)
54 edges[i] = (start, end + 1)
55
55
56 def get_nodeline_edges_tail(
56 def get_nodeline_edges_tail(
57 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
57 node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
58 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
58 if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
59 # Still going in the same non-vertical direction.
59 # Still going in the same non-vertical direction.
60 if n_columns_diff == -1:
60 if n_columns_diff == -1:
61 start = max(node_index + 1, p_node_index)
61 start = max(node_index + 1, p_node_index)
62 tail = ["|", " "] * (start - node_index - 1)
62 tail = ["|", " "] * (start - node_index - 1)
63 tail.extend(["/", " "] * (n_columns - start))
63 tail.extend(["/", " "] * (n_columns - start))
64 return tail
64 return tail
65 else:
65 else:
66 return ["\\", " "] * (n_columns - node_index - 1)
66 return ["\\", " "] * (n_columns - node_index - 1)
67 else:
67 else:
68 return ["|", " "] * (n_columns - node_index - 1)
68 return ["|", " "] * (n_columns - node_index - 1)
69
69
70 def draw_edges(edges, nodeline, interline):
70 def draw_edges(edges, nodeline, interline):
71 for (start, end) in edges:
71 for (start, end) in edges:
72 if start == end + 1:
72 if start == end + 1:
73 interline[2 * end + 1] = "/"
73 interline[2 * end + 1] = "/"
74 elif start == end - 1:
74 elif start == end - 1:
75 interline[2 * start + 1] = "\\"
75 interline[2 * start + 1] = "\\"
76 elif start == end:
76 elif start == end:
77 interline[2 * start] = "|"
77 interline[2 * start] = "|"
78 else:
78 else:
79 nodeline[2 * end] = "+"
79 nodeline[2 * end] = "+"
80 if start > end:
80 if start > end:
81 (start, end) = (end, start)
81 (start, end) = (end, start)
82 for i in range(2 * start + 1, 2 * end):
82 for i in range(2 * start + 1, 2 * end):
83 if nodeline[i] != "+":
83 if nodeline[i] != "+":
84 nodeline[i] = "-"
84 nodeline[i] = "-"
85
85
86 def get_padding_line(ni, n_columns, edges):
86 def get_padding_line(ni, n_columns, edges):
87 line = []
87 line = []
88 line.extend(["|", " "] * ni)
88 line.extend(["|", " "] * ni)
89 if (ni, ni - 1) in edges or (ni, ni) in edges:
89 if (ni, ni - 1) in edges or (ni, ni) in edges:
90 # (ni, ni - 1) (ni, ni)
90 # (ni, ni - 1) (ni, ni)
91 # | | | | | | | |
91 # | | | | | | | |
92 # +---o | | o---+
92 # +---o | | o---+
93 # | | c | | c | |
93 # | | c | | c | |
94 # | |/ / | |/ /
94 # | |/ / | |/ /
95 # | | | | | |
95 # | | | | | |
96 c = "|"
96 c = "|"
97 else:
97 else:
98 c = " "
98 c = " "
99 line.extend([c, " "])
99 line.extend([c, " "])
100 line.extend(["|", " "] * (n_columns - ni - 1))
100 line.extend(["|", " "] * (n_columns - ni - 1))
101 return line
101 return line
102
102
103 def asciistate():
103 def asciistate():
104 """returns the initial value for the "state" argument to ascii()"""
104 """returns the initial value for the "state" argument to ascii()"""
105 return [0, 0]
105 return [0, 0]
106
106
107 def ascii(ui, state, type, char, text, coldata):
107 def ascii(ui, state, type, char, text, coldata):
108 """prints an ASCII graph of the DAG
108 """prints an ASCII graph of the DAG
109
109
110 takes the following arguments (one call per node in the graph):
110 takes the following arguments (one call per node in the graph):
111
111
112 - ui to write to
112 - ui to write to
113 - Somewhere to keep the needed state in (init to asciistate())
113 - Somewhere to keep the needed state in (init to asciistate())
114 - Column of the current node in the set of ongoing edges.
114 - Column of the current node in the set of ongoing edges.
115 - Type indicator of node data == ASCIIDATA.
115 - Type indicator of node data == ASCIIDATA.
116 - Payload: (char, lines):
116 - Payload: (char, lines):
117 - Character to use as node's symbol.
117 - Character to use as node's symbol.
118 - List of lines to display as the node's text.
118 - List of lines to display as the node's text.
119 - Edges; a list of (col, next_col) indicating the edges between
119 - Edges; a list of (col, next_col) indicating the edges between
120 the current node and its parents.
120 the current node and its parents.
121 - Number of columns (ongoing edges) in the current revision.
121 - Number of columns (ongoing edges) in the current revision.
122 - The difference between the number of columns (ongoing edges)
122 - The difference between the number of columns (ongoing edges)
123 in the next revision and the number of columns (ongoing edges)
123 in the next revision and the number of columns (ongoing edges)
124 in the current revision. That is: -1 means one column removed;
124 in the current revision. That is: -1 means one column removed;
125 0 means no columns added or removed; 1 means one column added.
125 0 means no columns added or removed; 1 means one column added.
126 """
126 """
127
127
128 idx, edges, ncols, coldiff = coldata
128 idx, edges, ncols, coldiff = coldata
129 assert -2 < coldiff < 2
129 assert -2 < coldiff < 2
130 if coldiff == -1:
130 if coldiff == -1:
131 # Transform
131 # Transform
132 #
132 #
133 # | | | | | |
133 # | | | | | |
134 # o | | into o---+
134 # o | | into o---+
135 # |X / |/ /
135 # |X / |/ /
136 # | | | |
136 # | | | |
137 fix_long_right_edges(edges)
137 fix_long_right_edges(edges)
138
138
139 # add_padding_line says whether to rewrite
139 # add_padding_line says whether to rewrite
140 #
140 #
141 # | | | | | | | |
141 # | | | | | | | |
142 # | o---+ into | o---+
142 # | o---+ into | o---+
143 # | / / | | | # <--- padding line
143 # | / / | | | # <--- padding line
144 # o | | | / /
144 # o | | | / /
145 # o | |
145 # o | |
146 add_padding_line = (len(text) > 2 and coldiff == -1 and
146 add_padding_line = (len(text) > 2 and coldiff == -1 and
147 [x for (x, y) in edges if x + 1 < y])
147 [x for (x, y) in edges if x + 1 < y])
148
148
149 # fix_nodeline_tail says whether to rewrite
149 # fix_nodeline_tail says whether to rewrite
150 #
150 #
151 # | | o | | | | o | |
151 # | | o | | | | o | |
152 # | | |/ / | | |/ /
152 # | | |/ / | | |/ /
153 # | o | | into | o / / # <--- fixed nodeline tail
153 # | o | | into | o / / # <--- fixed nodeline tail
154 # | |/ / | |/ /
154 # | |/ / | |/ /
155 # o | | o | |
155 # o | | o | |
156 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
156 fix_nodeline_tail = len(text) <= 2 and not add_padding_line
157
157
158 # nodeline is the line containing the node character (typically o)
158 # nodeline is the line containing the node character (typically o)
159 nodeline = ["|", " "] * idx
159 nodeline = ["|", " "] * idx
160 nodeline.extend([char, " "])
160 nodeline.extend([char, " "])
161
161
162 nodeline.extend(
162 nodeline.extend(
163 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
163 get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
164 state[0], fix_nodeline_tail))
164 state[0], fix_nodeline_tail))
165
165
166 # shift_interline is the line containing the non-vertical
166 # shift_interline is the line containing the non-vertical
167 # edges between this entry and the next
167 # edges between this entry and the next
168 shift_interline = ["|", " "] * idx
168 shift_interline = ["|", " "] * idx
169 if coldiff == -1:
169 if coldiff == -1:
170 n_spaces = 1
170 n_spaces = 1
171 edge_ch = "/"
171 edge_ch = "/"
172 elif coldiff == 0:
172 elif coldiff == 0:
173 n_spaces = 2
173 n_spaces = 2
174 edge_ch = "|"
174 edge_ch = "|"
175 else:
175 else:
176 n_spaces = 3
176 n_spaces = 3
177 edge_ch = "\\"
177 edge_ch = "\\"
178 shift_interline.extend(n_spaces * [" "])
178 shift_interline.extend(n_spaces * [" "])
179 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
179 shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
180
180
181 # draw edges from the current node to its parents
181 # draw edges from the current node to its parents
182 draw_edges(edges, nodeline, shift_interline)
182 draw_edges(edges, nodeline, shift_interline)
183
183
184 # lines is the list of all graph lines to print
184 # lines is the list of all graph lines to print
185 lines = [nodeline]
185 lines = [nodeline]
186 if add_padding_line:
186 if add_padding_line:
187 lines.append(get_padding_line(idx, ncols, edges))
187 lines.append(get_padding_line(idx, ncols, edges))
188 lines.append(shift_interline)
188 lines.append(shift_interline)
189
189
190 # make sure that there are as many graph lines as there are
190 # make sure that there are as many graph lines as there are
191 # log strings
191 # log strings
192 while len(text) < len(lines):
192 while len(text) < len(lines):
193 text.append("")
193 text.append("")
194 if len(lines) < len(text):
194 if len(lines) < len(text):
195 extra_interline = ["|", " "] * (ncols + coldiff)
195 extra_interline = ["|", " "] * (ncols + coldiff)
196 while len(lines) < len(text):
196 while len(lines) < len(text):
197 lines.append(extra_interline)
197 lines.append(extra_interline)
198
198
199 # print lines
199 # print lines
200 indentation_level = max(ncols, ncols + coldiff)
200 indentation_level = max(ncols, ncols + coldiff)
201 for (line, logstr) in zip(lines, text):
201 for (line, logstr) in zip(lines, text):
202 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
202 ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
203 ui.write(ln.rstrip() + '\n')
203 ui.write(ln.rstrip() + '\n')
204
204
205 # ... and start over
205 # ... and start over
206 state[0] = coldiff
206 state[0] = coldiff
207 state[1] = idx
207 state[1] = idx
208
208
209 def get_revs(repo, rev_opt):
209 def get_revs(repo, rev_opt):
210 if rev_opt:
210 if rev_opt:
211 revs = revrange(repo, rev_opt)
211 revs = revrange(repo, rev_opt)
212 if len(revs) == 0:
212 if len(revs) == 0:
213 return (nullrev, nullrev)
213 return (nullrev, nullrev)
214 return (max(revs), min(revs))
214 return (max(revs), min(revs))
215 else:
215 else:
216 return (len(repo) - 1, 0)
216 return (len(repo) - 1, 0)
217
217
218 def check_unsupported_flags(opts):
218 def check_unsupported_flags(opts):
219 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
219 for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
220 "only_merges", "user", "branch", "only_branch", "prune",
220 "only_merges", "user", "branch", "only_branch", "prune",
221 "newest_first", "no_merges", "include", "exclude"]:
221 "newest_first", "no_merges", "include", "exclude"]:
222 if op in opts and opts[op]:
222 if op in opts and opts[op]:
223 raise util.Abort(_("--graph option is incompatible with --%s")
223 raise util.Abort(_("--graph option is incompatible with --%s")
224 % op.replace("_", "-"))
224 % op.replace("_", "-"))
225
225
226 def generate(ui, dag, displayer, showparents, edgefn):
226 def generate(ui, dag, displayer, showparents, edgefn):
227 seen, state = [], asciistate()
227 seen, state = [], asciistate()
228 for rev, type, ctx, parents in dag:
228 for rev, type, ctx, parents in dag:
229 char = ctx.node() in showparents and '@' or 'o'
229 char = ctx.node() in showparents and '@' or 'o'
230 displayer.show(ctx)
230 displayer.show(ctx)
231 lines = displayer.hunk.pop(rev).split('\n')[:-1]
231 lines = displayer.hunk.pop(rev).split('\n')[:-1]
232 displayer.flush(rev)
232 displayer.flush(rev)
233 ascii(ui, state, type, char, lines, edgefn(seen, rev, parents))
233 ascii(ui, state, type, char, lines, edgefn(seen, rev, parents))
234 displayer.close()
234 displayer.close()
235
235
236 def graphlog(ui, repo, path=None, **opts):
236 def graphlog(ui, repo, path=None, **opts):
237 """show revision history alongside an ASCII revision graph
237 """show revision history alongside an ASCII revision graph
238
238
239 Print a revision history alongside a revision graph drawn with
239 Print a revision history alongside a revision graph drawn with
240 ASCII characters.
240 ASCII characters.
241
241
242 Nodes printed as an @ character are parents of the working
242 Nodes printed as an @ character are parents of the working
243 directory.
243 directory.
244 """
244 """
245
245
246 check_unsupported_flags(opts)
246 check_unsupported_flags(opts)
247 limit = cmdutil.loglimit(opts)
247 limit = cmdutil.loglimit(opts)
248 start, stop = get_revs(repo, opts["rev"])
248 start, stop = get_revs(repo, opts["rev"])
249 if start == nullrev:
249 if start == nullrev:
250 return
250 return
251
251
252 if path:
252 if path:
253 path = util.canonpath(repo.root, os.getcwd(), path)
253 path = util.canonpath(repo.root, os.getcwd(), path)
254 if path: # could be reset in canonpath
254 if path: # could be reset in canonpath
255 revdag = graphmod.filerevs(repo, path, start, stop, limit)
255 revdag = graphmod.filerevs(repo, path, start, stop, limit)
256 else:
256 else:
257 if limit is not None:
257 if limit is not None:
258 stop = max(stop, start - limit + 1)
258 stop = max(stop, start - limit + 1)
259 revdag = graphmod.revisions(repo, start, stop)
259 revdag = graphmod.revisions(repo, start, stop)
260
260
261 displayer = show_changeset(ui, repo, opts, buffered=True)
261 displayer = show_changeset(ui, repo, opts, buffered=True)
262 showparents = [ctx.node() for ctx in repo[None].parents()]
262 showparents = [ctx.node() for ctx in repo[None].parents()]
263 generate(ui, revdag, displayer, showparents, asciiedges)
263 generate(ui, revdag, displayer, showparents, asciiedges)
264
264
265 def graphrevs(repo, nodes, opts):
265 def graphrevs(repo, nodes, opts):
266 limit = cmdutil.loglimit(opts)
266 limit = cmdutil.loglimit(opts)
267 nodes.reverse()
267 nodes.reverse()
268 if limit is not None:
268 if limit is not None:
269 nodes = nodes[:limit]
269 nodes = nodes[:limit]
270 return graphmod.nodes(repo, nodes)
270 return graphmod.nodes(repo, nodes)
271
271
272 def goutgoing(ui, repo, dest=None, **opts):
272 def goutgoing(ui, repo, dest=None, **opts):
273 """show the outgoing changesets alongside an ASCII revision graph
273 """show the outgoing changesets alongside an ASCII revision graph
274
274
275 Print the outgoing changesets alongside a revision graph drawn with
275 Print the outgoing changesets alongside a revision graph drawn with
276 ASCII characters.
276 ASCII characters.
277
277
278 Nodes printed as an @ character are parents of the working
278 Nodes printed as an @ character are parents of the working
279 directory.
279 directory.
280 """
280 """
281
281
282 check_unsupported_flags(opts)
282 check_unsupported_flags(opts)
283 -     dest = ui.expandpath(dest or 'default-push', dest or 'default')
284 -     dest, branches = hg.parseurl(dest, opts.get('branch'))
285 -     revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
286 -     other = hg.repository(hg.remoteui(ui, opts), dest)
287 -     if revs:
288 -         revs = [repo.lookup(rev) for rev in revs]
289 -     ui.status(_('comparing with %s\n') % url.hidepassword(dest))
290 -     o = discovery.findoutgoing(repo, other, force=opts.get('force'))
291 -     if not o:
292 -         ui.status(_("no changes found\n"))
293 -         return
283 +     o = hg._outgoing(ui, repo, dest, opts)
284 +     if o is None:
285 +         return
294
286
295 -     o = repo.changelog.nodesbetween(o, revs)[0]
296 revdag = graphrevs(repo, o, opts)
287 revdag = graphrevs(repo, o, opts)
297 displayer = show_changeset(ui, repo, opts, buffered=True)
288 displayer = show_changeset(ui, repo, opts, buffered=True)
298 showparents = [ctx.node() for ctx in repo[None].parents()]
289 showparents = [ctx.node() for ctx in repo[None].parents()]
299 generate(ui, revdag, displayer, showparents, asciiedges)
290 generate(ui, revdag, displayer, showparents, asciiedges)
300
291
301 def gincoming(ui, repo, source="default", **opts):
292 def gincoming(ui, repo, source="default", **opts):
302 """show the incoming changesets alongside an ASCII revision graph
293 """show the incoming changesets alongside an ASCII revision graph
303
294
304 Print the incoming changesets alongside a revision graph drawn with
295 Print the incoming changesets alongside a revision graph drawn with
305 ASCII characters.
296 ASCII characters.
306
297
307 Nodes printed as an @ character are parents of the working
298 Nodes printed as an @ character are parents of the working
308 directory.
299 directory.
309 """
300 """
310 def subreporecurse():
301 def subreporecurse():
311 return 1
302 return 1
312
303
313 check_unsupported_flags(opts)
304 check_unsupported_flags(opts)
314 def display(other, chlist, displayer):
305 def display(other, chlist, displayer):
315 revdag = graphrevs(other, chlist, opts)
306 revdag = graphrevs(other, chlist, opts)
316 showparents = [ctx.node() for ctx in repo[None].parents()]
307 showparents = [ctx.node() for ctx in repo[None].parents()]
317 generate(ui, revdag, displayer, showparents, asciiedges)
308 generate(ui, revdag, displayer, showparents, asciiedges)
318
309
319 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
310 hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
320
311
321 def uisetup(ui):
312 def uisetup(ui):
322 '''Initialize the extension.'''
313 '''Initialize the extension.'''
323 _wrapcmd(ui, 'log', commands.table, graphlog)
314 _wrapcmd(ui, 'log', commands.table, graphlog)
324 _wrapcmd(ui, 'incoming', commands.table, gincoming)
315 _wrapcmd(ui, 'incoming', commands.table, gincoming)
325 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
316 _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
326
317
327 def _wrapcmd(ui, cmd, table, wrapfn):
318 def _wrapcmd(ui, cmd, table, wrapfn):
328 '''wrap the command'''
319 '''wrap the command'''
329 def graph(orig, *args, **kwargs):
320 def graph(orig, *args, **kwargs):
330 if kwargs['graph']:
321 if kwargs['graph']:
331 return wrapfn(*args, **kwargs)
322 return wrapfn(*args, **kwargs)
332 return orig(*args, **kwargs)
323 return orig(*args, **kwargs)
333 entry = extensions.wrapcommand(table, cmd, graph)
324 entry = extensions.wrapcommand(table, cmd, graph)
334 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
325 entry[1].append(('G', 'graph', None, _("show the revision DAG")))
335
326
336 cmdtable = {
327 cmdtable = {
337 "glog":
328 "glog":
338 (graphlog,
329 (graphlog,
339 [('l', 'limit', '',
330 [('l', 'limit', '',
340 _('limit number of changes displayed'), _('NUM')),
331 _('limit number of changes displayed'), _('NUM')),
341 ('p', 'patch', False, _('show patch')),
332 ('p', 'patch', False, _('show patch')),
342 ('r', 'rev', [],
333 ('r', 'rev', [],
343 _('show the specified revision or range'), _('REV')),
334 _('show the specified revision or range'), _('REV')),
344 ] + templateopts,
335 ] + templateopts,
345 _('hg glog [OPTION]... [FILE]')),
336 _('hg glog [OPTION]... [FILE]')),
346 }
337 }
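For context, the graphlog extension diffed above only takes effect once it is enabled in the configuration; uisetup() then adds a -G/--graph flag to the log, incoming and outgoing commands. A minimal usage sketch, assuming graphlog ships with the installed Mercurial (as it did in this era):

    [extensions]
    graphlog =

    $ hg outgoing --graph    # dispatched to goutgoing() above by _wrapcmd()
    $ hg glog -l 5           # the standalone command registered in cmdtable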
mercurial/hg.py
@@ -1,547 +1,553 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid, nullrev, short
11 from node import hex, nullid, nullrev, short
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import lock, util, extensions, error, encoding, node
13 import lock, util, extensions, error, encoding, node
14 import cmdutil, discovery, url
14 import cmdutil, discovery, url
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
19 def _local(path):
19 def _local(path):
20 path = util.expandpath(util.drop_scheme('file', path))
20 path = util.expandpath(util.drop_scheme('file', path))
21 return (os.path.isfile(path) and bundlerepo or localrepo)
21 return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
23 def addbranchrevs(lrepo, repo, branches, revs):
23 def addbranchrevs(lrepo, repo, branches, revs):
24 hashbranch, branches = branches
24 hashbranch, branches = branches
25 if not hashbranch and not branches:
25 if not hashbranch and not branches:
26 return revs or None, revs and revs[0] or None
26 return revs or None, revs and revs[0] or None
27 revs = revs and list(revs) or []
27 revs = revs and list(revs) or []
28 if not repo.capable('branchmap'):
28 if not repo.capable('branchmap'):
29 if branches:
29 if branches:
30 raise util.Abort(_("remote branch lookup not supported"))
30 raise util.Abort(_("remote branch lookup not supported"))
31 revs.append(hashbranch)
31 revs.append(hashbranch)
32 return revs, revs[0]
32 return revs, revs[0]
33 branchmap = repo.branchmap()
33 branchmap = repo.branchmap()
34
34
35 def primary(butf8):
35 def primary(butf8):
36 if butf8 == '.':
36 if butf8 == '.':
37 if not lrepo or not lrepo.local():
37 if not lrepo or not lrepo.local():
38 raise util.Abort(_("dirstate branch not accessible"))
38 raise util.Abort(_("dirstate branch not accessible"))
39 butf8 = lrepo.dirstate.branch()
39 butf8 = lrepo.dirstate.branch()
40 if butf8 in branchmap:
40 if butf8 in branchmap:
41 revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
41 revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
42 return True
42 return True
43 else:
43 else:
44 return False
44 return False
45
45
46 for branch in branches:
46 for branch in branches:
47 butf8 = encoding.fromlocal(branch)
47 butf8 = encoding.fromlocal(branch)
48 if not primary(butf8):
48 if not primary(butf8):
49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
50 if hashbranch:
50 if hashbranch:
51 butf8 = encoding.fromlocal(hashbranch)
51 butf8 = encoding.fromlocal(hashbranch)
52 if not primary(butf8):
52 if not primary(butf8):
53 revs.append(hashbranch)
53 revs.append(hashbranch)
54 return revs, revs[0]
54 return revs, revs[0]
55
55
56 def parseurl(url, branches=None):
56 def parseurl(url, branches=None):
57 '''parse url#branch, returning (url, (branch, branches))'''
57 '''parse url#branch, returning (url, (branch, branches))'''
58
58
59 if '#' not in url:
59 if '#' not in url:
60 return url, (None, branches or [])
60 return url, (None, branches or [])
61 url, branch = url.split('#', 1)
61 url, branch = url.split('#', 1)
62 return url, (branch, branches or [])
62 return url, (branch, branches or [])
63
63
64 schemes = {
64 schemes = {
65 'bundle': bundlerepo,
65 'bundle': bundlerepo,
66 'file': _local,
66 'file': _local,
67 'http': httprepo,
67 'http': httprepo,
68 'https': httprepo,
68 'https': httprepo,
69 'ssh': sshrepo,
69 'ssh': sshrepo,
70 'static-http': statichttprepo,
70 'static-http': statichttprepo,
71 }
71 }
72
72
73 def _lookup(path):
73 def _lookup(path):
74 scheme = 'file'
74 scheme = 'file'
75 if path:
75 if path:
76 c = path.find(':')
76 c = path.find(':')
77 if c > 0:
77 if c > 0:
78 scheme = path[:c]
78 scheme = path[:c]
79 thing = schemes.get(scheme) or schemes['file']
79 thing = schemes.get(scheme) or schemes['file']
80 try:
80 try:
81 return thing(path)
81 return thing(path)
82 except TypeError:
82 except TypeError:
83 return thing
83 return thing
84
84
85 def islocal(repo):
85 def islocal(repo):
86 '''return true if repo or path is local'''
86 '''return true if repo or path is local'''
87 if isinstance(repo, str):
87 if isinstance(repo, str):
88 try:
88 try:
89 return _lookup(repo).islocal(repo)
89 return _lookup(repo).islocal(repo)
90 except AttributeError:
90 except AttributeError:
91 return False
91 return False
92 return repo.local()
92 return repo.local()
93
93
94 def repository(ui, path='', create=False):
94 def repository(ui, path='', create=False):
95 """return a repository object for the specified path"""
95 """return a repository object for the specified path"""
96 repo = _lookup(path).instance(ui, path, create)
96 repo = _lookup(path).instance(ui, path, create)
97 ui = getattr(repo, "ui", ui)
97 ui = getattr(repo, "ui", ui)
98 for name, module in extensions.extensions():
98 for name, module in extensions.extensions():
99 hook = getattr(module, 'reposetup', None)
99 hook = getattr(module, 'reposetup', None)
100 if hook:
100 if hook:
101 hook(ui, repo)
101 hook(ui, repo)
102 return repo
102 return repo
103
103
104 def defaultdest(source):
104 def defaultdest(source):
105 '''return default destination of clone if none is given'''
105 '''return default destination of clone if none is given'''
106 return os.path.basename(os.path.normpath(source))
106 return os.path.basename(os.path.normpath(source))
107
107
108 def localpath(path):
108 def localpath(path):
109 if path.startswith('file://localhost/'):
109 if path.startswith('file://localhost/'):
110 return path[16:]
110 return path[16:]
111 if path.startswith('file://'):
111 if path.startswith('file://'):
112 return path[7:]
112 return path[7:]
113 if path.startswith('file:'):
113 if path.startswith('file:'):
114 return path[5:]
114 return path[5:]
115 return path
115 return path
116
116
117 def share(ui, source, dest=None, update=True):
117 def share(ui, source, dest=None, update=True):
118 '''create a shared repository'''
118 '''create a shared repository'''
119
119
120 if not islocal(source):
120 if not islocal(source):
121 raise util.Abort(_('can only share local repositories'))
121 raise util.Abort(_('can only share local repositories'))
122
122
123 if not dest:
123 if not dest:
124 dest = defaultdest(source)
124 dest = defaultdest(source)
125 else:
125 else:
126 dest = ui.expandpath(dest)
126 dest = ui.expandpath(dest)
127
127
128 if isinstance(source, str):
128 if isinstance(source, str):
129 origsource = ui.expandpath(source)
129 origsource = ui.expandpath(source)
130 source, branches = parseurl(origsource)
130 source, branches = parseurl(origsource)
131 srcrepo = repository(ui, source)
131 srcrepo = repository(ui, source)
132 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
132 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
133 else:
133 else:
134 srcrepo = source
134 srcrepo = source
135 origsource = source = srcrepo.url()
135 origsource = source = srcrepo.url()
136 checkout = None
136 checkout = None
137
137
138 sharedpath = srcrepo.sharedpath # if our source is already sharing
138 sharedpath = srcrepo.sharedpath # if our source is already sharing
139
139
140 root = os.path.realpath(dest)
140 root = os.path.realpath(dest)
141 roothg = os.path.join(root, '.hg')
141 roothg = os.path.join(root, '.hg')
142
142
143 if os.path.exists(roothg):
143 if os.path.exists(roothg):
144 raise util.Abort(_('destination already exists'))
144 raise util.Abort(_('destination already exists'))
145
145
146 if not os.path.isdir(root):
146 if not os.path.isdir(root):
147 os.mkdir(root)
147 os.mkdir(root)
148 os.mkdir(roothg)
148 os.mkdir(roothg)
149
149
150 requirements = ''
150 requirements = ''
151 try:
151 try:
152 requirements = srcrepo.opener('requires').read()
152 requirements = srcrepo.opener('requires').read()
153 except IOError, inst:
153 except IOError, inst:
154 if inst.errno != errno.ENOENT:
154 if inst.errno != errno.ENOENT:
155 raise
155 raise
156
156
157 requirements += 'shared\n'
157 requirements += 'shared\n'
158 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
158 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
159 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
159 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
160
160
161 default = srcrepo.ui.config('paths', 'default')
161 default = srcrepo.ui.config('paths', 'default')
162 if default:
162 if default:
163 f = file(os.path.join(roothg, 'hgrc'), 'w')
163 f = file(os.path.join(roothg, 'hgrc'), 'w')
164 f.write('[paths]\ndefault = %s\n' % default)
164 f.write('[paths]\ndefault = %s\n' % default)
165 f.close()
165 f.close()
166
166
167 r = repository(ui, root)
167 r = repository(ui, root)
168
168
169 if update:
169 if update:
170 r.ui.status(_("updating working directory\n"))
170 r.ui.status(_("updating working directory\n"))
171 if update is not True:
171 if update is not True:
172 checkout = update
172 checkout = update
173 for test in (checkout, 'default', 'tip'):
173 for test in (checkout, 'default', 'tip'):
174 if test is None:
174 if test is None:
175 continue
175 continue
176 try:
176 try:
177 uprev = r.lookup(test)
177 uprev = r.lookup(test)
178 break
178 break
179 except error.RepoLookupError:
179 except error.RepoLookupError:
180 continue
180 continue
181 _update(r, uprev)
181 _update(r, uprev)
182
182
183 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
183 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
184 stream=False, branch=None):
184 stream=False, branch=None):
185 """Make a copy of an existing repository.
185 """Make a copy of an existing repository.
186
186
187 Create a copy of an existing repository in a new directory. The
187 Create a copy of an existing repository in a new directory. The
188 source and destination are URLs, as passed to the repository
188 source and destination are URLs, as passed to the repository
189 function. Returns a pair of repository objects, the source and
189 function. Returns a pair of repository objects, the source and
190 newly created destination.
190 newly created destination.
191
191
192 The location of the source is added to the new repository's
192 The location of the source is added to the new repository's
193 .hg/hgrc file, as the default to be used for future pulls and
193 .hg/hgrc file, as the default to be used for future pulls and
194 pushes.
194 pushes.
195
195
196 If an exception is raised, the partly cloned/updated destination
196 If an exception is raised, the partly cloned/updated destination
197 repository will be deleted.
197 repository will be deleted.
198
198
199 Arguments:
199 Arguments:
200
200
201 source: repository object or URL
201 source: repository object or URL
202
202
203 dest: URL of destination repository to create (defaults to base
203 dest: URL of destination repository to create (defaults to base
204 name of source repository)
204 name of source repository)
205
205
206 pull: always pull from source repository, even in local case
206 pull: always pull from source repository, even in local case
207
207
208 stream: stream raw data uncompressed from repository (fast over
208 stream: stream raw data uncompressed from repository (fast over
209 LAN, slow over WAN)
209 LAN, slow over WAN)
210
210
211 rev: revision to clone up to (implies pull=True)
211 rev: revision to clone up to (implies pull=True)
212
212
213 update: update working directory after clone completes, if
213 update: update working directory after clone completes, if
214 destination is local repository (True means update to default rev,
214 destination is local repository (True means update to default rev,
215 anything else is treated as a revision)
215 anything else is treated as a revision)
216
216
217 branch: branches to clone
217 branch: branches to clone
218 """
218 """
219
219
220 if isinstance(source, str):
220 if isinstance(source, str):
221 origsource = ui.expandpath(source)
221 origsource = ui.expandpath(source)
222 source, branch = parseurl(origsource, branch)
222 source, branch = parseurl(origsource, branch)
223 src_repo = repository(ui, source)
223 src_repo = repository(ui, source)
224 else:
224 else:
225 src_repo = source
225 src_repo = source
226 branch = (None, branch or [])
226 branch = (None, branch or [])
227 origsource = source = src_repo.url()
227 origsource = source = src_repo.url()
228 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
228 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
229
229
230 if dest is None:
230 if dest is None:
231 dest = defaultdest(source)
231 dest = defaultdest(source)
232 ui.status(_("destination directory: %s\n") % dest)
232 ui.status(_("destination directory: %s\n") % dest)
233 else:
233 else:
234 dest = ui.expandpath(dest)
234 dest = ui.expandpath(dest)
235
235
236 dest = localpath(dest)
236 dest = localpath(dest)
237 source = localpath(source)
237 source = localpath(source)
238
238
239 if os.path.exists(dest):
239 if os.path.exists(dest):
240 if not os.path.isdir(dest):
240 if not os.path.isdir(dest):
241 raise util.Abort(_("destination '%s' already exists") % dest)
241 raise util.Abort(_("destination '%s' already exists") % dest)
242 elif os.listdir(dest):
242 elif os.listdir(dest):
243 raise util.Abort(_("destination '%s' is not empty") % dest)
243 raise util.Abort(_("destination '%s' is not empty") % dest)
244
244
245 class DirCleanup(object):
245 class DirCleanup(object):
246 def __init__(self, dir_):
246 def __init__(self, dir_):
247 self.rmtree = shutil.rmtree
247 self.rmtree = shutil.rmtree
248 self.dir_ = dir_
248 self.dir_ = dir_
249 def close(self):
249 def close(self):
250 self.dir_ = None
250 self.dir_ = None
251 def cleanup(self):
251 def cleanup(self):
252 if self.dir_:
252 if self.dir_:
253 self.rmtree(self.dir_, True)
253 self.rmtree(self.dir_, True)
254
254
255 src_lock = dest_lock = dir_cleanup = None
255 src_lock = dest_lock = dir_cleanup = None
256 try:
256 try:
257 if islocal(dest):
257 if islocal(dest):
258 dir_cleanup = DirCleanup(dest)
258 dir_cleanup = DirCleanup(dest)
259
259
260 abspath = origsource
260 abspath = origsource
261 copy = False
261 copy = False
262 if src_repo.cancopy() and islocal(dest):
262 if src_repo.cancopy() and islocal(dest):
263 abspath = os.path.abspath(util.drop_scheme('file', origsource))
263 abspath = os.path.abspath(util.drop_scheme('file', origsource))
264 copy = not pull and not rev
264 copy = not pull and not rev
265
265
266 if copy:
266 if copy:
267 try:
267 try:
268 # we use a lock here because if we race with commit, we
268 # we use a lock here because if we race with commit, we
269 # can end up with extra data in the cloned revlogs that's
269 # can end up with extra data in the cloned revlogs that's
270 # not pointed to by changesets, thus causing verify to
270 # not pointed to by changesets, thus causing verify to
271 # fail
271 # fail
272 src_lock = src_repo.lock(wait=False)
272 src_lock = src_repo.lock(wait=False)
273 except error.LockError:
273 except error.LockError:
274 copy = False
274 copy = False
275
275
276 if copy:
276 if copy:
277 src_repo.hook('preoutgoing', throw=True, source='clone')
277 src_repo.hook('preoutgoing', throw=True, source='clone')
278 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
278 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
279 if not os.path.exists(dest):
279 if not os.path.exists(dest):
280 os.mkdir(dest)
280 os.mkdir(dest)
281 else:
281 else:
282 # only clean up directories we create ourselves
282 # only clean up directories we create ourselves
283 dir_cleanup.dir_ = hgdir
283 dir_cleanup.dir_ = hgdir
284 try:
284 try:
285 dest_path = hgdir
285 dest_path = hgdir
286 os.mkdir(dest_path)
286 os.mkdir(dest_path)
287 except OSError, inst:
287 except OSError, inst:
288 if inst.errno == errno.EEXIST:
288 if inst.errno == errno.EEXIST:
289 dir_cleanup.close()
289 dir_cleanup.close()
290 raise util.Abort(_("destination '%s' already exists")
290 raise util.Abort(_("destination '%s' already exists")
291 % dest)
291 % dest)
292 raise
292 raise
293
293
294 hardlink = None
294 hardlink = None
295 num = 0
295 num = 0
296 for f in src_repo.store.copylist():
296 for f in src_repo.store.copylist():
297 src = os.path.join(src_repo.sharedpath, f)
297 src = os.path.join(src_repo.sharedpath, f)
298 dst = os.path.join(dest_path, f)
298 dst = os.path.join(dest_path, f)
299 dstbase = os.path.dirname(dst)
299 dstbase = os.path.dirname(dst)
300 if dstbase and not os.path.exists(dstbase):
300 if dstbase and not os.path.exists(dstbase):
301 os.mkdir(dstbase)
301 os.mkdir(dstbase)
302 if os.path.exists(src):
302 if os.path.exists(src):
303 if dst.endswith('data'):
303 if dst.endswith('data'):
304 # lock to avoid premature writing to the target
304 # lock to avoid premature writing to the target
305 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
305 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
306 hardlink, n = util.copyfiles(src, dst, hardlink)
306 hardlink, n = util.copyfiles(src, dst, hardlink)
307 num += n
307 num += n
308 if hardlink:
308 if hardlink:
309 ui.debug("linked %d files\n" % num)
309 ui.debug("linked %d files\n" % num)
310 else:
310 else:
311 ui.debug("copied %d files\n" % num)
311 ui.debug("copied %d files\n" % num)
312
312
313 # we need to re-init the repo after manually copying the data
313 # we need to re-init the repo after manually copying the data
314 # into it
314 # into it
315 dest_repo = repository(ui, dest)
315 dest_repo = repository(ui, dest)
316 src_repo.hook('outgoing', source='clone',
316 src_repo.hook('outgoing', source='clone',
317 node=node.hex(node.nullid))
317 node=node.hex(node.nullid))
318 else:
318 else:
319 try:
319 try:
320 dest_repo = repository(ui, dest, create=True)
320 dest_repo = repository(ui, dest, create=True)
321 except OSError, inst:
321 except OSError, inst:
322 if inst.errno == errno.EEXIST:
322 if inst.errno == errno.EEXIST:
323 dir_cleanup.close()
323 dir_cleanup.close()
324 raise util.Abort(_("destination '%s' already exists")
324 raise util.Abort(_("destination '%s' already exists")
325 % dest)
325 % dest)
326 raise
326 raise
327
327
328 revs = None
328 revs = None
329 if rev:
329 if rev:
330 if 'lookup' not in src_repo.capabilities:
330 if 'lookup' not in src_repo.capabilities:
331 raise util.Abort(_("src repository does not support "
331 raise util.Abort(_("src repository does not support "
332 "revision lookup and so doesn't "
332 "revision lookup and so doesn't "
333 "support clone by revision"))
333 "support clone by revision"))
334 revs = [src_repo.lookup(r) for r in rev]
334 revs = [src_repo.lookup(r) for r in rev]
335 checkout = revs[0]
335 checkout = revs[0]
336 if dest_repo.local():
336 if dest_repo.local():
337 dest_repo.clone(src_repo, heads=revs, stream=stream)
337 dest_repo.clone(src_repo, heads=revs, stream=stream)
338 elif src_repo.local():
338 elif src_repo.local():
339 src_repo.push(dest_repo, revs=revs)
339 src_repo.push(dest_repo, revs=revs)
340 else:
340 else:
341 raise util.Abort(_("clone from remote to remote not supported"))
341 raise util.Abort(_("clone from remote to remote not supported"))
342
342
343 if dir_cleanup:
343 if dir_cleanup:
344 dir_cleanup.close()
344 dir_cleanup.close()
345
345
346 if dest_repo.local():
346 if dest_repo.local():
347 fp = dest_repo.opener("hgrc", "w", text=True)
347 fp = dest_repo.opener("hgrc", "w", text=True)
348 fp.write("[paths]\n")
348 fp.write("[paths]\n")
349 fp.write("default = %s\n" % abspath)
349 fp.write("default = %s\n" % abspath)
350 fp.close()
350 fp.close()
351
351
352 dest_repo.ui.setconfig('paths', 'default', abspath)
352 dest_repo.ui.setconfig('paths', 'default', abspath)
353
353
354 if update:
354 if update:
355 if update is not True:
355 if update is not True:
356 checkout = update
356 checkout = update
357 if src_repo.local():
357 if src_repo.local():
358 checkout = src_repo.lookup(update)
358 checkout = src_repo.lookup(update)
359 for test in (checkout, 'default', 'tip'):
359 for test in (checkout, 'default', 'tip'):
360 if test is None:
360 if test is None:
361 continue
361 continue
362 try:
362 try:
363 uprev = dest_repo.lookup(test)
363 uprev = dest_repo.lookup(test)
364 break
364 break
365 except error.RepoLookupError:
365 except error.RepoLookupError:
366 continue
366 continue
367 bn = dest_repo[uprev].branch()
367 bn = dest_repo[uprev].branch()
368 dest_repo.ui.status(_("updating to branch %s\n")
368 dest_repo.ui.status(_("updating to branch %s\n")
369 % encoding.tolocal(bn))
369 % encoding.tolocal(bn))
370 _update(dest_repo, uprev)
370 _update(dest_repo, uprev)
371
371
372 return src_repo, dest_repo
372 return src_repo, dest_repo
373 finally:
373 finally:
374 release(src_lock, dest_lock)
374 release(src_lock, dest_lock)
375 if dir_cleanup is not None:
375 if dir_cleanup is not None:
376 dir_cleanup.cleanup()
376 dir_cleanup.cleanup()
377
377
378 def _showstats(repo, stats):
378 def _showstats(repo, stats):
379 repo.ui.status(_("%d files updated, %d files merged, "
379 repo.ui.status(_("%d files updated, %d files merged, "
380 "%d files removed, %d files unresolved\n") % stats)
380 "%d files removed, %d files unresolved\n") % stats)
381
381
382 def update(repo, node):
382 def update(repo, node):
383 """update the working directory to node, merging linear changes"""
383 """update the working directory to node, merging linear changes"""
384 stats = mergemod.update(repo, node, False, False, None)
384 stats = mergemod.update(repo, node, False, False, None)
385 _showstats(repo, stats)
385 _showstats(repo, stats)
386 if stats[3]:
386 if stats[3]:
387 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
387 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
388 return stats[3] > 0
388 return stats[3] > 0
389
389
390 # naming conflict in clone()
390 # naming conflict in clone()
391 _update = update
391 _update = update
392
392
393 def clean(repo, node, show_stats=True):
393 def clean(repo, node, show_stats=True):
394 """forcibly switch the working directory to node, clobbering changes"""
394 """forcibly switch the working directory to node, clobbering changes"""
395 stats = mergemod.update(repo, node, False, True, None)
395 stats = mergemod.update(repo, node, False, True, None)
396 if show_stats:
396 if show_stats:
397 _showstats(repo, stats)
397 _showstats(repo, stats)
398 return stats[3] > 0
398 return stats[3] > 0
399
399
400 def merge(repo, node, force=None, remind=True):
400 def merge(repo, node, force=None, remind=True):
401 """branch merge with node, resolving changes"""
401 """branch merge with node, resolving changes"""
402 stats = mergemod.update(repo, node, True, force, False)
402 stats = mergemod.update(repo, node, True, force, False)
403 _showstats(repo, stats)
403 _showstats(repo, stats)
404 if stats[3]:
404 if stats[3]:
405 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
405 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
406 "or 'hg update -C .' to abandon\n"))
406 "or 'hg update -C .' to abandon\n"))
407 elif remind:
407 elif remind:
408 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
408 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
409 return stats[3] > 0
409 return stats[3] > 0
410
410
411 def _incoming(displaychlist, subreporecurse, ui, repo, source,
411 def _incoming(displaychlist, subreporecurse, ui, repo, source,
412 opts, buffered=False):
412 opts, buffered=False):
413 """
413 """
414 Helper for incoming / gincoming.
414 Helper for incoming / gincoming.
415 displaychlist gets called with
415 displaychlist gets called with
416 (remoterepo, incomingchangesetlist, displayer) parameters,
416 (remoterepo, incomingchangesetlist, displayer) parameters,
417 and is supposed to contain only code that can't be unified.
417 and is supposed to contain only code that can't be unified.
418 """
418 """
419 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
419 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
420 other = repository(remoteui(repo, opts), source)
420 other = repository(remoteui(repo, opts), source)
421 ui.status(_('comparing with %s\n') % url.hidepassword(source))
421 ui.status(_('comparing with %s\n') % url.hidepassword(source))
422 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
422 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
423
423
424 if revs:
424 if revs:
425 revs = [other.lookup(rev) for rev in revs]
425 revs = [other.lookup(rev) for rev in revs]
426 other, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other, revs,
426 other, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other, revs,
427 opts["bundle"], opts["force"])
427 opts["bundle"], opts["force"])
428 if incoming is None:
428 if incoming is None:
429 ui.status(_("no changes found\n"))
429 ui.status(_("no changes found\n"))
430 return subreporecurse()
430 return subreporecurse()
431
431
432 try:
432 try:
433 chlist = other.changelog.nodesbetween(incoming, revs)[0]
433 chlist = other.changelog.nodesbetween(incoming, revs)[0]
434 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
434 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
435
435
436 # XXX once graphlog extension makes it into core,
436 # XXX once graphlog extension makes it into core,
437 # should be replaced by a if graph/else
437 # should be replaced by a if graph/else
438 displaychlist(other, chlist, displayer)
438 displaychlist(other, chlist, displayer)
439
439
440 displayer.close()
440 displayer.close()
441 finally:
441 finally:
442 if hasattr(other, 'close'):
442 if hasattr(other, 'close'):
443 other.close()
443 other.close()
444 if bundle:
444 if bundle:
445 os.unlink(bundle)
445 os.unlink(bundle)
446 subreporecurse()
446 subreporecurse()
447 return 0 # exit code is zero since we found incoming changes
447 return 0 # exit code is zero since we found incoming changes
448
448
449 def incoming(ui, repo, source, opts):
449 def incoming(ui, repo, source, opts):
450 def subreporecurse():
450 def subreporecurse():
451 ret = 1
451 ret = 1
452 if opts.get('subrepos'):
452 if opts.get('subrepos'):
453 ctx = repo[None]
453 ctx = repo[None]
454 for subpath in sorted(ctx.substate):
454 for subpath in sorted(ctx.substate):
455 sub = ctx.sub(subpath)
455 sub = ctx.sub(subpath)
456 ret = min(ret, sub.incoming(ui, source, opts))
456 ret = min(ret, sub.incoming(ui, source, opts))
457 return ret
457 return ret
458
458
459 def display(other, chlist, displayer):
459 def display(other, chlist, displayer):
460 limit = cmdutil.loglimit(opts)
460 limit = cmdutil.loglimit(opts)
461 if opts.get('newest_first'):
461 if opts.get('newest_first'):
462 chlist.reverse()
462 chlist.reverse()
463 count = 0
463 count = 0
464 for n in chlist:
464 for n in chlist:
465 if limit is not None and count >= limit:
465 if limit is not None and count >= limit:
466 break
466 break
467 parents = [p for p in other.changelog.parents(n) if p != nullid]
467 parents = [p for p in other.changelog.parents(n) if p != nullid]
468 if opts.get('no_merges') and len(parents) == 2:
468 if opts.get('no_merges') and len(parents) == 2:
469 continue
469 continue
470 count += 1
470 count += 1
471 displayer.show(other[n])
471 displayer.show(other[n])
472 return _incoming(display, subreporecurse, ui, repo, source, opts)
472 return _incoming(display, subreporecurse, ui, repo, source, opts)
473
473
474 + def _outgoing(ui, repo, dest, opts):
475 +     dest = ui.expandpath(dest or 'default-push', dest or 'default')
476 +     dest, branches = parseurl(dest, opts.get('branch'))
477 +     revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
478 +     if revs:
479 +         revs = [repo.lookup(rev) for rev in revs]
480 +
481 +     other = repository(remoteui(repo, opts), dest)
482 +     ui.status(_('comparing with %s\n') % url.hidepassword(dest))
483 +     o = discovery.findoutgoing(repo, other, force=opts.get('force'))
484 +     if not o:
485 +         ui.status(_("no changes found\n"))
486 +         return None
487 +
488 +     return repo.changelog.nodesbetween(o, revs)[0]
489 +
474 def outgoing(ui, repo, dest, opts):
490 def outgoing(ui, repo, dest, opts):
475 def recurse():
491 def recurse():
476 ret = 1
492 ret = 1
477 if opts.get('subrepos'):
493 if opts.get('subrepos'):
478 ctx = repo[None]
494 ctx = repo[None]
479 for subpath in sorted(ctx.substate):
495 for subpath in sorted(ctx.substate):
480 sub = ctx.sub(subpath)
496 sub = ctx.sub(subpath)
481 ret = min(ret, sub.outgoing(ui, dest, opts))
497 ret = min(ret, sub.outgoing(ui, dest, opts))
482 return ret
498 return ret
483
499
484 limit = cmdutil.loglimit(opts)
500 limit = cmdutil.loglimit(opts)
485 -     dest = ui.expandpath(dest or 'default-push', dest or 'default')
486 -     dest, branches = parseurl(dest, opts.get('branch'))
487 -     revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
488 -     if revs:
489 -         revs = [repo.lookup(rev) for rev in revs]
490 -
491 -     other = repository(remoteui(repo, opts), dest)
492 -     ui.status(_('comparing with %s\n') % url.hidepassword(dest))
493 -     o = discovery.findoutgoing(repo, other, force=opts.get('force'))
494 -     if not o:
495 -         ui.status(_("no changes found\n"))
496 -         return recurse()
501 +     o = _outgoing(ui, repo, dest, opts)
502 +     if o is None:
503 +         return recurse()
497
504
498 -     o = repo.changelog.nodesbetween(o, revs)[0]
499 if opts.get('newest_first'):
505 if opts.get('newest_first'):
500 o.reverse()
506 o.reverse()
501 displayer = cmdutil.show_changeset(ui, repo, opts)
507 displayer = cmdutil.show_changeset(ui, repo, opts)
502 count = 0
508 count = 0
503 for n in o:
509 for n in o:
504 if limit is not None and count >= limit:
510 if limit is not None and count >= limit:
505 break
511 break
506 parents = [p for p in repo.changelog.parents(n) if p != nullid]
512 parents = [p for p in repo.changelog.parents(n) if p != nullid]
507 if opts.get('no_merges') and len(parents) == 2:
513 if opts.get('no_merges') and len(parents) == 2:
508 continue
514 continue
509 count += 1
515 count += 1
510 displayer.show(repo[n])
516 displayer.show(repo[n])
511 displayer.close()
517 displayer.close()
512 recurse()
518 recurse()
513 return 0 # exit code is zero since we found outgoing changes
519 return 0 # exit code is zero since we found outgoing changes
514
520
515 def revert(repo, node, choose):
521 def revert(repo, node, choose):
516 """revert changes to revision in node without updating dirstate"""
522 """revert changes to revision in node without updating dirstate"""
517 return mergemod.update(repo, node, False, True, choose)[3] > 0
523 return mergemod.update(repo, node, False, True, choose)[3] > 0
518
524
519 def verify(repo):
525 def verify(repo):
520 """verify the consistency of a repository"""
526 """verify the consistency of a repository"""
521 return verifymod.verify(repo)
527 return verifymod.verify(repo)
522
528
523 def remoteui(src, opts):
529 def remoteui(src, opts):
524 'build a remote ui from ui or repo and opts'
530 'build a remote ui from ui or repo and opts'
525 if hasattr(src, 'baseui'): # looks like a repository
531 if hasattr(src, 'baseui'): # looks like a repository
526 dst = src.baseui.copy() # drop repo-specific config
532 dst = src.baseui.copy() # drop repo-specific config
527 src = src.ui # copy target options from repo
533 src = src.ui # copy target options from repo
528 else: # assume it's a global ui object
534 else: # assume it's a global ui object
529 dst = src.copy() # keep all global options
535 dst = src.copy() # keep all global options
530
536
531 # copy ssh-specific options
537 # copy ssh-specific options
532 for o in 'ssh', 'remotecmd':
538 for o in 'ssh', 'remotecmd':
533 v = opts.get(o) or src.config('ui', o)
539 v = opts.get(o) or src.config('ui', o)
534 if v:
540 if v:
535 dst.setconfig("ui", o, v)
541 dst.setconfig("ui", o, v)
536
542
537 # copy bundle-specific options
543 # copy bundle-specific options
538 r = src.config('bundle', 'mainreporoot')
544 r = src.config('bundle', 'mainreporoot')
539 if r:
545 if r:
540 dst.setconfig('bundle', 'mainreporoot', r)
546 dst.setconfig('bundle', 'mainreporoot', r)
541
547
542 # copy auth and http_proxy section settings
548 # copy auth and http_proxy section settings
543 for sect in ('auth', 'http_proxy'):
549 for sect in ('auth', 'http_proxy'):
544 for key, val in src.configitems(sect):
550 for key, val in src.configitems(sect):
545 dst.setconfig(sect, key, val)
551 dst.setconfig(sect, key, val)
546
552
547 return dst
553 return dst
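Because _outgoing() is now a plain helper on the hg module, other extensions can reuse it instead of re-implementing the discovery sequence. A minimal sketch, assuming it runs inside a command with a loaded ui and repo; the printoutgoing name is a hypothetical example, not part of this change:

    from mercurial import hg

    def printoutgoing(ui, repo, dest=None, **opts):
        # hg._outgoing() returns None when there is nothing to push, otherwise
        # the outgoing changelog nodes, exactly as hg.outgoing() consumes them.
        nodes = hg._outgoing(ui, repo, dest, opts)
        if nodes is None:
            return 1
        for n in nodes:
            ui.write('%s\n' % repo[n].hex())
        return 0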