##// END OF EJS Templates
incoming: unify code for incoming and graphlog.incoming
Nicolas Dumazet -
r12730:33e1fd2a default
parent child Browse files
Show More
@@ -1,384 +1,346
1 1 # ASCII graph log extension for Mercurial
2 2 #
3 3 # Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to view revision graphs from a shell
9 9
10 10 This extension adds a --graph option to the incoming, outgoing and log
11 11 commands. When this option is given, an ASCII representation of the
12 12 revision graph is also shown.
13 13 '''
14 14
15 15 import os
16 16 from mercurial.cmdutil import revrange, show_changeset
17 17 from mercurial.commands import templateopts
18 18 from mercurial.i18n import _
19 19 from mercurial.node import nullrev
20 from mercurial import bundlerepo, changegroup, cmdutil, commands, extensions
20 from mercurial import cmdutil, commands, extensions
21 21 from mercurial import hg, url, util, graphmod, discovery
22 22
23 23 ASCIIDATA = 'ASC'
24 24
def asciiedges(seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()"""
    # allocate a column for this rev on first sight
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # split parents into those already tracked and brand-new ones
    knownparents = [p for p in parents if p in seen]
    newparents = [p for p in parents if p not in seen]

    ncols = len(seen)
    # the new parents take over this rev's column slot
    seen[nodeidx:nodeidx + 1] = newparents
    edges = [(nodeidx, seen.index(p)) for p in knownparents]

    # one edge straight down to the first new parent; a diagonal into
    # the neighbouring column if there is a second one
    if newparents:
        edges.append((nodeidx, nodeidx))
        if len(newparents) > 1:
            edges.append((nodeidx, nodeidx + 1))

    return nodeidx, edges, ncols, len(seen) - ncols
50 50
def fix_long_right_edges(edges):
    """stretch every rightward edge by one column, in place"""
    for idx, (start, end) in enumerate(edges):
        if end > start:
            edges[idx] = (start, end + 1)
55 55
def get_nodeline_edges_tail(
    node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """return the edge characters to the right of the node symbol"""
    if not (fix_tail and n_columns_diff == p_diff and n_columns_diff != 0):
        # default: straight vertical edges out to the right margin
        return ["|", " "] * (n_columns - node_index - 1)
    if n_columns_diff == -1:
        # still shrinking: verticals up to the merge point, slashes beyond
        start = max(node_index + 1, p_node_index)
        tail = ["|", " "] * (start - node_index - 1)
        tail.extend(["/", " "] * (n_columns - start))
        return tail
    # still growing: backslashes all the way
    return ["\\", " "] * (n_columns - node_index - 1)
69 69
def draw_edges(edges, nodeline, interline):
    """render (start, end) edges into the node and inter-node buffers"""
    for start, end in edges:
        if start == end + 1:
            interline[2 * end + 1] = "/"      # parent one column to the left
        elif start == end - 1:
            interline[2 * start + 1] = "\\"   # parent one column to the right
        elif start == end:
            interline[2 * start] = "|"        # straight down
        else:
            # distant parent: horizontal run terminating in a '+'
            nodeline[2 * end] = "+"
            lo, hi = min(start, end), max(start, end)
            for i in range(2 * lo + 1, 2 * hi):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
85 85
def get_padding_line(ni, n_columns, edges):
    """build a padding row; column ni carries '|' only when an edge
    terminates there ((ni, ni - 1) or (ni, ni)), else it is blank"""
    # (ni, ni - 1)      (ni, ni)
    # | | | |           | | | |
    # +---o |           | o---+
    # | | c |           | c | |
    # | |/ /            | |/ /
    # | | |             | | |
    if (ni, ni - 1) in edges or (ni, ni) in edges:
        mid = "|"
    else:
        mid = " "
    line = ["|", " "] * ni
    line.extend([mid, " "])
    line.extend(["|", " "] * (n_columns - ni - 1))
    return line
102 102
def asciistate():
    """returns the initial value for the "state" argument to ascii()"""
    # [0]: column diff of the previous node, [1]: its column index
    return [0, 0]
106 106
def ascii(ui, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - ui to write to
      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data == ASCIIDATA.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """

    idx, edges, ncols, coldiff = coldata
    # a node may add or remove at most one column per step
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        fix_long_right_edges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    # state holds the previous node's (coldiff, idx) to keep diagonals
    # continuous across calls
    nodeline.extend(
        get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    draw_edges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        lines.append(get_padding_line(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        ui.write(ln.rstrip() + '\n')

    # ... and start over
    state[0] = coldiff
    state[1] = idx
208 208
def get_revs(repo, rev_opt):
    """map the --rev option to a (maxrev, minrev) pair, newest first"""
    if not rev_opt:
        # no revisions requested: whole repo, tip down to 0
        return len(repo) - 1, 0
    revs = revrange(repo, rev_opt)
    if not revs:
        return nullrev, nullrev
    return max(revs), min(revs)
217 217
def check_unsupported_flags(opts):
    """abort when any log option that --graph cannot honour is set"""
    unsupported = ("follow", "follow_first", "date", "copies", "keyword",
                   "remove", "only_merges", "user", "branch", "only_branch",
                   "prune", "newest_first", "no_merges", "include", "exclude")
    for op in unsupported:
        if opts.get(op):
            raise util.Abort(_("--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
225 225
def generate(ui, dag, displayer, showparents, edgefn):
    """emit an ascii graph for every (rev, type, ctx, parents) in dag"""
    seen = []
    state = asciistate()
    for rev, kind, ctx, parents in dag:
        # working-directory parents are drawn as '@', everything else as 'o'
        if ctx.node() in showparents:
            char = '@'
        else:
            char = 'o'
        displayer.show(ctx)
        lines = displayer.hunk.pop(rev).split('\n')[:-1]
        displayer.flush(rev)
        ascii(ui, state, kind, char, lines, edgefn(seen, rev, parents))
    displayer.close()
235 235
def graphlog(ui, repo, path=None, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags(opts)
    limit = cmdutil.loglimit(opts)
    start, stop = get_revs(repo, opts["rev"])
    if start == nullrev:
        # empty revision range: nothing to draw
        return

    if path:
        path = util.canonpath(repo.root, os.getcwd(), path)
    if path: # could be reset in canonpath
        revdag = graphmod.filerevs(repo, path, start, stop, limit)
    else:
        if limit is not None:
            # clamp the bottom of the range so we never walk more than
            # --limit revisions
            stop = max(stop, start - limit + 1)
        revdag = graphmod.revisions(repo, start, stop)

    displayer = show_changeset(ui, repo, opts, buffered=True)
    # working-directory parents get drawn as '@' instead of 'o'
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges)
264 264
def graphrevs(repo, nodes, opts):
    """walk nodes newest-first, truncated to --limit entries"""
    limit = cmdutil.loglimit(opts)
    nodes.reverse()
    if limit is None:
        return graphmod.nodes(repo, nodes)
    return graphmod.nodes(repo, nodes[:limit])
271 271
def goutgoing(ui, repo, dest=None, **opts):
    """show the outgoing changesets alongside an ASCII revision graph

    Print the outgoing changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    check_unsupported_flags(opts)
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.repository(hg.remoteui(ui, opts), dest)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return

    # o holds the roots of the outgoing set; expand to the full list
    o = repo.changelog.nodesbetween(o, revs)[0]
    revdag = graphrevs(repo, o, opts)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    # working-directory parents get drawn as '@' instead of 'o'
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges)
300 300
def gincoming(ui, repo, source="default", **opts):
    """show the incoming changesets alongside an ASCII revision graph

    Print the incoming changesets alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """
    def subreporecurse():
        # no --subrepos support here; report the "nothing found" exit code
        return 1

    check_unsupported_flags(opts)
    def display(other, chlist, displayer):
        # graph-specific rendering only; bundle handling, discovery and
        # cleanup are shared with plain "hg incoming" via hg._incoming
        revdag = graphrevs(other, chlist, opts)
        showparents = [ctx.node() for ctx in repo[None].parents()]
        generate(ui, revdag, displayer, showparents, asciiedges)

    hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
358 320
def uisetup(ui):
    '''Initialize the extension.'''
    # add -G/--graph to the three commands that can draw a DAG
    for cmd, wrapfn in (('log', graphlog),
                        ('incoming', gincoming),
                        ('outgoing', goutgoing)):
        _wrapcmd(ui, cmd, commands.table, wrapfn)
364 326
def _wrapcmd(ui, cmd, table, wrapfn):
    '''wrap the command'''
    def graph(orig, *args, **kwargs):
        # fall through to the original command unless -G/--graph was given
        if not kwargs['graph']:
            return orig(*args, **kwargs)
        return wrapfn(*args, **kwargs)
    entry = extensions.wrapcommand(table, cmd, graph)
    entry[1].append(('G', 'graph', None, _("show the revision DAG")))
373 335
# command table: registers "hg glog" with its own option set plus the
# standard template/style options
cmdtable = {
    "glog":
        (graphlog,
         [('l', 'limit', '',
           _('limit number of changes displayed'), _('NUM')),
          ('p', 'patch', False, _('show patch')),
          ('r', 'rev', [],
           _('show the specified revision or range'), _('REV')),
         ] + templateopts,
         _('hg glog [OPTION]... [FILE]')),
}
@@ -1,557 +1,573
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 from node import hex, nullid, nullrev, short
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 13 import lock, util, extensions, error, encoding, node
14 14 import cmdutil, discovery, url, changegroup
15 15 import merge as mergemod
16 16 import verify as verifymod
17 17 import errno, os, shutil
18 18
def _local(path):
    """pick the repo module for a file path: bundle file or local repo"""
    path = util.expandpath(util.drop_scheme('file', path))
    if os.path.isfile(path):
        return bundlerepo
    return localrepo
22 22
def addbranchrevs(lrepo, repo, branches, revs):
    """expand branch names from a parseurl() result into revisions

    Returns (revs, checkout) where revs has the branch heads appended
    and checkout is the revision an update should target (or None).
    """
    hashbranch, branches = branches
    if not hashbranch and not branches:
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not repo.capable('branchmap'):
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        # no branchmap capability: treat the URL fragment as a revision
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = repo.branchmap()

    def primary(butf8):
        # resolve one UTF-8 branch name against the remote branchmap;
        # '.' stands for the local dirstate branch
        if butf8 == '.':
            if not lrepo or not lrepo.local():
                raise util.Abort(_("dirstate branch not accessible"))
            butf8 = lrepo.dirstate.branch()
        if butf8 in branchmap:
            # newest head first
            revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
            return True
        else:
            return False

    for branch in branches:
        butf8 = encoding.fromlocal(branch)
        if not primary(butf8):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        butf8 = encoding.fromlocal(hashbranch)
        if not primary(butf8):
            # not a branch name: assume it is a plain revision
            revs.append(hashbranch)
    return revs, revs[0]
55 55
def parseurl(url, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    branch = None
    if '#' in url:
        # only the first '#' separates the URL from the branch name
        url, branch = url.split('#', 1)
    return url, (branch, branches or [])
63 63
# URL scheme -> repo module (or, for 'file', a callable that picks
# between bundle and local repo); consumed by _lookup() below
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httprepo,
    'https': httprepo,
    'ssh': sshrepo,
    'static-http': statichttprepo,
}
72 72
def _lookup(path):
    """return the repo module responsible for path's URL scheme"""
    scheme = 'file'
    if path:
        sep = path.find(':')
        if sep > 0:
            scheme = path[:sep]
    handler = schemes.get(scheme) or schemes['file']
    try:
        # 'file' maps to a callable that dispatches on the path itself
        return handler(path)
    except TypeError:
        # every other entry is a module used directly
        return handler
84 84
def islocal(repo):
    '''return true if repo or path is local'''
    if not isinstance(repo, str):
        # already a repo object: ask it directly
        return repo.local()
    try:
        return _lookup(repo).islocal(repo)
    except AttributeError:
        return False
93 93
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    repo = _lookup(path).instance(ui, path, create)
    # prefer the repo's own ui (it may carry repo-level configuration)
    ui = getattr(repo, "ui", ui)
    # give every loaded extension a chance to wrap the new repo
    for name, module in extensions.extensions():
        hook = getattr(module, 'reposetup', None)
        if hook is not None:
            hook(ui, repo)
    return repo
103 103
def defaultdest(source):
    '''return default destination of clone if none is given'''
    # normalize first so a trailing separator ('a/b/') still yields 'b'
    normalized = os.path.normpath(source)
    return os.path.basename(normalized)
107 107
def localpath(path):
    """strip a file: scheme prefix, if any, from path

    For 'file://localhost/...' the cut point deliberately keeps the
    final slash so absolute paths stay absolute.
    """
    for prefix, cut in (('file://localhost/', 16),
                        ('file://', 7),
                        ('file:', 5)):
        if path.startswith(prefix):
            return path[cut:]
    return path
116 116
def share(ui, source, dest=None, update=True):
    '''create a shared repository'''

    if not islocal(source):
        raise util.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        srcrepo = source
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    root = os.path.realpath(dest)
    roothg = os.path.join(root, '.hg')

    if os.path.exists(roothg):
        raise util.Abort(_('destination already exists'))

    if not os.path.isdir(root):
        os.mkdir(root)
    os.mkdir(roothg)

    # copy the source's requirements, tolerating a missing file
    requirements = ''
    try:
        requirements = srcrepo.opener('requires').read()
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise

    # the 'shared' requirement makes localrepo follow .hg/sharedpath
    requirements += 'shared\n'
    file(os.path.join(roothg, 'requires'), 'w').write(requirements)
    file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)

    default = srcrepo.ui.config('paths', 'default')
    if default:
        f = file(os.path.join(roothg, 'hgrc'), 'w')
        f.write('[paths]\ndefault = %s\n' % default)
        f.close()

    r = repository(ui, root)

    if update:
        r.ui.status(_("updating working directory\n"))
        if update is not True:
            checkout = update
        # try the requested checkout first, then sensible fallbacks
        for test in (checkout, 'default', 'tip'):
            if test is None:
                continue
            try:
                uprev = r.lookup(test)
                break
            except error.RepoLookupError:
                continue
        _update(r, uprev)
182 182
def clone(ui, source, dest=None, pull=False, rev=None, update=True,
          stream=False, branch=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository objects, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    rev: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone
    """

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branch = parseurl(origsource, branch)
        src_repo = repository(ui, source)
    else:
        src_repo = source
        branch = (None, branch or [])
        origsource = source = src_repo.url()
    rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)

    if dest is None:
        dest = defaultdest(source)
        ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = localpath(dest)
    source = localpath(source)

    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_("destination '%s' already exists") % dest)
        elif os.listdir(dest):
            raise util.Abort(_("destination '%s' is not empty") % dest)

    # removes the partly-created destination when the clone fails;
    # close() disarms it once the clone is known to be good
    class DirCleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
        def close(self):
            self.dir_ = None
        def cleanup(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    src_lock = dest_lock = dir_cleanup = None
    try:
        if islocal(dest):
            dir_cleanup = DirCleanup(dest)

        abspath = origsource
        copy = False
        # a direct file copy is only possible local-to-local, without
        # --pull and without a revision subset
        if src_repo.cancopy() and islocal(dest):
            abspath = os.path.abspath(util.drop_scheme('file', origsource))
            copy = not pull and not rev

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                src_lock = src_repo.lock(wait=False)
            except error.LockError:
                copy = False

        if copy:
            src_repo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                dir_cleanup.dir_ = hgdir
            try:
                dest_path = hgdir
                os.mkdir(dest_path)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            hardlink = None
            num = 0
            for f in src_repo.store.copylist():
                src = os.path.join(src_repo.sharedpath, f)
                dst = os.path.join(dest_path, f)
                dstbase = os.path.dirname(dst)
                if dstbase and not os.path.exists(dstbase):
                    os.mkdir(dstbase)
                if os.path.exists(src):
                    if dst.endswith('data'):
                        # lock to avoid premature writing to the target
                        dest_lock = lock.lock(os.path.join(dstbase, "lock"))
                    hardlink, n = util.copyfiles(src, dst, hardlink)
                    num += n
            if hardlink:
                ui.debug("linked %d files\n" % num)
            else:
                ui.debug("copied %d files\n" % num)

            # we need to re-init the repo after manually copying the data
            # into it
            dest_repo = repository(ui, dest)
            src_repo.hook('outgoing', source='clone',
                          node=node.hex(node.nullid))
        else:
            try:
                dest_repo = repository(ui, dest, create=True)
            except OSError, inst:
                if inst.errno == errno.EEXIST:
                    dir_cleanup.close()
                    raise util.Abort(_("destination '%s' already exists")
                                     % dest)
                raise

            revs = None
            if rev:
                if 'lookup' not in src_repo.capabilities:
                    raise util.Abort(_("src repository does not support "
                                       "revision lookup and so doesn't "
                                       "support clone by revision"))
                revs = [src_repo.lookup(r) for r in rev]
                checkout = revs[0]
            # pull into a local destination, or push out to a remote one
            if dest_repo.local():
                dest_repo.clone(src_repo, heads=revs, stream=stream)
            elif src_repo.local():
                src_repo.push(dest_repo, revs=revs)
            else:
                raise util.Abort(_("clone from remote to remote not supported"))

        # clone succeeded: keep the destination directory
        if dir_cleanup:
            dir_cleanup.close()

        if dest_repo.local():
            # record the source as the default path for pull/push
            fp = dest_repo.opener("hgrc", "w", text=True)
            fp.write("[paths]\n")
            fp.write("default = %s\n" % abspath)
            fp.close()

            dest_repo.ui.setconfig('paths', 'default', abspath)

            if update:
                if update is not True:
                    checkout = update
                    if src_repo.local():
                        checkout = src_repo.lookup(update)
                # try the requested checkout first, then fallbacks
                for test in (checkout, 'default', 'tip'):
                    if test is None:
                        continue
                    try:
                        uprev = dest_repo.lookup(test)
                        break
                    except error.RepoLookupError:
                        continue
                bn = dest_repo[uprev].branch()
                dest_repo.ui.status(_("updating to branch %s\n")
                                    % encoding.tolocal(bn))
                _update(dest_repo, uprev)

        return src_repo, dest_repo
    finally:
        release(src_lock, dest_lock)
        if dir_cleanup is not None:
            dir_cleanup.cleanup()
377 377
def _showstats(repo, stats):
    # stats is the (updated, merged, removed, unresolved) tuple
    # returned by mergemod.update
    repo.ui.status(_("%d files updated, %d files merged, "
                     "%d files removed, %d files unresolved\n") % stats)
381 381
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    stats = mergemod.update(repo, node, False, False, None)
    _showstats(repo, stats)
    # stats[3] counts files whose merge is unresolved
    unresolved = stats[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    return unresolved > 0
389 389
# naming conflict in clone(): its 'update' argument shadows the function
# above, so keep a module-level alias reachable from there
_update = update
392 392
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    # branchmerge=False, force=True: local modifications are overwritten
    result = mergemod.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, result)
    return result[3] > 0
399 399
def merge(repo, node, force=None, remind=True):
    """branch merge with node, resolving changes"""
    stats = mergemod.update(repo, node, True, force, False)
    _showstats(repo, stats)
    unresolved = stats[3] > 0
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return unresolved
410 410
def _incoming(displaychlist, subreporecurse, ui, repo, source,
        opts, buffered=False):
    """
    Helper for incoming / gincoming.
    displaychlist gets called with
        (remoterepo, incomingchangesetlist, displayer) parameters,
    and is supposed to contain only code that can't be unified.

    Returns 0 when incoming changesets were found, otherwise the
    result of subreporecurse().
    """
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = repository(remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

    if revs:
        revs = [other.lookup(rev) for rev in revs]
    bundlename = opts["bundle"]
    force = opts["force"]
    tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force)
    common, incoming, rheads = tmp
    if not incoming:
        # best-effort removal of a stale bundle file; the bare except is
        # deliberate (bundlename may be empty or the file missing)
        try:
            os.unlink(bundlename)
        except:
            pass
        ui.status(_("no changes found\n"))
        return subreporecurse()

    bundle = None
    if bundlename or not other.local():
        # create a bundle (uncompressed if other repo is not local)

        if revs is None and other.capable('changegroupsubset'):
            revs = rheads

        if revs is None:
            cg = other.changegroup(incoming, "incoming")
        else:
            cg = other.changegroupsubset(incoming, revs, 'incoming')
        bundletype = other.local() and "HG10BZ" or "HG10UN"
        fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
        # keep written bundle?
        if bundlename:
            bundle = None
        if not other.local():
            # use the created uncompressed bundlerepo
            other = bundlerepo.bundlerepository(ui, repo.root, fname)

    try:
        chlist = other.changelog.nodesbetween(incoming, revs)[0]
        displayer = cmdutil.show_changeset(ui, other, opts, buffered)

        # XXX once graphlog extension makes it into core,
        # should be replaced by a if graph/else
        displaychlist(other, chlist, displayer)

        displayer.close()
    finally:
        # close the (possibly bundle-backed) remote repo and remove any
        # temporary bundle we created above
        if hasattr(other, 'close'):
            other.close()
        if bundle:
            os.unlink(bundle)
    subreporecurse()
    return 0 # exit code is zero since we found incoming changes
474
def incoming(ui, repo, source, opts):
    """show changesets not present locally; returns 0 when changes exist"""
    def subreporecurse():
        # exit code 1 (nothing found) unless a subrepo has incoming csets
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    def display(other, chlist, displayer):
        # flat-list rendering; discovery and bundle handling are shared
        # with graphlog's gincoming via _incoming above
        limit = cmdutil.loglimit(opts)
        if opts.get('newest_first'):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    return _incoming(display, subreporecurse, ui, repo, source, opts)
483 499
def outgoing(ui, repo, dest, opts):
    """show changesets not found in the destination; returns 0 on changes"""
    def recurse():
        # exit code 1 (nothing found) unless a subrepo has outgoing csets
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = parseurl(dest, opts.get('branch'))
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = repository(remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = discovery.findoutgoing(repo, other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return recurse()

    # expand the outgoing roots into the complete changeset list
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if limit is not None and count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
    displayer.close()
    recurse()
    return 0 # exit code is zero since we found outgoing changes
524 540
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    # force-update without branch merge, limited to files picked by choose
    stats = mergemod.update(repo, node, False, True, choose)
    return stats[3] > 0
528 544
def verify(repo):
    """verify the consistency of a repository"""
    # delegate to mercurial.verify (imported as verifymod)
    return verifymod.verify(repo)
532 548
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if hasattr(src, 'baseui'):
        # a repository: start from its base ui (dropping repo-specific
        # config) but read option overrides from the repo's ui
        dst = src.baseui.copy()
        src = src.ui
    else:
        # a plain ui object: keep all of its global options
        dst = src.copy()

    # ssh transport options: the command line wins over configuration
    for opt in ('ssh', 'remotecmd'):
        value = opts.get(opt) or src.config('ui', opt)
        if value:
            dst.setconfig("ui", opt, value)

    # carry over which local repository triggered the remote operation
    root = src.config('bundle', 'mainreporoot')
    if root:
        dst.setconfig('bundle', 'mainreporoot', root)

    # authentication and proxy settings are needed verbatim
    for section in ('auth', 'http_proxy'):
        for key, val in src.configitems(section):
            dst.setconfig(section, key, val)

    return dst
General Comments 0
You need to be logged in to leave comments. Login now