hg: look up schemes using url.url
Brodie Rao
r13823:ad179644 default
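The change itself is confined to `_lookup()`: instead of guessing the scheme by scanning for the first `:` in the path, it now parses the path with `url.url()` and uses the resulting `scheme` attribute, keeping `file` as the fallback. A minimal sketch of the idea follows, using the standard library's `urlparse` purely as an illustrative stand-in for Mercurial's `url.url` class (an assumption made for the example, not the real implementation):

    # Illustrative sketch only: urlparse stands in for Mercurial's url.url.
    from urlparse import urlparse  # Python 2, matching the code below

    def lookup_scheme_old(path):
        # old heuristic: everything before the first ':' is taken as the scheme
        c = path.find(':')
        if c > 0:
            return path[:c]
        return 'file'

    def lookup_scheme_new(path):
        # new approach: let the URL parser extract the scheme, default to 'file'
        return urlparse(path).scheme or 'file'

    for p in ('http://example.com/repo', 'ssh://user@host/repo', 'relative/path'):
        print '%s -> %s' % (p, lookup_scheme_new(p))

Either way, the resulting scheme is looked up in the `schemes` table, and anything unrecognized still falls back to the `file` handler.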
@@ -1,574 +1,571 @@
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from i18n import _
10 10 from lock import release
11 11 from node import hex, nullid, nullrev, short
12 12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
13 13 import lock, util, extensions, error, encoding, node
14 14 import cmdutil, discovery, url
15 15 import merge as mergemod
16 16 import verify as verifymod
17 17 import errno, os, shutil
18 18
19 19 def _local(path):
20 20 path = util.expandpath(util.drop_scheme('file', path))
21 21 return (os.path.isfile(path) and bundlerepo or localrepo)
22 22
23 23 def addbranchrevs(lrepo, repo, branches, revs):
24 24 hashbranch, branches = branches
25 25 if not hashbranch and not branches:
26 26 return revs or None, revs and revs[0] or None
27 27 revs = revs and list(revs) or []
28 28 if not repo.capable('branchmap'):
29 29 if branches:
30 30 raise util.Abort(_("remote branch lookup not supported"))
31 31 revs.append(hashbranch)
32 32 return revs, revs[0]
33 33 branchmap = repo.branchmap()
34 34
35 35 def primary(branch):
36 36 if branch == '.':
37 37 if not lrepo or not lrepo.local():
38 38 raise util.Abort(_("dirstate branch not accessible"))
39 39 branch = lrepo.dirstate.branch()
40 40 if branch in branchmap:
41 41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
42 42 return True
43 43 else:
44 44 return False
45 45
46 46 for branch in branches:
47 47 if not primary(branch):
48 48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 49 if hashbranch:
50 50 if not primary(hashbranch):
51 51 revs.append(hashbranch)
52 52 return revs, revs[0]
53 53
54 54 def parseurl(url, branches=None):
55 55 '''parse url#branch, returning (url, (branch, branches))'''
56 56
57 57 if '#' not in url:
58 58 return url, (None, branches or [])
59 59 url, branch = url.split('#', 1)
60 60 return url, (branch, branches or [])
61 61
62 62 schemes = {
63 63 'bundle': bundlerepo,
64 64 'file': _local,
65 65 'http': httprepo,
66 66 'https': httprepo,
67 67 'ssh': sshrepo,
68 68 'static-http': statichttprepo,
69 69 }
70 70
71 71 def _lookup(path):
72 scheme = 'file'
73 if path:
74 c = path.find(':')
75 if c > 0:
76 scheme = path[:c]
72 u = url.url(path)
73 scheme = u.scheme or 'file'
77 74 thing = schemes.get(scheme) or schemes['file']
78 75 try:
79 76 return thing(path)
80 77 except TypeError:
81 78 return thing
82 79
83 80 def islocal(repo):
84 81 '''return true if repo or path is local'''
85 82 if isinstance(repo, str):
86 83 try:
87 84 return _lookup(repo).islocal(repo)
88 85 except AttributeError:
89 86 return False
90 87 return repo.local()
91 88
92 89 def repository(ui, path='', create=False):
93 90 """return a repository object for the specified path"""
94 91 repo = _lookup(path).instance(ui, path, create)
95 92 ui = getattr(repo, "ui", ui)
96 93 for name, module in extensions.extensions():
97 94 hook = getattr(module, 'reposetup', None)
98 95 if hook:
99 96 hook(ui, repo)
100 97 return repo
101 98
102 99 def defaultdest(source):
103 100 '''return default destination of clone if none is given'''
104 101 return os.path.basename(os.path.normpath(source))
105 102
106 103 def localpath(path):
107 104 if path.startswith('file://localhost/'):
108 105 return path[16:]
109 106 if path.startswith('file://'):
110 107 return path[7:]
111 108 if path.startswith('file:'):
112 109 return path[5:]
113 110 return path
114 111
115 112 def share(ui, source, dest=None, update=True):
116 113 '''create a shared repository'''
117 114
118 115 if not islocal(source):
119 116 raise util.Abort(_('can only share local repositories'))
120 117
121 118 if not dest:
122 119 dest = defaultdest(source)
123 120 else:
124 121 dest = ui.expandpath(dest)
125 122
126 123 if isinstance(source, str):
127 124 origsource = ui.expandpath(source)
128 125 source, branches = parseurl(origsource)
129 126 srcrepo = repository(ui, source)
130 127 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
131 128 else:
132 129 srcrepo = source
133 130 origsource = source = srcrepo.url()
134 131 checkout = None
135 132
136 133 sharedpath = srcrepo.sharedpath # if our source is already sharing
137 134
138 135 root = os.path.realpath(dest)
139 136 roothg = os.path.join(root, '.hg')
140 137
141 138 if os.path.exists(roothg):
142 139 raise util.Abort(_('destination already exists'))
143 140
144 141 if not os.path.isdir(root):
145 142 os.mkdir(root)
146 143 util.makedir(roothg, notindexed=True)
147 144
148 145 requirements = ''
149 146 try:
150 147 requirements = srcrepo.opener('requires').read()
151 148 except IOError, inst:
152 149 if inst.errno != errno.ENOENT:
153 150 raise
154 151
155 152 requirements += 'shared\n'
156 153 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
157 154 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
158 155
159 156 default = srcrepo.ui.config('paths', 'default')
160 157 if default:
161 158 f = file(os.path.join(roothg, 'hgrc'), 'w')
162 159 f.write('[paths]\ndefault = %s\n' % default)
163 160 f.close()
164 161
165 162 r = repository(ui, root)
166 163
167 164 if update:
168 165 r.ui.status(_("updating working directory\n"))
169 166 if update is not True:
170 167 checkout = update
171 168 for test in (checkout, 'default', 'tip'):
172 169 if test is None:
173 170 continue
174 171 try:
175 172 uprev = r.lookup(test)
176 173 break
177 174 except error.RepoLookupError:
178 175 continue
179 176 _update(r, uprev)
180 177
181 178 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
182 179 stream=False, branch=None):
183 180 """Make a copy of an existing repository.
184 181
185 182 Create a copy of an existing repository in a new directory. The
186 183 source and destination are URLs, as passed to the repository
187 184 function. Returns a pair of repository objects, the source and
188 185 newly created destination.
189 186
190 187 The location of the source is added to the new repository's
191 188 .hg/hgrc file, as the default to be used for future pulls and
192 189 pushes.
193 190
194 191 If an exception is raised, the partly cloned/updated destination
195 192 repository will be deleted.
196 193
197 194 Arguments:
198 195
199 196 source: repository object or URL
200 197
201 198 dest: URL of destination repository to create (defaults to base
202 199 name of source repository)
203 200
204 201 pull: always pull from source repository, even in local case
205 202
206 203 stream: stream raw data uncompressed from repository (fast over
207 204 LAN, slow over WAN)
208 205
209 206 rev: revision to clone up to (implies pull=True)
210 207
211 208 update: update working directory after clone completes, if
212 209 destination is local repository (True means update to default rev,
213 210 anything else is treated as a revision)
214 211
215 212 branch: branches to clone
216 213 """
217 214
218 215 if isinstance(source, str):
219 216 origsource = ui.expandpath(source)
220 217 source, branch = parseurl(origsource, branch)
221 218 src_repo = repository(ui, source)
222 219 else:
223 220 src_repo = source
224 221 branch = (None, branch or [])
225 222 origsource = source = src_repo.url()
226 223 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
227 224
228 225 if dest is None:
229 226 dest = defaultdest(source)
230 227 ui.status(_("destination directory: %s\n") % dest)
231 228 else:
232 229 dest = ui.expandpath(dest)
233 230
234 231 dest = localpath(dest)
235 232 source = localpath(source)
236 233
237 234 if os.path.exists(dest):
238 235 if not os.path.isdir(dest):
239 236 raise util.Abort(_("destination '%s' already exists") % dest)
240 237 elif os.listdir(dest):
241 238 raise util.Abort(_("destination '%s' is not empty") % dest)
242 239
243 240 class DirCleanup(object):
244 241 def __init__(self, dir_):
245 242 self.rmtree = shutil.rmtree
246 243 self.dir_ = dir_
247 244 def close(self):
248 245 self.dir_ = None
249 246 def cleanup(self):
250 247 if self.dir_:
251 248 self.rmtree(self.dir_, True)
252 249
253 250 src_lock = dest_lock = dir_cleanup = None
254 251 try:
255 252 if islocal(dest):
256 253 dir_cleanup = DirCleanup(dest)
257 254
258 255 abspath = origsource
259 256 copy = False
260 257 if src_repo.cancopy() and islocal(dest):
261 258 abspath = os.path.abspath(util.drop_scheme('file', origsource))
262 259 copy = not pull and not rev
263 260
264 261 if copy:
265 262 try:
266 263 # we use a lock here because if we race with commit, we
267 264 # can end up with extra data in the cloned revlogs that's
268 265 # not pointed to by changesets, thus causing verify to
269 266 # fail
270 267 src_lock = src_repo.lock(wait=False)
271 268 except error.LockError:
272 269 copy = False
273 270
274 271 if copy:
275 272 src_repo.hook('preoutgoing', throw=True, source='clone')
276 273 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
277 274 if not os.path.exists(dest):
278 275 os.mkdir(dest)
279 276 else:
280 277 # only clean up directories we create ourselves
281 278 dir_cleanup.dir_ = hgdir
282 279 try:
283 280 dest_path = hgdir
284 281 util.makedir(dest_path, notindexed=True)
285 282 except OSError, inst:
286 283 if inst.errno == errno.EEXIST:
287 284 dir_cleanup.close()
288 285 raise util.Abort(_("destination '%s' already exists")
289 286 % dest)
290 287 raise
291 288
292 289 hardlink = None
293 290 num = 0
294 291 for f in src_repo.store.copylist():
295 292 src = os.path.join(src_repo.sharedpath, f)
296 293 dst = os.path.join(dest_path, f)
297 294 dstbase = os.path.dirname(dst)
298 295 if dstbase and not os.path.exists(dstbase):
299 296 os.mkdir(dstbase)
300 297 if os.path.exists(src):
301 298 if dst.endswith('data'):
302 299 # lock to avoid premature writing to the target
303 300 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
304 301 hardlink, n = util.copyfiles(src, dst, hardlink)
305 302 num += n
306 303 if hardlink:
307 304 ui.debug("linked %d files\n" % num)
308 305 else:
309 306 ui.debug("copied %d files\n" % num)
310 307
311 308 # we need to re-init the repo after manually copying the data
312 309 # into it
313 310 dest_repo = repository(ui, dest)
314 311 src_repo.hook('outgoing', source='clone',
315 312 node=node.hex(node.nullid))
316 313 else:
317 314 try:
318 315 dest_repo = repository(ui, dest, create=True)
319 316 except OSError, inst:
320 317 if inst.errno == errno.EEXIST:
321 318 dir_cleanup.close()
322 319 raise util.Abort(_("destination '%s' already exists")
323 320 % dest)
324 321 raise
325 322
326 323 revs = None
327 324 if rev:
328 325 if 'lookup' not in src_repo.capabilities:
329 326 raise util.Abort(_("src repository does not support "
330 327 "revision lookup and so doesn't "
331 328 "support clone by revision"))
332 329 revs = [src_repo.lookup(r) for r in rev]
333 330 checkout = revs[0]
334 331 if dest_repo.local():
335 332 dest_repo.clone(src_repo, heads=revs, stream=stream)
336 333 elif src_repo.local():
337 334 src_repo.push(dest_repo, revs=revs)
338 335 else:
339 336 raise util.Abort(_("clone from remote to remote not supported"))
340 337
341 338 if dir_cleanup:
342 339 dir_cleanup.close()
343 340
344 341 if dest_repo.local():
345 342 fp = dest_repo.opener("hgrc", "w", text=True)
346 343 fp.write("[paths]\n")
347 344 fp.write("default = %s\n" % abspath)
348 345 fp.close()
349 346
350 347 dest_repo.ui.setconfig('paths', 'default', abspath)
351 348
352 349 if update:
353 350 if update is not True:
354 351 checkout = update
355 352 if src_repo.local():
356 353 checkout = src_repo.lookup(update)
357 354 for test in (checkout, 'default', 'tip'):
358 355 if test is None:
359 356 continue
360 357 try:
361 358 uprev = dest_repo.lookup(test)
362 359 break
363 360 except error.RepoLookupError:
364 361 continue
365 362 bn = dest_repo[uprev].branch()
366 363 dest_repo.ui.status(_("updating to branch %s\n") % bn)
367 364 _update(dest_repo, uprev)
368 365
369 366 # clone all bookmarks
370 367 if dest_repo.local() and src_repo.capable("pushkey"):
371 368 rb = src_repo.listkeys('bookmarks')
372 369 for k, n in rb.iteritems():
373 370 try:
374 371 m = dest_repo.lookup(n)
375 372 dest_repo._bookmarks[k] = m
376 373 except:
377 374 pass
378 375 if rb:
379 376 bookmarks.write(dest_repo)
380 377 elif src_repo.local() and dest_repo.capable("pushkey"):
381 378 for k, n in src_repo._bookmarks.iteritems():
382 379 dest_repo.pushkey('bookmarks', k, '', hex(n))
383 380
384 381 return src_repo, dest_repo
385 382 finally:
386 383 release(src_lock, dest_lock)
387 384 if dir_cleanup is not None:
388 385 dir_cleanup.cleanup()
389 386
390 387 def _showstats(repo, stats):
391 388 repo.ui.status(_("%d files updated, %d files merged, "
392 389 "%d files removed, %d files unresolved\n") % stats)
393 390
394 391 def update(repo, node):
395 392 """update the working directory to node, merging linear changes"""
396 393 stats = mergemod.update(repo, node, False, False, None)
397 394 _showstats(repo, stats)
398 395 if stats[3]:
399 396 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
400 397 return stats[3] > 0
401 398
402 399 # naming conflict in clone()
403 400 _update = update
404 401
405 402 def clean(repo, node, show_stats=True):
406 403 """forcibly switch the working directory to node, clobbering changes"""
407 404 stats = mergemod.update(repo, node, False, True, None)
408 405 if show_stats:
409 406 _showstats(repo, stats)
410 407 return stats[3] > 0
411 408
412 409 def merge(repo, node, force=None, remind=True):
413 410 """Branch merge with node, resolving changes. Return true if any
414 411 unresolved conflicts."""
415 412 stats = mergemod.update(repo, node, True, force, False)
416 413 _showstats(repo, stats)
417 414 if stats[3]:
418 415 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
419 416 "or 'hg update -C .' to abandon\n"))
420 417 elif remind:
421 418 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
422 419 return stats[3] > 0
423 420
424 421 def _incoming(displaychlist, subreporecurse, ui, repo, source,
425 422 opts, buffered=False):
426 423 """
427 424 Helper for incoming / gincoming.
428 425 displaychlist gets called with
429 426 (remoterepo, incomingchangesetlist, displayer) parameters,
430 427 and is supposed to contain only code that can't be unified.
431 428 """
432 429 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
433 430 other = repository(remoteui(repo, opts), source)
434 431 ui.status(_('comparing with %s\n') % url.hidepassword(source))
435 432 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
436 433
437 434 if revs:
438 435 revs = [other.lookup(rev) for rev in revs]
439 436 usecommon = other.capable('getbundle')
440 437 other, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other,
441 438 revs, opts["bundle"], opts["force"],
442 439 usecommon=usecommon)
443 440 if not incoming:
444 441 ui.status(_("no changes found\n"))
445 442 return subreporecurse()
446 443
447 444 try:
448 445 if usecommon:
449 446 chlist = other.changelog.findmissing(common, revs)
450 447 else:
451 448 chlist = other.changelog.nodesbetween(incoming, revs)[0]
452 449 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
453 450
454 451 # XXX once graphlog extension makes it into core,
455 452 # should be replaced by a if graph/else
456 453 displaychlist(other, chlist, displayer)
457 454
458 455 displayer.close()
459 456 finally:
460 457 if hasattr(other, 'close'):
461 458 other.close()
462 459 if bundle:
463 460 os.unlink(bundle)
464 461 subreporecurse()
465 462 return 0 # exit code is zero since we found incoming changes
466 463
467 464 def incoming(ui, repo, source, opts):
468 465 def subreporecurse():
469 466 ret = 1
470 467 if opts.get('subrepos'):
471 468 ctx = repo[None]
472 469 for subpath in sorted(ctx.substate):
473 470 sub = ctx.sub(subpath)
474 471 ret = min(ret, sub.incoming(ui, source, opts))
475 472 return ret
476 473
477 474 def display(other, chlist, displayer):
478 475 limit = cmdutil.loglimit(opts)
479 476 if opts.get('newest_first'):
480 477 chlist.reverse()
481 478 count = 0
482 479 for n in chlist:
483 480 if limit is not None and count >= limit:
484 481 break
485 482 parents = [p for p in other.changelog.parents(n) if p != nullid]
486 483 if opts.get('no_merges') and len(parents) == 2:
487 484 continue
488 485 count += 1
489 486 displayer.show(other[n])
490 487 return _incoming(display, subreporecurse, ui, repo, source, opts)
491 488
492 489 def _outgoing(ui, repo, dest, opts):
493 490 dest = ui.expandpath(dest or 'default-push', dest or 'default')
494 491 dest, branches = parseurl(dest, opts.get('branch'))
495 492 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
496 493 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
497 494 if revs:
498 495 revs = [repo.lookup(rev) for rev in revs]
499 496
500 497 other = repository(remoteui(repo, opts), dest)
501 498 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
502 499 if not o:
503 500 ui.status(_("no changes found\n"))
504 501 return None
505 502
506 503 return repo.changelog.nodesbetween(o, revs)[0]
507 504
508 505 def outgoing(ui, repo, dest, opts):
509 506 def recurse():
510 507 ret = 1
511 508 if opts.get('subrepos'):
512 509 ctx = repo[None]
513 510 for subpath in sorted(ctx.substate):
514 511 sub = ctx.sub(subpath)
515 512 ret = min(ret, sub.outgoing(ui, dest, opts))
516 513 return ret
517 514
518 515 limit = cmdutil.loglimit(opts)
519 516 o = _outgoing(ui, repo, dest, opts)
520 517 if o is None:
521 518 return recurse()
522 519
523 520 if opts.get('newest_first'):
524 521 o.reverse()
525 522 displayer = cmdutil.show_changeset(ui, repo, opts)
526 523 count = 0
527 524 for n in o:
528 525 if limit is not None and count >= limit:
529 526 break
530 527 parents = [p for p in repo.changelog.parents(n) if p != nullid]
531 528 if opts.get('no_merges') and len(parents) == 2:
532 529 continue
533 530 count += 1
534 531 displayer.show(repo[n])
535 532 displayer.close()
536 533 recurse()
537 534 return 0 # exit code is zero since we found outgoing changes
538 535
539 536 def revert(repo, node, choose):
540 537 """revert changes to revision in node without updating dirstate"""
541 538 return mergemod.update(repo, node, False, True, choose)[3] > 0
542 539
543 540 def verify(repo):
544 541 """verify the consistency of a repository"""
545 542 return verifymod.verify(repo)
546 543
547 544 def remoteui(src, opts):
548 545 'build a remote ui from ui or repo and opts'
549 546 if hasattr(src, 'baseui'): # looks like a repository
550 547 dst = src.baseui.copy() # drop repo-specific config
551 548 src = src.ui # copy target options from repo
552 549 else: # assume it's a global ui object
553 550 dst = src.copy() # keep all global options
554 551
555 552 # copy ssh-specific options
556 553 for o in 'ssh', 'remotecmd':
557 554 v = opts.get(o) or src.config('ui', o)
558 555 if v:
559 556 dst.setconfig("ui", o, v)
560 557
561 558 # copy bundle-specific options
562 559 r = src.config('bundle', 'mainreporoot')
563 560 if r:
564 561 dst.setconfig('bundle', 'mainreporoot', r)
565 562
566 563 # copy selected local settings to the remote ui
567 564 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
568 565 for key, val in src.configitems(sect):
569 566 dst.setconfig(sect, key, val)
570 567 v = src.config('web', 'cacerts')
571 568 if v:
572 569 dst.setconfig('web', 'cacerts', util.expandpath(v))
573 570
574 571 return dst
@@ -1,85 +1,85 @@
1 1 $ mkdir test
2 2 $ cd test
3 3
4 4 $ echo foo>foo
5 5 $ hg init
6 6 $ hg addremove
7 7 adding foo
8 8 $ hg commit -m 1
9 9
10 10 $ hg verify
11 11 checking changesets
12 12 checking manifests
13 13 crosschecking files in changesets and manifests
14 14 checking files
15 15 1 files, 1 changesets, 1 total revisions
16 16
17 17 $ hg serve -p $HGPORT -d --pid-file=hg.pid
18 18 $ cat hg.pid >> $DAEMON_PIDS
19 19 $ cd ..
20 20
21 21 $ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy
22 22 requesting all changes
23 23 adding changesets
24 24 adding manifests
25 25 adding file changes
26 26 added 1 changesets with 1 changes to 1 files
27 27 updating to branch default
28 28 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
29 29
30 30 $ cd copy
31 31 $ hg verify
32 32 checking changesets
33 33 checking manifests
34 34 crosschecking files in changesets and manifests
35 35 checking files
36 36 1 files, 1 changesets, 1 total revisions
37 37
38 38 $ hg co
39 39 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 40 $ cat foo
41 41 foo
42 42
43 43 $ hg manifest --debug
44 44 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
45 45
46 46 $ hg pull
47 47 pulling from http://foo:***@localhost:$HGPORT/
48 48 searching for changes
49 49 no changes found
50 50
51 51 $ hg rollback --dry-run --verbose
52 52 repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
53 53
54 54 Issue622: hg init && hg pull -u URL doesn't checkout default branch
55 55
56 56 $ cd ..
57 57 $ hg init empty
58 58 $ cd empty
59 59 $ hg pull -u ../test
60 60 pulling from ../test
61 61 requesting all changes
62 62 adding changesets
63 63 adding manifests
64 64 adding file changes
65 65 added 1 changesets with 1 changes to 1 files
66 66 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
67 67
68 68 Test 'file:' uri handling:
69 69
70 70 $ hg pull -q file://../test-doesnt-exist
71 abort: repository /test-doesnt-exist not found!
71 abort: file:// URLs can only refer to localhost
72 72 [255]
73 73
74 74 $ hg pull -q file:../test
75 75
76 76 It's tricky to make file:// URLs working on every platform with
77 77 regular shell commands.
78 78
79 79 $ URL=`python -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
80 80 $ hg pull -q "$URL"
81 81 abort: file:// URLs can only refer to localhost
82 82 [255]
83 83
84 84 $ URL=`python -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
85 85 $ hg pull -q "$URL"
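The test change above follows from the same switch to real URL parsing: in `file://../test-doesnt-exist`, the `..` is now treated as the host component of the URL, and any host other than an empty string or `localhost` is rejected with "file:// URLs can only refer to localhost" before any repository lookup is attempted, which is why the old "repository /test-doesnt-exist not found!" message disappears. A quick sketch of where the pieces land, again using the stdlib `urlparse` only as an illustrative stand-in for Mercurial's own parsing:

    # Sketch: show which part of each file URL ends up in the host position.
    from urlparse import urlparse

    for u in ('file://../test-doesnt-exist',   # host '..'        -> rejected
              'file:../test',                  # no host          -> allowed
              'file://localhost/tmp/test'):    # host 'localhost' -> allowed
        p = urlparse(u)
        print '%s -> host=%r path=%r' % (u, p.netloc, p.path)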