@@ -296,8 +296,8 @@ def standin(filename):
  '''Return the repo-relative path to the standin for the specified big
  file.'''
  # Notes:
- # 1)
- # it repo-relative so
+ # 1) Some callers want an absolute path, but for instance addlargefiles
+ # needs it repo-relative so it can be passed to repoadd(). So leave
  # it up to the caller to use repo.wjoin() to get an absolute path.
  # 2) Join with '/' because that's what dirstate always uses, even on
  # Windows. Change existing separator to '/' first in case we are
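The notes in this hunk spell out two rules for standin paths: stay repo-relative and always join with '/'. A minimal sketch of that idea, with hypothetical names (standinpath and the '.hglf' directory name are assumptions, not the extension's actual code):

    import os
    import posixpath

    def standinpath(filename, standindir='.hglf'):
        # Keep the result repo-relative and join with '/', converting any
        # platform separator first so the path matches what dirstate stores.
        return posixpath.join(standindir, filename.replace(os.sep, '/'))

    # e.g. on Windows: standinpath('sub\\big.bin') -> '.hglf/sub/big.bin'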
@@ -4,7 +4,7 @@
  # This software may be used and distributed according to the terms of the
  # GNU General Public License version 2 or any later version.

- '''remote largefile store; the base class for
+ '''remote largefile store; the base class for wirestore'''

  import urllib2

@@ -1522,7 +1522,7 @@ class queue(object):
  #
  # this should really read:
  # mm, dd, aa = repo.status(top, patchparent)[:3]
- # but we do it backwards to take advantage of manifest/chlog
+ # but we do it backwards to take advantage of manifest/changelog
  # caching against the next repo.status call
  mm, aa, dd = repo.status(patchparent, top)[:3]
  changes = repo.changelog.read(top)
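The comment explains why the status call is made "backwards": reversing the comparison direction swaps which files count as added and which as removed, so the caller just unpacks the lists in the opposite order. A toy model of that symmetry (not mq's actual code):

    def diffstatus(oldfiles, newfiles):
        # Comparing old->new versus new->old swaps 'added' and 'removed'.
        added = sorted(newfiles - oldfiles)
        removed = sorted(oldfiles - newfiles)
        return added, removed

    a, r = diffstatus({'x', 'y'}, {'y', 'z'})
    assert diffstatus({'y', 'z'}, {'x', 'y'}) == (r, a)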
@@ -33,7 +33,7 @@ def scanpatch(fp):
  - ('file', [header_lines + fromfile + tofile])
  - ('context', [context_lines])
  - ('hunk', [hunk_lines])
- - ('range', (-start,len, +start,len,
+ - ('range', (-start,len, +start,len, proc))
  """
  lr = patch.linereader(fp)

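The docstring lists the tagged tuples scanpatch yields. A simplified, hypothetical generator with the same shape of output (not the real scanpatch, which handles many more cases):

    import re

    def scanlines(lines):
        # Tag raw patch lines as 'range', 'hunk' or 'context' events.
        rangere = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@')
        for line in lines:
            m = rangere.match(line)
            if m:
                yield ('range', m.groups())
            elif line.startswith(('+', '-')):
                yield ('hunk', [line])
            else:
                yield ('context', [line])

    events = list(scanlines(['@@ -1,2 +1,2 @@', '-old', '+new', ' same']))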
@@ -61,7 +61,7 @@ class ShortRepository(object):
  return '<ShortRepository: %s>' % self.scheme

  def instance(self, ui, url, create):
- # Should this use
+ # Should this use the util.url class, or is manual parsing better?
  url = url.split('://', 1)[1]
  parts = url.split('/', self.parts)
  if len(parts) > self.parts:
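instance() does the manual parsing the comment asks about: strip the scheme prefix, then expand the remainder. A hedged, self-contained sketch of that pattern (the template table here is hypothetical; the real extension takes its schemes from configuration):

    TEMPLATES = {'gh': 'https://github.com/{0}'}  # hypothetical shortcut table

    def expandscheme(url):
        # Strip the scheme, then substitute the remainder into the template.
        scheme, rest = url.split('://', 1)
        return TEMPLATES[scheme].format(rest)

    assert expandscheme('gh://user/repo') == 'https://github.com/user/repo'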
@@ -114,7 +114,7 @@ globals()['_GLOBAL_DONE'] = 0
  _FLAGS_QR_QUERY = 0x0000 # query
  _FLAGS_QR_RESPONSE = 0x8000 # response

- _FLAGS_AA = 0x0400 # Authorative answer
+ _FLAGS_AA = 0x0400 # Authoritative answer
  _FLAGS_TC = 0x0200 # Truncated
  _FLAGS_RD = 0x0100 # Recursion desired
  _FLAGS_RA = 0x8000 # Recursion available
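These constants are single bits of the DNS header flags word, so they are combined with bitwise OR and tested with bitwise AND, for example:

    _FLAGS_QR_RESPONSE = 0x8000  # response
    _FLAGS_AA = 0x0400           # authoritative answer

    def isauthoritative(flags):
        # Illustrative helper, not part of the zeroconf module itself.
        return bool(flags & _FLAGS_AA)

    assert isauthoritative(_FLAGS_QR_RESPONSE | _FLAGS_AA)
    assert not isauthoritative(_FLAGS_QR_RESPONSE)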
@@ -650,7 +650,7 @@ class DNSOutgoing(object):
  if now == 0 or not record.isExpired(now):
  self.answers.append((record, now))

- def addAuthorativeAnswer(self, record):
+ def addAuthoritativeAnswer(self, record):
  """Adds an authoritative answer"""
  self.authorities.append(record)

@@ -1433,7 +1433,7 @@ class Zeroconf(object):
  out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
  self.debug = out
  out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
- out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
+ out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
  self.send(out)
  i += 1
  nextTime += _CHECK_TIME
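The surrounding loop sends a few spaced probe packets, bumping nextTime by _CHECK_TIME after each send. A hedged sketch of that loop shape (send and buildprobe stand in for Zeroconf.send and the DNSOutgoing construction above; the interval value is only an assumption):

    import time

    def announce(send, buildprobe, attempts=3, interval=0.175):
        nexttime = time.time()
        for _ in range(attempts):
            delay = nexttime - time.time()
            if delay > 0:
                time.sleep(delay)  # wait until the next scheduled probe
            send(buildprobe())
            nexttime += interval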
@@ -58,7 +58,7 @@ def readcurrent(repo):
  raise
  return None
  try:
- # No readline() in posixfile
+ # No readline() in osutil.posixfile, reading everything is cheap
  mark = encoding.tolocal((file.readlines() or [''])[0])
  if mark == '' or mark not in repo._bookmarks:
  mark = None
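Since the file object here has no readline(), the code reads everything and takes the first line; for a bookmarks file of a few bytes that is cheap. The same pattern in isolation (a sketch, not the bookmarks code itself):

    def readfirstline(path):
        # Read the whole (tiny) file and return its first line, if any.
        with open(path, 'rb') as fp:
            return (fp.readlines() or [b''])[0].rstrip(b'\n')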
@@ -1352,20 +1352,20 @@ def commit(ui, repo, *pats, **opts):
  # printed anyway.
  #
  # Par Msg Comment
- # N
+ # N N y additional topo root
  #
- # B
- # C
- # H
+ # B N y additional branch root
+ # C N y additional topo head
+ # H N n usual case
  #
- # B
- # C
- # H
+ # B B y weird additional branch root
+ # C B y branch merge
+ # H B n merge with named branch
  #
- # C
- # C
+ # C C y additional head from merge
+ # C H n merge with a head
  #
- # H
+ # H H n head merge: head count decreases

  if not opts.get('close_branch'):
  for r in parents:
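Read as a decision table, the two letters classify the changeset's parents and the y/n column says whether a "created new head"-style message gets printed. One hypothetical way to render the same table as data, purely for readability (the column interpretation is inferred from the rows above, not taken from the source):

    PRINTMESSAGE = {
        ('N', 'N'): True,   # additional topo root
        ('B', 'N'): True,   # additional branch root
        ('C', 'N'): True,   # additional topo head
        ('H', 'N'): False,  # usual case
        ('B', 'B'): True,   # weird additional branch root
        ('C', 'B'): True,   # branch merge
        ('H', 'B'): False,  # merge with named branch
        ('C', 'C'): True,   # additional head from merge
        ('C', 'H'): False,  # merge with a head
        ('H', 'H'): False,  # head merge: head count decreases
    }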
@@ -137,7 +137,7 @@ class server(object):
  if logpath:
  global logfile
  if logpath == '-':
- # write log on a special 'd'ebug channel
+ # write log on a special 'd' (debug) channel
  logfile = channeledoutput(sys.stdout, sys.stdout, 'd')
  else:
  logfile = open(logpath, 'a')
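When logging to '-', output is multiplexed onto the command server's 'd' (debug) channel instead of a plain file. A hedged sketch of channel-style framing (the class name and exact framing are assumptions, not Mercurial's actual channeledoutput):

    import struct
    import sys

    class channeledwriter(object):
        # Frame each write as a one-byte channel name plus a big-endian length.
        def __init__(self, out, channel):
            self.out = out
            self.channel = channel  # e.g. b'd' for a debug/log channel

        def write(self, data):
            if not data:
                return
            self.out.write(struct.pack('>cI', self.channel, len(data)))
            self.out.write(data)
            self.out.flush()

    # usage sketch: channeledwriter(sys.stdout.buffer, b'd').write(b'log line\n')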
@@ -67,7 +67,7 @@ class config(object):
  return self._data.get(section, {}).get(item, default)

  def backup(self, section, item):
- """return a tuple allowing restore to reinstall
+ """return a tuple allowing restore to reinstall previous values

  The main reason we need it is because it handle the "no data" case.
  """
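backup() returns a token that restore() can later apply, and the docstring stresses the "no data" case: restoring must also be able to remove an item that did not exist before. A sketch of that idea over a plain dict (not the real config class):

    def backup(data, section, item):
        # A 3-tuple records an existing value; a 2-tuple marks "no data".
        if item in data.get(section, {}):
            return (section, item, data[section][item])
        return (section, item)

    def restore(data, token):
        section, item = token[:2]
        if len(token) == 3:
            data.setdefault(section, {})[item] = token[2]
        else:
            data.get(section, {}).pop(item, None)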
@@ -1167,7 +1167,7 @@ class workingfilectx(filectx):

  returns True if different than fctx.
  """
- # fctx should be a filectx (not a wfctx)
+ # fctx should be a filectx (not a workingfilectx)
  # invert comparison to reuse the same code path
  return fctx.cmp(self)

@@ -49,7 +49,7 @@ class match(object):
  a pattern is one of:
  'glob:<glob>' - a glob relative to cwd
  're:<regexp>' - a regular expression
- 'path:<path>' - a path relative to
+ 'path:<path>' - a path relative to repository root
  'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
  'relpath:<path>' - a path relative to cwd
  'relre:<regexp>' - a regexp that needn't match the start of a name
@@ -318,7 +318,7 @@ def successormarkers(ctx):
  def anysuccessors(obsstore, node):
  """Yield every successor of <node>

- This
+ This is a linear yield unsuitable to detect split changesets."""
  remaining = set([node])
  seen = set(remaining)
  while remaining:
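The loop below the docstring walks successors breadth-first with a remaining set and a seen set. The same traversal in self-contained form (successorsof is a stand-in for the obsstore lookup, so this is a sketch rather than the real anysuccessors):

    def allsuccessors(successorsof, node):
        # Yield every transitive successor of node exactly once.
        remaining = set([node])
        seen = set(remaining)
        while remaining:
            current = remaining.pop()
            for succ in successorsof(current):
                if succ not in seen:
                    seen.add(succ)
                    remaining.add(succ)
                    yield succ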
@@ -363,7 +363,7 @@ def newheads(repo, heads, roots):
  """compute new head of a subset minus another

  * `heads`: define the first subset
- * `roots`: define the second we sub
+ * `roots`: define the second we subtract from the first"""
  revset = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))',
  heads, roots, roots, heads)
  return [c.node() for c in revset]
@@ -70,7 +70,7 @@ def parse_index2(data, inline):

  def parse_dirstate(dmap, copymap, st):
  parents = [st[:20], st[20: 40]]
- # deref fields so they will be local in loop
+ # dereference fields so they will be local in loop
  format = ">cllll"
  e_size = struct.calcsize(format)
  pos1 = 40
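The ">cllll" format describes one dirstate entry header (a state byte plus four 32-bit fields), and the comment about dereferencing refers to binding frequently used lookups to locals before the hot loop. A hedged sketch of such a loop (entry layout as suggested by the format string above; not the real parser):

    import struct

    DIRSTATEFMT = '>cllll'       # state, mode, size, mtime, filename length
    ENTRYSIZE = struct.calcsize(DIRSTATEFMT)

    def iterdirstate(data, pos=40):
        # Skip the two 20-byte parent hashes, then decode header + filename.
        unpack = struct.unpack  # dereference once so the lookup is local
        while pos < len(data):
            state, mode, size, mtime, flen = unpack(
                DIRSTATEFMT, data[pos:pos + ENTRYSIZE])
            pos += ENTRYSIZE
            filename = data[pos:pos + flen]
            pos += flen
            yield state, mode, size, mtime, filename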
@@ -1477,7 +1477,11 @@ def parsebool(s):
  for a in _hexdig for b in _hexdig)

  def _urlunquote(s):
- """unquote('abc%20def') -> 'abc def'."""
+ """Decode HTTP/HTML % encoding.
+
+ >>> _urlunquote('abc%20def')
+ 'abc def'
+ """
  res = s.split('%')
  # fastpath
  if len(res) == 1:
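The enlarged docstring turns the old one-line example into a doctest. The function's fastpath splits on '%' and returns early when nothing is quoted; a minimal self-contained sketch with the same structure (not the actual _urlunquote, which uses the precomputed hex-pair table being built in the context line above):

    def percentunquote(s):
        # Decode %XX escapes, e.g. 'abc%20def' -> 'abc def'.
        res = s.split('%')
        if len(res) == 1:           # fastpath: nothing quoted
            return s
        out = [res[0]]
        for item in res[1:]:
            if len(item) >= 2:
                try:
                    out.append(chr(int(item[:2], 16)) + item[2:])
                    continue
                except ValueError:
                    pass
            out.append('%' + item)  # leave malformed escapes untouched
        return ''.join(out)

    assert percentunquote('abc%20def') == 'abc def'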