cleanup: use () to wrap long lines instead of \...
Augie Fackler
r41925:aaad36b8 default
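The pattern the patch applies throughout, shown as a minimal sketch with hypothetical names rather than code lifted from the diff: a statement continued with a trailing backslash is wrapped in parentheses instead, relying on Python's implicit line continuation inside brackets. The two forms produce the same value, but the parenthesized form cannot be broken by stray whitespace after a backslash.

    # hypothetical stand-ins for values such as repo.root and mercurial.__path__[0]
    repo_root, source_path = "/srv/repo", "/usr/lib/python3/mercurial"

    # before: explicit continuation; whitespace after the backslash is a SyntaxError
    banner = "loaded repo : %s\n" \
             "using source: %s" % (repo_root, source_path)

    # after: implicit continuation inside parentheses, same resulting string
    wrapped = ("loaded repo : %s\n"
               "using source: %s" % (repo_root, source_path))

    assert banner == wrapped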
@@ -34,9 +34,9 @@ def ipdb(ui, repo, msg, **opts):
34 34
35 35 @command('debugshell|dbsh', [])
36 36 def debugshell(ui, repo, **opts):
37 bannermsg = "loaded repo : %s\n" \
38 "using source: %s" % (repo.root,
39 mercurial.__path__[0])
37 bannermsg = ("loaded repo : %s\n"
38 "using source: %s" % (repo.root,
39 mercurial.__path__[0]))
40 40
41 41 pdbmap = {
42 42 'pdb' : 'code',
@@ -76,7 +76,7 @@ def build_docker_image(dockerfile: pathl
76 76 p.communicate(input=dockerfile)
77 77 if p.returncode:
78 78 raise subprocess.CalledProcessException(
79 p.returncode, 'failed to build docker image: %s %s' \
79 p.returncode, 'failed to build docker image: %s %s'
80 80 % (p.stdout, p.stderr))
81 81
82 82 def command_build(args):
@@ -293,15 +293,15 @@ def _usermatch(ui, user, usersorgroups):
293 293 # if ug is a user name: !username
294 294 # if ug is a group name: !@groupname
295 295 ug = ug[1:]
296 if not ug.startswith('@') and user != ug \
297 or ug.startswith('@') and user not in _getusers(ui, ug[1:]):
296 if (not ug.startswith('@') and user != ug
297 or ug.startswith('@') and user not in _getusers(ui, ug[1:])):
298 298 return True
299 299
300 300 # Test for user or group. Format:
301 301 # if ug is a user name: username
302 302 # if ug is a group name: @groupname
303 elif user == ug \
304 or ug.startswith('@') and user in _getusers(ui, ug[1:]):
303 elif (user == ug
304 or ug.startswith('@') and user in _getusers(ui, ug[1:])):
305 305 return True
306 306
307 307 return False
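A note on the boolean rewrite above, which is my reading rather than anything stated in the patch: the added parentheses only permit the line break; they do not regroup the condition, because 'and' already binds more tightly than 'or'. A minimal illustration with hypothetical truth values:

    # hypothetical values, for illustration only
    a, b, c, d = True, False, False, True

    unwrapped = a and b or c and d    # 'and' binds tighter than 'or'
    wrapped = (a and b
               or c and d)            # parentheses only allow the line break

    assert unwrapped == wrapped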
@@ -600,8 +600,8 @@ class bzmysql_2_18(bzmysql):
600 600
601 601 def __init__(self, ui):
602 602 bzmysql.__init__(self, ui)
603 self.default_notify = \
604 "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
603 self.default_notify = (
604 "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s")
605 605
606 606 class bzmysql_3_0(bzmysql_2_18):
607 607 '''support for bugzilla 3.0 series.'''
@@ -776,8 +776,8 @@ def createchangeset(ui, log, fuzz=60, me
776 776
777 777 # Ensure no changeset has a synthetic changeset as a parent.
778 778 while p.synthetic:
779 assert len(p.parents) <= 1, \
780 _('synthetic changeset cannot have multiple parents')
779 assert len(p.parents) <= 1, (
780 _('synthetic changeset cannot have multiple parents'))
781 781 if p.parents:
782 782 p = p.parents[0]
783 783 else:
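One subtlety in the assert rewrites in this patch, illustrated below with hypothetical values: only the assertion message is parenthesized, never the whole statement. Parenthesizing the condition and message together would create a two-element tuple, which is always truthy, so the assertion could never fire.

    chunknum, known_chunks = 1, 2    # hypothetical values

    # the form used in the patch: parentheses around the message expression only
    assert chunknum < known_chunks, (
        'Unknown chunk %d' % chunknum)

    # the form to avoid: a parenthesized (condition, message) pair is a tuple,
    # which is always true, so this would silently disable the check
    # assert (chunknum < known_chunks, 'Unknown chunk %d' % chunknum)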
@@ -954,12 +954,12 @@ def debugcvsps(ui, *args, **opts):
954 954
955 955 # have we seen the start tag?
956 956 if revisions and off:
957 if revisions[0] == (b"%d" % cs.id) or \
958 revisions[0] in cs.tags:
957 if (revisions[0] == (b"%d" % cs.id) or
958 revisions[0] in cs.tags):
959 959 off = False
960 960
961 961 # see if we reached the end tag
962 962 if len(revisions) > 1 and not off:
963 if revisions[1] == (b"%d" % cs.id) or \
964 revisions[1] in cs.tags:
963 if (revisions[1] == (b"%d" % cs.id) or
964 revisions[1] in cs.tags):
965 965 break
@@ -387,7 +387,7 @@ class convert_git(common.converter_sourc
387 387 def numcommits(self):
388 388 output, ret = self.gitrunlines('rev-list', '--all')
389 389 if ret:
390 raise error.Abort(_('cannot retrieve number of commits in %s') \
390 raise error.Abort(_('cannot retrieve number of commits in %s')
391 391 % self.path)
392 392 return len(output)
393 393
@@ -198,8 +198,8 @@ class p4_source(common.converter_source)
198 198 for filename in copiedfiles:
199 199 oldname = depotname[filename]
200 200
201 flcmd = 'p4 -G filelog %s' \
202 % procutil.shellquote(oldname)
201 flcmd = ('p4 -G filelog %s'
202 % procutil.shellquote(oldname))
203 203 flstdout = procutil.popen(flcmd, mode='rb')
204 204
205 205 copiedfilename = None
@@ -272,8 +272,8 @@ class p4_source(common.converter_source)
272 272 return self.heads
273 273
274 274 def getfile(self, name, rev):
275 cmd = 'p4 -G print %s' \
276 % procutil.shellquote("%s#%s" % (self.depotname[name], rev))
275 cmd = ('p4 -G print %s'
276 % procutil.shellquote("%s#%s" % (self.depotname[name], rev)))
277 277
278 278 lasterror = None
279 279 while True:
@@ -790,7 +790,7 @@ class svn_source(converter_source):
790 790 if childpath:
791 791 removed.add(self.recode(childpath))
792 792 else:
793 self.ui.debug('unknown path in revision %d: %s\n' % \
793 self.ui.debug('unknown path in revision %d: %s\n' %
794 794 (revnum, path))
795 795 elif kind == svn.core.svn_node_dir:
796 796 if ent.action == 'M':
@@ -1782,7 +1782,7 @@ def _continuehistedit(ui, repo, state):
1782 1782 state.write(tr=tr)
1783 1783 actobj = state.actions[0]
1784 1784 progress.increment(item=actobj.torule())
1785 ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
1785 ui.debug('histedit: processing %s %s\n' % (actobj.verb,
1786 1786 actobj.torule()))
1787 1787 parentctx, replacement_ = actobj.run()
1788 1788 state.parentctxnode = parentctx.node()
@@ -1881,7 +1881,7 @@ def _edithisteditplan(ui, repo, state, r
1881 1881 else:
1882 1882 rules = _readfile(ui, rules)
1883 1883 actions = parserules(rules, state)
1884 ctxs = [repo[act.node] \
1884 ctxs = [repo[act.node]
1885 1885 for act in state.actions if act.node]
1886 1886 warnverifyactions(ui, repo, actions, state, ctxs)
1887 1887 state.actions = actions
@@ -282,8 +282,8 @@ def commonsetup(ui):
282 282 scratchbranchpat = ui.config('infinitepush', 'branchpattern')
283 283 if scratchbranchpat:
284 284 global _scratchbranchmatcher
285 kind, pat, _scratchbranchmatcher = \
286 stringutil.stringmatcher(scratchbranchpat)
285 kind, pat, _scratchbranchmatcher = (
286 stringutil.stringmatcher(scratchbranchpat))
287 287
288 288 def serverextsetup(ui):
289 289 origpushkeyhandler = bundle2.parthandlermapping['pushkey']
@@ -294,8 +294,8 @@ def serverextsetup(ui):
294 294 bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
295 295
296 296 orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
297 newphaseheadshandler = lambda *args, **kwargs: \
298 bundle2handlephases(orighandlephasehandler, *args, **kwargs)
297 newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases(
298 orighandlephasehandler, *args, **kwargs)
299 299 newphaseheadshandler.params = orighandlephasehandler.params
300 300 bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
301 301
@@ -754,10 +754,10 @@ def _deleteinfinitepushbookmarks(ui, rep
754 754 nametype_idx = 1
755 755 remote_idx = 2
756 756 name_idx = 3
757 remotenames = [remotename for remotename in \
758 remotenamesext.readremotenames(repo) \
757 remotenames = [remotename for remotename in
758 remotenamesext.readremotenames(repo)
759 759 if remotename[remote_idx] == path]
760 remote_bm_names = [remotename[name_idx] for remotename in \
760 remote_bm_names = [remotename[name_idx] for remotename in
761 761 remotenames if remotename[nametype_idx] == "bookmarks"]
762 762
763 763 for name in names:
@@ -76,8 +76,8 @@ def _usercachedir(ui, name=longname):
76 76 if path:
77 77 return path
78 78 if pycompat.iswindows:
79 appdata = encoding.environ.get('LOCALAPPDATA',\
80 encoding.environ.get('APPDATA'))
79 appdata = encoding.environ.get('LOCALAPPDATA',
80 encoding.environ.get('APPDATA'))
81 81 if appdata:
82 82 return os.path.join(appdata, name)
83 83 elif pycompat.isdarwin:
@@ -174,8 +174,8 @@ def reposetup(ui, repo):
174 174 if standin not in ctx1:
175 175 # from second parent
176 176 modified.append(lfile)
177 elif lfutil.readasstandin(ctx1[standin]) \
178 != lfutil.hashfile(self.wjoin(lfile)):
177 elif (lfutil.readasstandin(ctx1[standin])
178 != lfutil.hashfile(self.wjoin(lfile))):
179 179 modified.append(lfile)
180 180 else:
181 181 if listclean:
@@ -134,12 +134,12 @@ def uisetup(ui):
134 134 except KeyError:
135 135 return
136 136
137 cmdtable["qrecord"] = \
138 (qrecord,
139 # same options as qnew, but copy them so we don't get
140 # -i/--interactive for qrecord and add white space diff options
141 mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
142 _('hg qrecord [OPTION]... PATCH [FILE]...'))
137 cmdtable["qrecord"] = (
138 qrecord,
139 # same options as qnew, but copy them so we don't get
140 # -i/--interactive for qrecord and add white space diff options
141 mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
142 _('hg qrecord [OPTION]... PATCH [FILE]...'))
143 143
144 144 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
145 145 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
@@ -107,8 +107,9 @@ class parsedreleasenotes(object):
107 107 "releasenotes is disabled\n"))
108 108
109 109 for section in other:
110 existingnotes = converttitled(self.titledforsection(section)) + \
111 convertnontitled(self.nontitledforsection(section))
110 existingnotes = (
111 converttitled(self.titledforsection(section)) +
112 convertnontitled(self.nontitledforsection(section)))
112 113 for title, paragraphs in other.titledforsection(section):
113 114 if self.hastitledinsection(section, title):
114 115 # TODO prompt for resolution if different and running in
@@ -138,8 +138,8 @@ class cacheconnection(object):
138 138 def connect(self, cachecommand):
139 139 if self.pipeo:
140 140 raise error.Abort(_("cache connection already open"))
141 self.pipei, self.pipeo, self.pipee, self.subprocess = \
142 procutil.popen4(cachecommand)
141 self.pipei, self.pipeo, self.pipee, self.subprocess = (
142 procutil.popen4(cachecommand))
143 143 self.connected = True
144 144
145 145 def close(self):
@@ -248,8 +248,8 @@ class shelvedstate(object):
248 248 if version < cls._version:
249 249 d = cls._readold(repo)
250 250 elif version == cls._version:
251 d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
252 .read(firstlinenonkeyval=True)
251 d = scmutil.simplekeyvaluefile(
252 repo.vfs, cls._filename).read(firstlinenonkeyval=True)
253 253 else:
254 254 raise error.Abort(_('this version of shelve is incompatible '
255 255 'with the version used in this repo'))
@@ -287,8 +287,9 @@ class shelvedstate(object):
287 287 "keep": cls._keep if keep else cls._nokeep,
288 288 "activebook": activebook or cls._noactivebook
289 289 }
290 scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
291 .write(info, firstline=("%d" % cls._version))
290 scmutil.simplekeyvaluefile(
291 repo.vfs, cls._filename).write(info,
292 firstline=("%d" % cls._version))
292 293
293 294 @classmethod
294 295 def clear(cls, repo):
@@ -77,7 +77,7 @@ if __name__ == "__main__":
77 77 continue
78 78 else:
79 79 # lines following directly, unexpected
80 print('Warning: text follows line with directive' \
80 print('Warning: text follows line with directive'
81 81 ' %s' % directive)
82 82 comment = 'do not translate: .. %s::' % directive
83 83 if not newentry.comment:
@@ -229,7 +229,7 @@ class branchcache(dict):
229 229 - True when cache is up to date or a subset of current repo."""
230 230 try:
231 231 return ((self.tipnode == repo.changelog.node(self.tiprev))
232 and (self.filteredhash == \
232 and (self.filteredhash ==
233 233 scmutil.filteredhash(repo, self.tiprev)))
234 234 except IndexError:
235 235 return False
@@ -1397,8 +1397,8 @@ class seekableunbundlepart(unbundlepart)
1397 1397 assert chunknum == 0, 'Must start with chunk 0'
1398 1398 self._chunkindex.append((0, self._tellfp()))
1399 1399 else:
1400 assert chunknum < len(self._chunkindex), \
1401 'Unknown chunk %d' % chunknum
1400 assert chunknum < len(self._chunkindex), (
1401 'Unknown chunk %d' % chunknum)
1402 1402 self._seekfp(self._chunkindex[chunknum][1])
1403 1403
1404 1404 pos = self._chunkindex[chunknum][0]
@@ -179,8 +179,8 @@ def ishunk(x):
179 179 def newandmodified(chunks, originalchunks):
180 180 newlyaddedandmodifiedfiles = set()
181 181 for chunk in chunks:
182 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
183 originalchunks:
182 if (ishunk(chunk) and chunk.header.isnewfile() and chunk not in
183 originalchunks):
184 184 newlyaddedandmodifiedfiles.add(chunk.header.filename())
185 185 return newlyaddedandmodifiedfiles
186 186
@@ -322,8 +322,8 @@ def dorecord(ui, repo, commitfunc, cmdsu
322 322 if backupall:
323 323 tobackup = changed
324 324 else:
325 tobackup = [f for f in newfiles if f in modified or f in \
326 newlyaddedandmodifiedfiles]
325 tobackup = [f for f in newfiles if f in modified or f in
326 newlyaddedandmodifiedfiles]
327 327 backups = {}
328 328 if tobackup:
329 329 backupdir = repo.vfs.join('record-backups')
@@ -1676,8 +1676,8 @@ def _docommit(ui, repo, *pats, **opts):
1676 1676 if not bheads:
1677 1677 raise error.Abort(_('can only close branch heads'))
1678 1678 elif opts.get('amend'):
1679 if repo['.'].p1().branch() != branch and \
1680 repo['.'].p2().branch() != branch:
1679 if (repo['.'].p1().branch() != branch and
1680 repo['.'].p2().branch() != branch):
1681 1681 raise error.Abort(_('can only close branch heads'))
1682 1682
1683 1683 if opts.get('amend'):
@@ -4822,8 +4822,8 @@ def resolve(ui, repo, *pats, **opts):
4822 4822 opts = pycompat.byteskwargs(opts)
4823 4823 confirm = ui.configbool('commands', 'resolve.confirm')
4824 4824 flaglist = 'all mark unmark list no_status re_merge'.split()
4825 all, mark, unmark, show, nostatus, remerge = \
4826 [opts.get(o) for o in flaglist]
4825 all, mark, unmark, show, nostatus, remerge = [
4826 opts.get(o) for o in flaglist]
4827 4827
4828 4828 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
4829 4829 if actioncount > 1:
@@ -4952,8 +4952,8 @@ def resolve(ui, repo, *pats, **opts):
4952 4952 if mark:
4953 4953 if markcheck:
4954 4954 fdata = repo.wvfs.tryread(f)
4955 if filemerge.hasconflictmarkers(fdata) and \
4956 ms[f] != mergemod.MERGE_RECORD_RESOLVED:
4955 if (filemerge.hasconflictmarkers(fdata) and
4956 ms[f] != mergemod.MERGE_RECORD_RESOLVED):
4957 4957 hasconflictmarkers.append(f)
4958 4958 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
4959 4959 elif unmark:
@@ -983,9 +983,9 @@ class filectx(basefilectx):
983 983
984 984 assert (changeid is not None
985 985 or fileid is not None
986 or changectx is not None), \
987 ("bad args: changeid=%r, fileid=%r, changectx=%r"
988 % (changeid, fileid, changectx))
986 or changectx is not None), (
987 "bad args: changeid=%r, fileid=%r, changectx=%r"
988 % (changeid, fileid, changectx))
989 989
990 990 if filelog is not None:
991 991 self._filelog = filelog
@@ -1442,8 +1442,8 @@ def debuglocks(ui, repo, **opts):
1442 1442 if host == socket.gethostname():
1443 1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 1444 else:
1445 locker = 'user %s, process %s, host %s' \
1446 % (user or b'None', pid, host)
1445 locker = ('user %s, process %s, host %s'
1446 % (user or b'None', pid, host))
1447 1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 1448 return 1
1449 1449 except OSError as e:
@@ -2547,8 +2547,8 @@ def isstreamclonespec(bundlespec):
2547 2547 return True
2548 2548
2549 2549 # Stream clone v2
2550 if (bundlespec.wirecompression == 'UN' and \
2551 bundlespec.wireversion == '02' and \
2550 if (bundlespec.wirecompression == 'UN' and
2551 bundlespec.wireversion == '02' and
2552 2552 bundlespec.contentopts.get('streamv2')):
2553 2553 return True
2554 2554
@@ -109,10 +109,10 @@ def readauthforuri(ui, uri, user):
109 109 schemes, prefix = [p[0]], p[1]
110 110 else:
111 111 schemes = (auth.get('schemes') or 'https').split()
112 if (prefix == '*' or hostpath.startswith(prefix)) and \
113 (len(prefix) > bestlen or (len(prefix) == bestlen and \
114 not bestuser and 'username' in auth)) \
115 and scheme in schemes:
112 if ((prefix == '*' or hostpath.startswith(prefix)) and
113 (len(prefix) > bestlen or (len(prefix) == bestlen and
114 not bestuser and 'username' in auth))
115 and scheme in schemes):
116 116 bestlen = len(prefix)
117 117 bestauth = group, auth
118 118 bestuser = auth.get('username')
@@ -391,9 +391,9 @@ class mergestate(object):
391 391 """
392 392 # Check local variables before looking at filesystem for performance
393 393 # reasons.
394 return bool(self._local) or bool(self._state) or \
395 self._repo.vfs.exists(self.statepathv1) or \
396 self._repo.vfs.exists(self.statepathv2)
394 return (bool(self._local) or bool(self._state) or
395 self._repo.vfs.exists(self.statepathv1) or
396 self._repo.vfs.exists(self.statepathv2))
397 397
398 398 def commit(self):
399 399 """Write current state on disk (if necessary)"""
@@ -114,9 +114,9 @@ def findliteralblocks(blocks):
114 114 # Partially minimized form: remove space and both
115 115 # colons.
116 116 blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3]
117 elif len(blocks[i]['lines']) == 1 and \
118 blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and \
119 blocks[i]['lines'][0].find(' ', 3) == -1:
117 elif (len(blocks[i]['lines']) == 1 and
118 blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and
119 blocks[i]['lines'][0].find(' ', 3) == -1):
120 120 # directive on its own line, not a literal block
121 121 i += 1
122 122 continue
@@ -790,8 +790,8 @@ def _getsections(blocks):
790 790 if section['type'] != 'margin':
791 791 sindent = section['indent']
792 792 if len(section['lines']) > 1:
793 sindent += len(section['lines'][1]) - \
794 len(section['lines'][1].lstrip(' '))
793 sindent += (len(section['lines'][1]) -
794 len(section['lines'][1].lstrip(' ')))
795 795 if bindent >= sindent:
796 796 break
797 797 pointer += 1
@@ -925,8 +925,8 @@ class header(object):
925 925 # if they have some content as we want to be able to change it
926 926 nocontent = len(self.header) == 2
927 927 emptynewfile = self.isnewfile() and nocontent
928 return emptynewfile or \
929 any(self.special_re.match(h) for h in self.header)
928 return (emptynewfile
929 or any(self.special_re.match(h) for h in self.header))
930 930
931 931 class recordhunk(object):
932 932 """patch hunk
@@ -2283,8 +2283,8 @@ def diff(repo, node1=None, node2=None, m
2283 2283 # If the file has been removed, fctx2 is None; but this should
2284 2284 # not occur here since we catch removed files early in
2285 2285 # logcmdutil.getlinerangerevs() for 'hg log -L'.
2286 assert fctx2 is not None, \
2287 'fctx2 unexpectly None in diff hunks filtering'
2286 assert fctx2 is not None, (
2287 'fctx2 unexpectly None in diff hunks filtering')
2288 2288 hunks = hunksfilterfn(fctx2, hunks)
2289 2289 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2290 2290 if hdr and (text or len(hdr) > 1):
@@ -289,15 +289,15 @@ class Merge3Text(object):
289 289
290 290 # find matches at the front
291 291 ii = 0
292 while ii < alen and ii < blen and \
293 self.a[a1 + ii] == self.b[b1 + ii]:
292 while (ii < alen and ii < blen and
293 self.a[a1 + ii] == self.b[b1 + ii]):
294 294 ii += 1
295 295 startmatches = ii
296 296
297 297 # find matches at the end
298 298 ii = 0
299 while ii < alen and ii < blen and \
300 self.a[a2 - ii - 1] == self.b[b2 - ii - 1]:
299 while (ii < alen and ii < blen and
300 self.a[a2 - ii - 1] == self.b[b2 - ii - 1]):
301 301 ii += 1
302 302 endmatches = ii
303 303
@@ -350,8 +350,8 @@ class Merge3Text(object):
350 350 aend = asub + intlen
351 351 bend = bsub + intlen
352 352
353 assert self.base[intbase:intend] == self.a[asub:aend], \
354 (self.base[intbase:intend], self.a[asub:aend])
353 assert self.base[intbase:intend] == self.a[asub:aend], (
354 (self.base[intbase:intend], self.a[asub:aend]))
355 355
356 356 assert self.base[intbase:intend] == self.b[bsub:bend]
357 357
@@ -643,8 +643,8 @@ def updateconfig(repo, pats, opts, inclu
643 643 for kindpat in pats:
644 644 kind, pat = matchmod._patsplit(kindpat, None)
645 645 if kind in matchmod.cwdrelativepatternkinds or kind is None:
646 ap = (kind + ':' if kind else '') +\
647 pathutil.canonpath(root, cwd, pat)
646 ap = ((kind + ':' if kind else '') +
647 pathutil.canonpath(root, cwd, pat))
648 648 abspats.append(ap)
649 649 else:
650 650 abspats.append(kindpat)
@@ -369,8 +369,8 @@ class abstractsubrepo(object):
369 369 return 1
370 370
371 371 def revert(self, substate, *pats, **opts):
372 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
373 % (substate[0], substate[2]))
372 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n')
373 % (substate[0], substate[2]))
374 374 return []
375 375
376 376 def shortid(self, revid):
@@ -697,7 +697,7 @@ class hgsubrepo(abstractsubrepo):
697 697 ctx = urepo[revision]
698 698 if ctx.hidden():
699 699 urepo.ui.warn(
700 _('revision %s in subrepository "%s" is hidden\n') \
700 _('revision %s in subrepository "%s" is hidden\n')
701 701 % (revision[0:12], self._path))
702 702 repo = urepo
703 703 hg.updaterepo(repo, revision, overwrite)
@@ -1787,8 +1787,8 @@ class gitsubrepo(abstractsubrepo):
1787 1787 cmd.append('--ignore-all-space')
1788 1788 if diffopts.ignorewsamount:
1789 1789 cmd.append('--ignore-space-change')
1790 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1791 and diffopts.ignoreblanklines:
1790 if (self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4)
1791 and diffopts.ignoreblanklines):
1792 1792 cmd.append('--ignore-blank-lines')
1793 1793
1794 1794 cmd.append(node1)
@@ -188,8 +188,8 @@ def findglobaltags(ui, repo):
188 188 return alltags
189 189
190 190 for head in reversed(heads): # oldest to newest
191 assert head in repo.changelog.nodemap, \
192 "tag cache returned bogus head %s" % short(head)
191 assert head in repo.changelog.nodemap, (
192 "tag cache returned bogus head %s" % short(head))
193 193 fnodes = _filterfnodes(tagfnode, reversed(heads))
194 194 alltags = _tagsfromfnodes(ui, repo, fnodes)
195 195
@@ -344,8 +344,8 @@ class ui(object):
344 344 try:
345 345 yield
346 346 finally:
347 self._blockedtimes[key + '_blocked'] += \
348 (util.timer() - starttime) * 1000
347 self._blockedtimes[key + '_blocked'] += (
348 (util.timer() - starttime) * 1000)
349 349
350 350 @contextlib.contextmanager
351 351 def uninterruptible(self):
@@ -1027,8 +1027,8 @@ class ui(object):
1027 1027 except IOError as err:
1028 1028 raise error.StdioError(err)
1029 1029 finally:
1030 self._blockedtimes['stdio_blocked'] += \
1031 (util.timer() - starttime) * 1000
1030 self._blockedtimes['stdio_blocked'] += (
1031 (util.timer() - starttime) * 1000)
1032 1032
1033 1033 def write_err(self, *args, **opts):
1034 1034 self._write(self._ferr, *args, **opts)
@@ -1078,8 +1078,8 @@ class ui(object):
1078 1078 return
1079 1079 raise error.StdioError(err)
1080 1080 finally:
1081 self._blockedtimes['stdio_blocked'] += \
1082 (util.timer() - starttime) * 1000
1081 self._blockedtimes['stdio_blocked'] += (
1082 (util.timer() - starttime) * 1000)
1083 1083
1084 1084 def _writemsg(self, dest, *args, **opts):
1085 1085 _writemsgwith(self._write, dest, *args, **opts)
@@ -1103,8 +1103,8 @@ class ui(object):
1103 1103 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
1104 1104 raise error.StdioError(err)
1105 1105 finally:
1106 self._blockedtimes['stdio_blocked'] += \
1107 (util.timer() - starttime) * 1000
1106 self._blockedtimes['stdio_blocked'] += (
1107 (util.timer() - starttime) * 1000)
1108 1108
1109 1109 def _isatty(self, fh):
1110 1110 if self.configbool('ui', 'nontty'):
@@ -437,10 +437,9 @@ class hgdist(Distribution):
437 437 pure = False
438 438 cffi = ispypy
439 439
440 global_options = Distribution.global_options + \
441 [('pure', None, "use pure (slow) Python "
442 "code instead of C extensions"),
443 ]
440 global_options = Distribution.global_options + [
441 ('pure', None, "use pure (slow) Python code instead of C extensions"),
442 ]
444 443
445 444 def has_ext_modules(self):
446 445 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
@@ -343,8 +343,8 @@ def has_svn_range(v):
343 343
344 344 @check("svn", "subversion client and admin tools")
345 345 def has_svn():
346 return matchoutput('svn --version 2>&1', br'^svn, version') and \
347 matchoutput('svnadmin --version 2>&1', br'^svnadmin, version')
346 return (matchoutput('svn --version 2>&1', br'^svn, version') and
347 matchoutput('svnadmin --version 2>&1', br'^svnadmin, version'))
348 348
349 349 @check("svn-bindings", "subversion python bindings")
350 350 def has_svn_bindings():
@@ -929,8 +929,8 @@ class Test(unittest.TestCase):
929 929 self.fail('no result code from test')
930 930 elif out != self._refout:
931 931 # Diff generation may rely on written .err file.
932 if (ret != 0 or out != self._refout) and not self._skipped \
933 and not self._debug:
932 if ((ret != 0 or out != self._refout) and not self._skipped
933 and not self._debug):
934 934 with open(self.errpath, 'wb') as f:
935 935 for line in out:
936 936 f.write(line)
@@ -978,8 +978,8 @@ class Test(unittest.TestCase):
978 978 # files are deleted
979 979 shutil.rmtree(self._chgsockdir, True)
980 980
981 if (self._ret != 0 or self._out != self._refout) and not self._skipped \
982 and not self._debug and self._out:
981 if ((self._ret != 0 or self._out != self._refout) and not self._skipped
982 and not self._debug and self._out):
983 983 with open(self.errpath, 'wb') as f:
984 984 for line in self._out:
985 985 f.write(line)
@@ -1105,8 +1105,8 @@ class Test(unittest.TestCase):
1105 1105 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1106 1106 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1107 1107 # non-test one in as a default, otherwise set to devnull
1108 env['HGTESTCATAPULTSERVERPIPE'] = \
1109 env.get('HGCATAPULTSERVERPIPE', os.devnull)
1108 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1109 'HGCATAPULTSERVERPIPE', os.devnull)
1110 1110
1111 1111 extraextensions = []
1112 1112 for opt in self._extraconfigopts:
@@ -41,8 +41,8 @@ for cmd, entry in commands.table.items()
41 41 seenshort = globalshort.copy()
42 42 seenlong = globallong.copy()
43 43 for option in entry[1]:
44 if (option[0] and option[0] in seenshort) or \
45 (option[1] and option[1] in seenlong):
44 if ((option[0] and option[0] in seenshort) or
45 (option[1] and option[1] in seenlong)):
46 46 print("command '" + cmd + "' has duplicate option " + str(option))
47 47 seenshort.add(option[0])
48 48 seenlong.add(option[1])
@@ -37,8 +37,8 @@ def lm(expected, output):
37 37 """
38 38 assert (expected.endswith(b'\n')
39 39 and output.endswith(b'\n')), 'missing newline'
40 assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), \
41 b'single backslash or unknown char'
40 assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), (
41 b'single backslash or unknown char')
42 42 test = run_tests.TTest(b'test-run-test.t', b'.', b'.')
43 43 match, exact = test.linematch(expected, output)
44 44 if isinstance(match, str):
@@ -82,8 +82,8 @@ class testsimplekeyvaluefile(unittest.Te
82 82 dw = {b'key1': b'value1'}
83 83 scmutil.simplekeyvaluefile(self.vfs, b'fl').write(dw, firstline=b'1.0')
84 84 self.assertEqual(self.vfs.read(b'fl'), b'1.0\nkey1=value1\n')
85 dr = scmutil.simplekeyvaluefile(self.vfs, b'fl')\
86 .read(firstlinenonkeyval=True)
85 dr = scmutil.simplekeyvaluefile(
86 self.vfs, b'fl').read(firstlinenonkeyval=True)
87 87 self.assertEqual(dr, {b'__firstline': b'1.0', b'key1': b'value1'})
88 88
89 89 if __name__ == "__main__":