@@ -34,9 +34,9 @@ def ipdb(ui, repo, msg, **opts):
 
 @command('debugshell|dbsh', [])
 def debugshell(ui, repo, **opts):
-    bannermsg = "loaded repo : %s\n" \
+    bannermsg = ("loaded repo : %s\n"
                 "using source: %s" % (repo.root,
-                                      mercurial.__path__[0])
+                                      mercurial.__path__[0]))
 
     pdbmap = {
         'pdb' : 'code',
@@ -76,7 +76,7 @@ def build_docker_image(dockerfile: pathl
     p.communicate(input=dockerfile)
     if p.returncode:
         raise subprocess.CalledProcessException(
-            p.returncode, 'failed to build docker image: %s %s' \
+            p.returncode, 'failed to build docker image: %s %s'
             % (p.stdout, p.stderr))
 
 def command_build(args):
@@ -293,15 +293,15 @@ def _usermatch(ui, user, usersorgroups):
             # if ug is a user name: !username
             # if ug is a group name: !@groupname
             ug = ug[1:]
-            if not ug.startswith('@') and user != ug \
-                or ug.startswith('@') and user not in _getusers(ui, ug[1:]):
+            if (not ug.startswith('@') and user != ug
+                or ug.startswith('@') and user not in _getusers(ui, ug[1:])):
                 return True
 
         # Test for user or group. Format:
         # if ug is a user name: username
         # if ug is a group name: @groupname
-        elif user == ug \
-             or ug.startswith('@') and user in _getusers(ui, ug[1:]):
+        elif (user == ug
+              or ug.startswith('@') and user in _getusers(ui, ug[1:])):
             return True
 
     return False
@@ -600,8 +600,8 @@ class bzmysql_2_18(bzmysql):
 
     def __init__(self, ui):
         bzmysql.__init__(self, ui)
-        self.default_notify = \
-            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
+        self.default_notify = (
+            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s")
 
 class bzmysql_3_0(bzmysql_2_18):
     '''support for bugzilla 3.0 series.'''
@@ -776,8 +776,8 @@ def createchangeset(ui, log, fuzz=60, me
 
         # Ensure no changeset has a synthetic changeset as a parent.
         while p.synthetic:
-            assert len(p.parents) <= 1, \
-                _('synthetic changeset cannot have multiple parents')
+            assert len(p.parents) <= 1, (
+                _('synthetic changeset cannot have multiple parents'))
             if p.parents:
                 p = p.parents[0]
             else:
@@ -954,12 +954,12 @@ def debugcvsps(ui, *args, **opts):
 
         # have we seen the start tag?
         if revisions and off:
-            if revisions[0] == (b"%d" % cs.id) or \
-               revisions[0] in cs.tags:
+            if (revisions[0] == (b"%d" % cs.id) or
+                revisions[0] in cs.tags):
                 off = False
 
         # see if we reached the end tag
         if len(revisions) > 1 and not off:
-            if revisions[1] == (b"%d" % cs.id) or \
-               revisions[1] in cs.tags:
+            if (revisions[1] == (b"%d" % cs.id) or
+                revisions[1] in cs.tags):
                 break
@@ -387,7 +387,7 @@ class convert_git(common.converter_sourc
     def numcommits(self):
         output, ret = self.gitrunlines('rev-list', '--all')
         if ret:
-            raise error.Abort(_('cannot retrieve number of commits in %s') \
+            raise error.Abort(_('cannot retrieve number of commits in %s')
                               % self.path)
         return len(output)
 
@@ -198,8 +198,8 @@ class p4_source(common.converter_source)
             for filename in copiedfiles:
                 oldname = depotname[filename]
 
-                flcmd = 'p4 -G filelog %s' \
-                    % procutil.shellquote(oldname)
+                flcmd = ('p4 -G filelog %s'
+                         % procutil.shellquote(oldname))
                 flstdout = procutil.popen(flcmd, mode='rb')
 
                 copiedfilename = None
@@ -272,8 +272,8 @@ class p4_source(common.converter_source)
         return self.heads
 
     def getfile(self, name, rev):
-        cmd = 'p4 -G print %s' \
-            % procutil.shellquote("%s#%s" % (self.depotname[name], rev))
+        cmd = ('p4 -G print %s'
+               % procutil.shellquote("%s#%s" % (self.depotname[name], rev)))
 
         lasterror = None
         while True:
@@ -790,7 +790,7 @@ class svn_source(converter_source):
                 if childpath:
                     removed.add(self.recode(childpath))
                 else:
-                    self.ui.debug('unknown path in revision %d: %s\n' % \
+                    self.ui.debug('unknown path in revision %d: %s\n' %
                                   (revnum, path))
             elif kind == svn.core.svn_node_dir:
                 if ent.action == 'M':
@@ -1782,7 +1782,7 @@ def _continuehistedit(ui, repo, state):
            state.write(tr=tr)
        actobj = state.actions[0]
        progress.increment(item=actobj.torule())
-       ui.debug('histedit: processing %s %s\n' % (actobj.verb, \
+       ui.debug('histedit: processing %s %s\n' % (actobj.verb,
                                                   actobj.torule()))
        parentctx, replacement_ = actobj.run()
        state.parentctxnode = parentctx.node()
@@ -1881,7 +1881,7 @@ def _edithisteditplan(ui, repo, state, r
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
-   ctxs = [repo[act.node] \
+   ctxs = [repo[act.node]
            for act in state.actions if act.node]
    warnverifyactions(ui, repo, actions, state, ctxs)
    state.actions = actions
@@ -282,8 +282,8 @@ def commonsetup(ui):
     scratchbranchpat = ui.config('infinitepush', 'branchpattern')
     if scratchbranchpat:
         global _scratchbranchmatcher
-        kind, pat, _scratchbranchmatcher = \
-            stringutil.stringmatcher(scratchbranchpat)
+        kind, pat, _scratchbranchmatcher = (
+            stringutil.stringmatcher(scratchbranchpat))
 
 def serverextsetup(ui):
     origpushkeyhandler = bundle2.parthandlermapping['pushkey']
@@ -294,8 +294,8 @@ def serverextsetup(ui):
     bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
 
     orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
-    newphaseheadshandler = lambda *args, **kwargs: \
-        bundle2handlephases(orighandlephasehandler, *args, **kwargs)
+    newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases(
+        orighandlephasehandler, *args, **kwargs)
     newphaseheadshandler.params = orighandlephasehandler.params
     bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
 
@@ -754,10 +754,10 @@ def _deleteinfinitepushbookmarks(ui, rep
     nametype_idx = 1
     remote_idx = 2
     name_idx = 3
-    remotenames = [remotename for remotename in \
-                   remotenamesext.readremotenames(repo) \
+    remotenames = [remotename for remotename in
                    remotenamesext.readremotenames(repo)
                    if remotename[remote_idx] == path]
-    remote_bm_names = [remotename[name_idx] for remotename in \
+    remote_bm_names = [remotename[name_idx] for remotename in
                        remotenames if remotename[nametype_idx] == "bookmarks"]
 
     for name in names:
@@ -76,8 +76,8 @@ def _usercachedir(ui, name=longname):
     if path:
         return path
     if pycompat.iswindows:
-        appdata = encoding.environ.get('LOCALAPPDATA', \
+        appdata = encoding.environ.get('LOCALAPPDATA',
                                        encoding.environ.get('APPDATA'))
         if appdata:
             return os.path.join(appdata, name)
     elif pycompat.isdarwin:
@@ -174,8 +174,8 @@ def reposetup(ui, repo):
                     if standin not in ctx1:
                         # from second parent
                         modified.append(lfile)
-                    elif lfutil.readasstandin(ctx1[standin]) != \
-                            lfutil.hashfile(self.wjoin(lfile)):
+                    elif (lfutil.readasstandin(ctx1[standin])
+                          != lfutil.hashfile(self.wjoin(lfile))):
                         modified.append(lfile)
                     else:
                         if listclean:
@@ -134,12 +134,12 @@ def uisetup(ui):
     except KeyError:
         return
 
-    cmdtable["qrecord"] = \
-        (qrecord,
-         # same options as qnew, but copy them so we don't get
-         # -i/--interactive for qrecord and add white space diff options
-         mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
-         _('hg qrecord [OPTION]... PATCH [FILE]...'))
+    cmdtable["qrecord"] = (
+        qrecord,
+        # same options as qnew, but copy them so we don't get
+        # -i/--interactive for qrecord and add white space diff options
+        mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
+        _('hg qrecord [OPTION]... PATCH [FILE]...'))
 
     _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
     _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
@@ -107,8 +107,9 @@ class parsedreleasenotes(object):
                     "releasenotes is disabled\n"))
 
         for section in other:
-            existingnotes = converttitled(self.titledforsection(section)) + \
-                convertnontitled(self.nontitledforsection(section))
+            existingnotes = (
+                converttitled(self.titledforsection(section)) +
+                convertnontitled(self.nontitledforsection(section)))
             for title, paragraphs in other.titledforsection(section):
                 if self.hastitledinsection(section, title):
                     # TODO prompt for resolution if different and running in
@@ -138,8 +138,8 @@ class cacheconnection(object):
     def connect(self, cachecommand):
        if self.pipeo:
            raise error.Abort(_("cache connection already open"))
-       self.pipei, self.pipeo, self.pipee, self.subprocess = \
-           procutil.popen4(cachecommand)
+       self.pipei, self.pipeo, self.pipee, self.subprocess = (
+           procutil.popen4(cachecommand))
        self.connected = True
 
    def close(self):
@@ -248,8 +248,8 @@ class shelvedstate(object):
         if version < cls._version:
             d = cls._readold(repo)
         elif version == cls._version:
-            d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
-                    .read(firstlinenonkeyval=True)
+            d = scmutil.simplekeyvaluefile(
+                repo.vfs, cls._filename).read(firstlinenonkeyval=True)
         else:
             raise error.Abort(_('this version of shelve is incompatible '
                                 'with the version used in this repo'))
@@ -287,8 +287,9 @@ class shelvedstate(object):
             "keep": cls._keep if keep else cls._nokeep,
             "activebook": activebook or cls._noactivebook
         }
-        scmutil.simplekeyvaluefile(repo.vfs, cls._filename)\
-               .write(info, firstline=("%d" % cls._version))
+        scmutil.simplekeyvaluefile(
+            repo.vfs, cls._filename).write(info,
+                                           firstline=("%d" % cls._version))
 
     @classmethod
     def clear(cls, repo):
@@ -77,7 +77,7 @@ if __name__ == "__main__":
                     continue
                 else:
                     # lines following directly, unexpected
-                    print('Warning: text follows line with directive' \
+                    print('Warning: text follows line with directive'
                           ' %s' % directive)
                 comment = 'do not translate: .. %s::' % directive
                 if not newentry.comment:
@@ -229,7 +229,7 @@ class branchcache(dict):
         - True when cache is up to date or a subset of current repo."""
         try:
             return ((self.tipnode == repo.changelog.node(self.tiprev))
-                    and (self.filteredhash == \
+                    and (self.filteredhash ==
                          scmutil.filteredhash(repo, self.tiprev)))
         except IndexError:
             return False
@@ -1397,8 +1397,8 @@ class seekableunbundlepart(unbundlepart)
             assert chunknum == 0, 'Must start with chunk 0'
             self._chunkindex.append((0, self._tellfp()))
         else:
-            assert chunknum < len(self._chunkindex), \
-                'Unknown chunk %d' % chunknum
+            assert chunknum < len(self._chunkindex), (
+                'Unknown chunk %d' % chunknum)
             self._seekfp(self._chunkindex[chunknum][1])
 
         pos = self._chunkindex[chunknum][0]
@@ -179,8 +179,8 @@ def ishunk(x):
 def newandmodified(chunks, originalchunks):
     newlyaddedandmodifiedfiles = set()
     for chunk in chunks:
-        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
-           originalchunks:
+        if (ishunk(chunk) and chunk.header.isnewfile() and chunk not in
+            originalchunks):
             newlyaddedandmodifiedfiles.add(chunk.header.filename())
     return newlyaddedandmodifiedfiles
 
@@ -322,8 +322,8 @@ def dorecord(ui, repo, commitfunc, cmdsu
             if backupall:
                 tobackup = changed
             else:
-                tobackup = [f for f in newfiles if f in modified or f in \
+                tobackup = [f for f in newfiles if f in modified or f in
                             newlyaddedandmodifiedfiles]
             backups = {}
             if tobackup:
                 backupdir = repo.vfs.join('record-backups')
@@ -1676,8 +1676,8 @@ def _docommit(ui, repo, *pats, **opts):
             if not bheads:
                 raise error.Abort(_('can only close branch heads'))
             elif opts.get('amend'):
-                if repo['.'].p1().branch() != branch and \
-                   repo['.'].p2().branch() != branch:
+                if (repo['.'].p1().branch() != branch and
+                    repo['.'].p2().branch() != branch):
                     raise error.Abort(_('can only close branch heads'))
 
     if opts.get('amend'):
@@ -4822,8 +4822,8 @@ def resolve(ui, repo, *pats, **opts):
     opts = pycompat.byteskwargs(opts)
     confirm = ui.configbool('commands', 'resolve.confirm')
     flaglist = 'all mark unmark list no_status re_merge'.split()
-    all, mark, unmark, show, nostatus, remerge = \
-        [opts.get(o) for o in flaglist]
+    all, mark, unmark, show, nostatus, remerge = [
+        opts.get(o) for o in flaglist]
 
     actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
     if actioncount > 1:
@@ -4952,8 +4952,8 @@ def resolve(ui, repo, *pats, **opts):
             if mark:
                 if markcheck:
                     fdata = repo.wvfs.tryread(f)
-                    if filemerge.hasconflictmarkers(fdata) and \
-                       ms[f] != mergemod.MERGE_RECORD_RESOLVED:
+                    if (filemerge.hasconflictmarkers(fdata) and
+                        ms[f] != mergemod.MERGE_RECORD_RESOLVED):
                         hasconflictmarkers.append(f)
                 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
             elif unmark:
@@ -983,9 +983,9 @@ class filectx(basefilectx):
 
         assert (changeid is not None
                 or fileid is not None
-                or changectx is not None), \
-                ("bad args: changeid=%r, fileid=%r, changectx=%r"
+                or changectx is not None), (
+                "bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))
 
         if filelog is not None:
             self._filelog = filelog
@@ -1442,8 +1442,8 @@ def debuglocks(ui, repo, **opts):
                 if host == socket.gethostname():
                     locker = 'user %s, process %s' % (user or b'None', pid)
                 else:
-                    locker = 'user %s, process %s, host %s' \
-                             % (user or b'None', pid, host)
+                    locker = ('user %s, process %s, host %s'
+                              % (user or b'None', pid, host))
                 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                 return 1
         except OSError as e:
@@ -2547,8 +2547,8 @@ def isstreamclonespec(bundlespec):
         return True
 
     # Stream clone v2
-    if (bundlespec.wirecompression == 'UN' and \
-            bundlespec.wireversion == '02' and \
+    if (bundlespec.wirecompression == 'UN' and
+            bundlespec.wireversion == '02' and
             bundlespec.contentopts.get('streamv2')):
         return True
 
@@ -109,10 +109,10 @@ def readauthforuri(ui, uri, user):
             schemes, prefix = [p[0]], p[1]
         else:
             schemes = (auth.get('schemes') or 'https').split()
-        if (prefix == '*' or hostpath.startswith(prefix)) and \
-           (len(prefix) > bestlen or (len(prefix) == bestlen and \
-                not bestuser and 'username' in auth)) \
-                and scheme in schemes:
+        if ((prefix == '*' or hostpath.startswith(prefix)) and
+            (len(prefix) > bestlen or (len(prefix) == bestlen and
                not bestuser and 'username' in auth))
+            and scheme in schemes):
             bestlen = len(prefix)
             bestauth = group, auth
             bestuser = auth.get('username')
@@ -391,9 +391,9 @@ class mergestate(object):
         """
        # Check local variables before looking at filesystem for performance
        # reasons.
-       return bool(self._local) or bool(self._state) or \
-           self._repo.vfs.exists(self.statepathv1) or \
-           self._repo.vfs.exists(self.statepathv2)
+       return (bool(self._local) or bool(self._state) or
+               self._repo.vfs.exists(self.statepathv1) or
+               self._repo.vfs.exists(self.statepathv2))
 
    def commit(self):
        """Write current state on disk (if necessary)"""
@@ -114,9 +114,9 @@ def findliteralblocks(blocks):
                 # Partially minimized form: remove space and both
                 # colons.
                 blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3]
-            elif len(blocks[i]['lines']) == 1 and \
-                 blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and \
-                 blocks[i]['lines'][0].find(' ', 3) == -1:
+            elif (len(blocks[i]['lines']) == 1 and
+                  blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and
+                  blocks[i]['lines'][0].find(' ', 3) == -1):
                 # directive on its own line, not a literal block
                 i += 1
                 continue
@@ -790,8 +790,8 @@ def _getsections(blocks):
                 if section['type'] != 'margin':
                     sindent = section['indent']
                     if len(section['lines']) > 1:
-                        sindent += len(section['lines'][1]) - \
-                            len(section['lines'][1].lstrip(' '))
+                        sindent += (len(section['lines'][1]) -
                            len(section['lines'][1].lstrip(' ')))
                    if bindent >= sindent:
                        break
                pointer += 1
@@ -925,8 +925,8 @@ class header(object):
         # if they have some content as we want to be able to change it
         nocontent = len(self.header) == 2
         emptynewfile = self.isnewfile() and nocontent
-        return emptynewfile or \
-            any(self.special_re.match(h) for h in self.header)
+        return (emptynewfile
+                or any(self.special_re.match(h) for h in self.header))
 
 class recordhunk(object):
     """patch hunk
@@ -2283,8 +2283,8 @@ def diff(repo, node1=None, node2=None, m
                 # If the file has been removed, fctx2 is None; but this should
                 # not occur here since we catch removed files early in
                 # logcmdutil.getlinerangerevs() for 'hg log -L'.
-                assert fctx2 is not None, \
-                    'fctx2 unexpectly None in diff hunks filtering'
+                assert fctx2 is not None, (
+                    'fctx2 unexpectly None in diff hunks filtering')
                 hunks = hunksfilterfn(fctx2, hunks)
             text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
             if hdr and (text or len(hdr) > 1):
@@ -289,15 +289,15 @@ class Merge3Text(object):
 
             # find matches at the front
             ii = 0
-            while ii < alen and ii < blen and \
-                  self.a[a1 + ii] == self.b[b1 + ii]:
+            while (ii < alen and ii < blen and
+                   self.a[a1 + ii] == self.b[b1 + ii]):
                 ii += 1
             startmatches = ii
 
            # find matches at the end
            ii = 0
-            while ii < alen and ii < blen and \
-                  self.a[a2 - ii - 1] == self.b[b2 - ii - 1]:
+            while (ii < alen and ii < blen and
+                   self.a[a2 - ii - 1] == self.b[b2 - ii - 1]):
                ii += 1
            endmatches = ii
 
@@ -350,8 +350,8 @@ class Merge3Text(object):
             aend = asub + intlen
             bend = bsub + intlen
 
-            assert self.base[intbase:intend] == self.a[asub:aend], \
-                    (self.base[intbase:intend], self.a[asub:aend])
+            assert self.base[intbase:intend] == self.a[asub:aend], (
+                    (self.base[intbase:intend], self.a[asub:aend]))
 
             assert self.base[intbase:intend] == self.b[bsub:bend]
 
@@ -643,8 +643,8 @@ def updateconfig(repo, pats, opts, inclu
     for kindpat in pats:
         kind, pat = matchmod._patsplit(kindpat, None)
         if kind in matchmod.cwdrelativepatternkinds or kind is None:
-            ap = (kind + ':' if kind else '') + \
-                pathutil.canonpath(root, cwd, pat)
+            ap = ((kind + ':' if kind else '') +
                  pathutil.canonpath(root, cwd, pat))
            abspats.append(ap)
        else:
            abspats.append(kindpat)
@@ -369,8 +369,8 @@ class abstractsubrepo(object):
         return 1
 
     def revert(self, substate, *pats, **opts):
-        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
+        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n')
                      % (substate[0], substate[2]))
         return []
 
     def shortid(self, revid):
@@ -697,7 +697,7 @@ class hgsubrepo(abstractsubrepo):
             ctx = urepo[revision]
             if ctx.hidden():
                 urepo.ui.warn(
-                    _('revision %s in subrepository "%s" is hidden\n') \
+                    _('revision %s in subrepository "%s" is hidden\n')
                     % (revision[0:12], self._path))
                 repo = urepo
         hg.updaterepo(repo, revision, overwrite)
@@ -1787,8 +1787,8 @@ class gitsubrepo(abstractsubrepo):
             cmd.append('--ignore-all-space')
         if diffopts.ignorewsamount:
             cmd.append('--ignore-space-change')
-        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
-                and diffopts.ignoreblanklines:
+        if (self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4)
                and diffopts.ignoreblanklines):
            cmd.append('--ignore-blank-lines')
 
        cmd.append(node1)
@@ -188,8 +188,8 @@ def findglobaltags(ui, repo):
         return alltags
 
     for head in reversed(heads): # oldest to newest
-        assert head in repo.changelog.nodemap, \
-            "tag cache returned bogus head %s" % short(head)
+        assert head in repo.changelog.nodemap, (
+            "tag cache returned bogus head %s" % short(head))
     fnodes = _filterfnodes(tagfnode, reversed(heads))
     alltags = _tagsfromfnodes(ui, repo, fnodes)
 
@@ -344,8 +344,8 @@ class ui(object):
         try:
             yield
         finally:
-            self._blockedtimes[key + '_blocked'] += \
-                (util.timer() - starttime) * 1000
+            self._blockedtimes[key + '_blocked'] += (
+                (util.timer() - starttime) * 1000)
 
     @contextlib.contextmanager
     def uninterruptible(self):
@@ -1027,8 +1027,8 @@ class ui(object):
             except IOError as err:
                 raise error.StdioError(err)
             finally:
-                self._blockedtimes['stdio_blocked'] += \
-                    (util.timer() - starttime) * 1000
+                self._blockedtimes['stdio_blocked'] += (
+                    (util.timer() - starttime) * 1000)
 
     def write_err(self, *args, **opts):
         self._write(self._ferr, *args, **opts)
@@ -1078,8 +1078,8 @@ class ui(object):
                     return
                 raise error.StdioError(err)
             finally:
-                self._blockedtimes['stdio_blocked'] += \
-                    (util.timer() - starttime) * 1000
+                self._blockedtimes['stdio_blocked'] += (
+                    (util.timer() - starttime) * 1000)
 
     def _writemsg(self, dest, *args, **opts):
         _writemsgwith(self._write, dest, *args, **opts)
@@ -1103,8 +1103,8 @@ class ui(object):
                 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                     raise error.StdioError(err)
             finally:
-                self._blockedtimes['stdio_blocked'] += \
-                    (util.timer() - starttime) * 1000
+                self._blockedtimes['stdio_blocked'] += (
+                    (util.timer() - starttime) * 1000)
 
     def _isatty(self, fh):
         if self.configbool('ui', 'nontty'):
@@ -437,10 +437,9 @@ class hgdist(Distribution):
     pure = False
     cffi = ispypy
 
-    global_options = Distribution.global_options + \
-                     [('pure', None, "use pure (slow) Python "
-                       "code instead of C extensions"),
-                     ]
+    global_options = Distribution.global_options + [
+        ('pure', None, "use pure (slow) Python code instead of C extensions"),
+    ]
 
     def has_ext_modules(self):
         # self.ext_modules is emptied in hgbuildpy.finalize_options which is
@@ -343,8 +343,8 @@ def has_svn_range(v):
 
 @check("svn", "subversion client and admin tools")
 def has_svn():
-    return matchoutput('svn --version 2>&1', br'^svn, version') and \
-        matchoutput('svnadmin --version 2>&1', br'^svnadmin, version')
+    return (matchoutput('svn --version 2>&1', br'^svn, version') and
            matchoutput('svnadmin --version 2>&1', br'^svnadmin, version'))
 
@check("svn-bindings", "subversion python bindings")
def has_svn_bindings():
@@ -929,8 +929,8 @@ class Test(unittest.TestCase):
             self.fail('no result code from test')
         elif out != self._refout:
             # Diff generation may rely on written .err file.
-            if (ret != 0 or out != self._refout) and not self._skipped \
-                and not self._debug:
+            if ((ret != 0 or out != self._refout) and not self._skipped
                and not self._debug):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)
@@ -978,8 +978,8 @@ class Test(unittest.TestCase):
         # files are deleted
         shutil.rmtree(self._chgsockdir, True)
 
-        if (self._ret != 0 or self._out != self._refout) and not self._skipped \
-            and not self._debug and self._out:
+        if ((self._ret != 0 or self._out != self._refout) and not self._skipped
            and not self._debug and self._out):
            with open(self.errpath, 'wb') as f:
                for line in self._out:
                    f.write(line)
@@ -1105,8 +1105,8 @@ class Test(unittest.TestCase):
         if 'HGTESTCATAPULTSERVERPIPE' not in env:
             # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
             # non-test one in as a default, otherwise set to devnull
-            env['HGTESTCATAPULTSERVERPIPE'] = \
-                env.get('HGCATAPULTSERVERPIPE', os.devnull)
+            env['HGTESTCATAPULTSERVERPIPE'] = env.get(
                'HGCATAPULTSERVERPIPE', os.devnull)
 
        extraextensions = []
        for opt in self._extraconfigopts:
@@ -41,8 +41,8 @@ for cmd, entry in commands.table.items()
     seenshort = globalshort.copy()
     seenlong = globallong.copy()
     for option in entry[1]:
-        if (option[0] and option[0] in seenshort) or \
-           (option[1] and option[1] in seenlong):
+        if ((option[0] and option[0] in seenshort) or
            (option[1] and option[1] in seenlong)):
            print("command '" + cmd + "' has duplicate option " + str(option))
        seenshort.add(option[0])
        seenlong.add(option[1])
@@ -37,8 +37,8 @@ def lm(expected, output):
     """
     assert (expected.endswith(b'\n')
             and output.endswith(b'\n')), 'missing newline'
-    assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), \
-        b'single backslash or unknown char'
+    assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), (
        b'single backslash or unknown char')
    test = run_tests.TTest(b'test-run-test.t', b'.', b'.')
    match, exact = test.linematch(expected, output)
    if isinstance(match, str):
@@ -82,8 +82,8 @@ class testsimplekeyvaluefile(unittest.Te
         dw = {b'key1': b'value1'}
         scmutil.simplekeyvaluefile(self.vfs, b'fl').write(dw, firstline=b'1.0')
         self.assertEqual(self.vfs.read(b'fl'), b'1.0\nkey1=value1\n')
-        dr = scmutil.simplekeyvaluefile(self.vfs, b'fl')\
-            .read(firstlinenonkeyval=True)
+        dr = scmutil.simplekeyvaluefile(
            self.vfs, b'fl').read(firstlinenonkeyval=True)
        self.assertEqual(dr, {b'__firstline': b'1.0', b'key1': b'value1'})
 
if __name__ == "__main__":
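Every hunk above applies the same mechanical cleanup: statements that were continued across lines with a trailing backslash are rewrapped inside parentheses (or have a now-redundant backslash inside brackets dropped), so Python's implicit line continuation carries the statement instead. A minimal sketch of the pattern follows; the variable names are hypothetical and do not appear in the changeset.

# Hypothetical names, for illustration only; not taken from the changeset.
first_component = 1
second_component = 2

# Old style removed by this cleanup: explicit backslash continuation.
total_old = first_component + \
            second_component

# New style: wrap the expression in parentheses so the line continues
# implicitly, with no trailing backslash to maintain.
total_new = (first_component +
             second_component)

assert total_old == total_new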