shallowutil.py
540 lines
| 15.0 KiB
| text/x-python
|
PythonLexer
Augie Fackler
|
r40530 | # shallowutil.py -- remotefilelog utilities | ||
# | ||||
# Copyright 2014 Facebook, Inc. | ||||
# | ||||
# This software may be used and distributed according to the terms of the | ||||
# GNU General Public License version 2 or any later version. | ||||
import collections | ||||
import os | ||||
import stat | ||||
import struct | ||||
import tempfile | ||||
from mercurial.i18n import _ | ||||
Gregory Szorc
|
r43355 | from mercurial.pycompat import open | ||
Joerg Sonnenberger
|
r46729 | from mercurial.node import hex | ||
Augie Fackler
|
r40530 | from mercurial import ( | ||
error, | ||||
pycompat, | ||||
revlog, | ||||
util, | ||||
) | ||||
from mercurial.utils import ( | ||||
Augie Fackler
|
r44519 | hashutil, | ||
Augie Fackler
|
r40530 | storageutil, | ||
stringutil, | ||||
) | ||||
from . import constants | ||||
if not pycompat.iswindows: | ||||
import grp | ||||
Augie Fackler
|
r43346 | |||
Pulkit Goyal
|
def isenabled(repo):
    """Tell whether ``repo`` is a shallow (remotefilelog) repository."""
    reqs = repo.requirements
    return constants.SHALLOWREPO_REQUIREMENT in reqs
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getcachekey(reponame, file, id):
    """Return the shared-cache relative path for blob ``id`` of ``file``.

    The filename is hashed and split two-characters-deep to keep directory
    fan-out manageable.
    """
    digest = hex(hashutil.sha1(file).digest())
    return os.path.join(reponame, digest[:2], digest[2:], id)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getlocalkey(file, id):
    """Return the local-store relative path for blob ``id`` of ``file``."""
    digest = hex(hashutil.sha1(file).digest())
    return os.path.join(digest, id)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getcachepath(ui, allowempty=False):
    """Return the configured remotefilelog cache path, with ~/$VARS expanded.

    When ``remotefilelog.cachepath`` is unset, return None if ``allowempty``
    is true, otherwise abort.
    """
    cachepath = ui.config(b"remotefilelog", b"cachepath")
    if cachepath:
        return util.expandpath(cachepath)
    if allowempty:
        return None
    raise error.Abort(
        _(b"could not find config option remotefilelog.cachepath")
    )
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getcachepackpath(repo, category):
    """Return the shared-cache pack directory for ``category``.

    File packs live directly in ``<cache>/<repo>/packs``; every other
    category gets its own subdirectory beneath it.
    """
    packroot = os.path.join(getcachepath(repo.ui), repo.name, b'packs')
    if category == constants.FILEPACK_CATEGORY:
        return packroot
    return os.path.join(packroot, category)
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getlocalpackpath(base, category):
    """Return the local pack directory under ``base`` for ``category``."""
    packdir = os.path.join(base, b'packs')
    return os.path.join(packdir, category)
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def createrevlogtext(text, copyfrom=None, copyrev=None):
    """returns a string that matches the revlog contents in a
    traditional revlog
    """
    # only wrap in a metadata envelope when there is copy info to record, or
    # when the text itself starts with the '\1\n' marker and must be escaped
    if not (copyfrom or text.startswith(b'\1\n')):
        return text
    meta = {}
    if copyfrom:
        meta[b'copy'] = copyfrom
        meta[b'copyrev'] = copyrev
    return storageutil.packmeta(meta, text)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def parsemeta(text):
    """parse mercurial filelog metadata

    Returns ``(meta, text)`` where ``meta`` is the parsed metadata dict
    (empty when there is none) and ``text`` is the payload with any leading
    ``\\1\\n ... \\1\\n`` metadata envelope stripped.
    """
    meta, size = storageutil.parsemeta(text)
    if text.startswith(b'\1\n'):
        # find the closing '\1\n' marker and drop everything up to and
        # including it
        s = text.index(b'\1\n', 2)
        text = text[s + 2 :]
    return meta or {}, text
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def sumdicts(*dicts):
    """Adds all the values of *dicts together into one dictionary. This assumes
    the values in *dicts are all summable.

    e.g. [{'a': 4', 'b': 2}, {'b': 3, 'c': 1}] -> {'a': 4, 'b': 5, 'c': 1}
    """
    total = collections.defaultdict(lambda: 0)
    for d in dicts:
        for key, value in d.items():
            total[key] += value
    return total
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def prefixkeys(dict, prefix):
    """Returns ``dict`` with ``prefix`` prepended to all its keys."""
    return {prefix + k: v for k, v in dict.items()}
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def reportpackmetrics(ui, prefix, *stores):
    # Aggregate every store's metrics dict, namespace the keys with
    # "<prefix>_", and emit the result through ui.log for telemetry.
    dicts = [s.getmetrics() for s in stores]
    dict = prefixkeys(sumdicts(*dicts), prefix + b'_')
    ui.log(prefix + b"_packsizes", b"\n", **pycompat.strkwargs(dict))
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def _parsepackmeta(metabuf): | ||
"""parse datapack meta, bytes (<metadata-list>) -> dict | ||||
The dict contains raw content - both keys and values are strings. | ||||
Upper-level business may want to convert some of them to other types like | ||||
integers, on their own. | ||||
raise ValueError if the data is corrupted | ||||
""" | ||||
metadict = {} | ||||
offset = 0 | ||||
buflen = len(metabuf) | ||||
while buflen - offset >= 3: | ||||
Augie Fackler
|
r43346 | key = metabuf[offset : offset + 1] | ||
Augie Fackler
|
r40530 | offset += 1 | ||
Augie Fackler
|
r43347 | metalen = struct.unpack_from(b'!H', metabuf, offset)[0] | ||
Augie Fackler
|
r40530 | offset += 2 | ||
if offset + metalen > buflen: | ||||
Augie Fackler
|
r43347 | raise ValueError(b'corrupted metadata: incomplete buffer') | ||
Augie Fackler
|
r43346 | value = metabuf[offset : offset + metalen] | ||
Augie Fackler
|
r40530 | metadict[key] = value | ||
offset += metalen | ||||
if offset != buflen: | ||||
Augie Fackler
|
r43347 | raise ValueError(b'corrupted metadata: redundant data') | ||
Augie Fackler
|
r40530 | return metadict | ||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
r40530 | def _buildpackmeta(metadict): | ||
"""reverse of _parsepackmeta, dict -> bytes (<metadata-list>) | ||||
The dict contains raw content - both keys and values are strings. | ||||
Upper-level business may want to serialize some of other types (like | ||||
integers) to strings before calling this function. | ||||
raise ProgrammingError when metadata key is illegal, or ValueError if | ||||
length limit is exceeded | ||||
""" | ||||
Augie Fackler
|
r43347 | metabuf = b'' | ||
Gregory Szorc
|
r49773 | for k, v in sorted((metadict or {}).items()): | ||
Augie Fackler
|
r40530 | if len(k) != 1: | ||
Augie Fackler
|
r43347 | raise error.ProgrammingError(b'packmeta: illegal key: %s' % k) | ||
Augie Fackler
|
r43346 | if len(v) > 0xFFFE: | ||
raise ValueError( | ||||
Augie Fackler
|
r43347 | b'metadata value is too long: 0x%x > 0xfffe' % len(v) | ||
Augie Fackler
|
r43346 | ) | ||
Augie Fackler
|
r40530 | metabuf += k | ||
Augie Fackler
|
r43347 | metabuf += struct.pack(b'!H', len(v)) | ||
Augie Fackler
|
r40530 | metabuf += v | ||
# len(metabuf) is guaranteed representable in 4 bytes, because there are | ||||
# only 256 keys, and for each value, len(value) <= 0xfffe. | ||||
return metabuf | ||||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
# Accepted Python types for each known meta key; anything else defaults to
# bytes in buildpackmeta().  The duplicated ``int`` entries are a leftover
# of the Python 2 ``(int, long)`` pair and are harmless for isinstance().
_metaitemtypes = {
    constants.METAKEYFLAG: (int, int),
    constants.METAKEYSIZE: (int, int),
}
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def buildpackmeta(metadict):
    """like _buildpackmeta, but typechecks metadict and normalize it.

    This means, METAKEYSIZE and METAKEYFLAG should have integers as values,
    and METAKEYFLAG will be dropped if its value is 0.

    Raises ProgrammingError when a value has the wrong type for its key.
    """
    newmeta = {}
    for k, v in (metadict or {}).items():
        # unknown keys are expected to carry raw bytes
        expectedtype = _metaitemtypes.get(k, (bytes,))
        if not isinstance(v, expectedtype):
            raise error.ProgrammingError(b'packmeta: wrong type of key %s' % k)
        # normalize int to binary buffer
        if int in expectedtype:
            # optimization: remove flag if it's 0 to save space
            if k == constants.METAKEYFLAG and v == 0:
                continue
            v = int2bin(v)
        newmeta[k] = v
    return _buildpackmeta(newmeta)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def parsepackmeta(metabuf):
    """like _parsepackmeta, but convert fields to desired types automatically.

    This means, METAKEYFLAG and METAKEYSIZE fields will be converted to
    integers.
    """
    metadict = _parsepackmeta(metabuf)
    for key, value in metadict.items():
        expected = _metaitemtypes.get(key)
        if expected and int in expected:
            metadict[key] = bin2int(value)
    return metadict
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def int2bin(n):
    """convert a non-negative integer to raw binary buffer"""
    # big-endian, minimal width; 0 encodes to the empty string, matching the
    # historical shift-and-prepend implementation
    return n.to_bytes((n.bit_length() + 7) // 8, 'big')
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def bin2int(buf):
    """the reverse of int2bin, convert a binary buffer to an integer"""
    # big-endian; the empty buffer decodes to 0
    return int.from_bytes(buf, 'big')
Augie Fackler
|
r43346 | |||
Augie Fackler
|
class BadRemotefilelogHeader(error.StorageError):
    """Exception raised when parsing a remotefilelog blob header fails.

    Subclasses StorageError so callers catching generic storage errors
    also handle malformed blob headers.
    """
Augie Fackler
|
def parsesizeflags(raw):
    """given a remotefilelog blob, return (headersize, rawtextsize, flags)

    see remotefilelogserver.createfileblob for the format.
    raise BadRemotefilelogHeader if the content is illformed.
    """
    flags = revlog.REVIDX_DEFAULT_FLAGS
    size = None
    try:
        # the header is everything up to the first NUL byte
        index = raw.index(b'\0')
    except ValueError:
        raise BadRemotefilelogHeader(
            b"unexpected remotefilelog header: illegal format"
        )
    header = raw[:index]
    if header.startswith(b'v'):
        # v1 and above, header starts with 'v'
        if header.startswith(b'v1\n'):
            # v1 carries newline-separated "<key><value>" entries
            for s in header.split(b'\n'):
                if s.startswith(constants.METAKEYSIZE):
                    size = int(s[len(constants.METAKEYSIZE) :])
                elif s.startswith(constants.METAKEYFLAG):
                    flags = int(s[len(constants.METAKEYFLAG) :])
        else:
            raise BadRemotefilelogHeader(
                b'unsupported remotefilelog header: %s' % header
            )
    else:
        # v0, str(int(size)) is the header
        size = int(header)
    if size is None:
        # a v1 header without a size entry is also malformed
        raise BadRemotefilelogHeader(
            b"unexpected remotefilelog header: no size found"
        )
    return index + 1, size, flags
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def buildfileblobheader(size, flags, version=None):
    """return the header of a remotefilelog blob.

    see remotefilelogserver.createfileblob for the format.
    approximately the reverse of parsesizeflags.

    version could be 0 or 1, or None (auto decide).
    """
    if version is None:
        # choose v0 if flags is empty, otherwise v1
        version = 1 if flags else 0
    if version == 0:
        if flags:
            raise error.ProgrammingError(b'fileblob v0 does not support flag')
        return b'%d' % size
    if version == 1:
        return b'v1\n%s%d\n%s%d' % (
            constants.METAKEYSIZE,
            size,
            constants.METAKEYFLAG,
            flags,
        )
    raise error.ProgrammingError(b'unknown fileblob version %d' % version)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def ancestormap(raw):
    """Parse the ancestry records that follow the file text in a blob.

    Returns {node: (p1, p2, linknode, copyfrom)}.  Each record is four
    20-byte binary nodes (node, p1, p2, linknode) followed by the copyfrom
    path and a NUL terminator.
    """
    offset, size, flags = parsesizeflags(raw)
    # ancestry records start right after the header and the file text
    start = offset + size
    mapping = {}
    while start < len(raw):
        # the NUL after the fixed 80 bytes of nodes ends the copyfrom path
        divider = raw.index(b'\0', start + 80)

        currentnode = raw[start : (start + 20)]
        p1 = raw[(start + 20) : (start + 40)]
        p2 = raw[(start + 40) : (start + 60)]
        linknode = raw[(start + 60) : (start + 80)]
        copyfrom = raw[(start + 80) : divider]

        mapping[currentnode] = (p1, p2, linknode, copyfrom)
        start = divider + 1
    return mapping
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readfile(path):
    """Return the entire content of ``path``.

    An empty file is treated as corruption: it is deleted and an IOError is
    raised so callers can refetch a fresh copy.
    """
    with open(path, b'rb') as f:
        result = f.read()
    # we should never have empty files
    if not result:
        os.remove(path)
        raise IOError(b"empty file: %s" % path)
    return result
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def unlinkfile(filepath):
    """Delete ``filepath``, coping with read-only files on Windows."""
    if pycompat.iswindows:
        # On Windows, os.unlink cannot delete readonly files
        os.chmod(filepath, stat.S_IWUSR)
    os.unlink(filepath)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def renamefile(source, destination):
    """Rename ``source`` onto ``destination``, replacing it if present."""
    if pycompat.iswindows:
        # On Windows, os.rename cannot rename readonly files
        # and cannot overwrite destination if it exists
        os.chmod(source, stat.S_IWUSR)
        if os.path.isfile(destination):
            os.chmod(destination, stat.S_IWUSR)
            os.unlink(destination)
    os.rename(source, destination)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def writefile(path, content, readonly=False):
    """Atomically write ``content`` to ``path`` via a sibling temp file.

    The data goes to a temp file in the same directory which is then renamed
    into place.  With ``readonly`` the final mode is 0o444; otherwise the
    mode is derived from the process umask.  On any failure the temp file is
    removed best-effort and the exception re-raised.
    """
    dirname, filename = os.path.split(path)
    if not os.path.exists(dirname):
        try:
            os.makedirs(dirname)
        except FileExistsError:
            # another writer created the directory concurrently - fine
            pass

    fd, temp = tempfile.mkstemp(prefix=b'.%s-' % filename, dir=dirname)
    os.close(fd)
    try:
        f = util.posixfile(temp, b'wb')
        f.write(content)
        f.close()
        if readonly:
            mode = 0o444
        else:
            # tempfiles are created with 0o600, so we need to manually set the
            # mode.
            oldumask = os.umask(0)
            # there's no way to get the umask without modifying it, so set it
            # back
            os.umask(oldumask)
            mode = ~oldumask
        renamefile(temp, path)
        os.chmod(path, mode)
    except Exception:
        try:
            unlinkfile(temp)
        except OSError:
            pass
        raise
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def sortnodes(nodes, parentfunc):
    """Topologically sorts the nodes, using the parentfunc to find
    the parents of nodes."""
    nodes = set(nodes)
    childmap = {}
    parentmap = {}
    roots = []

    # Build child/parent maps restricted to the given node set; nodes with
    # no in-set parents start out as roots.
    for node in nodes:
        parents = [p for p in parentfunc(node) if p in nodes]
        parentmap[node] = set(parents)
        for p in parents:
            childmap.setdefault(p, set()).add(node)
        if not parents:
            roots.append(node)
    roots.sort()

    # Process roots, adding children to the queue as they become roots
    results = []
    while roots:
        node = roots.pop(0)
        results.append(node)
        for child in childmap.get(node, ()):
            remaining = parentmap[child]
            remaining.remove(node)
            if not remaining:
                # insert at the beginning, that way child nodes
                # are likely to be output immediately after their
                # parents.  This gives better compression results.
                roots.insert(0, child)
    return results
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    data = stream.read(n)
    if len(data) < n:
        # a short read means the peer hung up or the file is truncated
        raise error.Abort(
            _(b"stream ended unexpectedly (got %d bytes, expected %d)")
            % (len(data), n)
        )
    return data
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readunpack(stream, fmt):
    """Read exactly ``struct.calcsize(fmt)`` bytes and unpack with ``fmt``."""
    size = struct.calcsize(fmt)
    return struct.unpack(fmt, readexactly(stream, size))
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readpath(stream):
    """Read one length-prefixed path from ``stream``."""
    rawlen = readexactly(stream, constants.FILENAMESIZE)
    (pathlen,) = struct.unpack(constants.FILENAMESTRUCT, rawlen)
    return readexactly(stream, pathlen)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readnodelist(stream):
    """Yield each node of a count-prefixed node list read from ``stream``."""
    rawlen = readexactly(stream, constants.NODECOUNTSIZE)
    (nodecount,) = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)
    for _idx in range(nodecount):
        yield readexactly(stream, constants.NODESIZE)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def readpathlist(stream):
    """Yield each path of a count-prefixed path list read from ``stream``."""
    rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
    (pathcount,) = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)
    for _idx in range(pathcount):
        yield readpath(stream)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getgid(groupname):
    """Return the numeric gid for ``groupname``, or None if it is unknown."""
    try:
        return grp.getgrnam(pycompat.fsdecode(groupname)).gr_gid
    except KeyError:
        return None
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def setstickygroupdir(path, gid, warn=None):
    """Give ``path`` group ``gid`` and mode 0o2775 (setgid + group-write).

    A ``gid`` of None is a no-op.  Permission failures are non-fatal and are
    reported through ``warn`` when provided.
    """
    if gid is None:
        return
    try:
        os.chown(path, -1, gid)
        os.chmod(path, 0o2775)
    except (IOError, OSError) as ex:
        if warn:
            warn(_(b'unable to chown/chmod on %s: %s\n') % (path, ex))
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def mkstickygroupdir(ui, path):
    """Creates the given directory (if it doesn't exist) and give it a
    particular group with setgid enabled.

    The group comes from the ``remotefilelog.cachegroup`` config option; an
    unresolvable group name is reported and treated as no group.
    """
    gid = None
    groupname = ui.config(b"remotefilelog", b"cachegroup")
    if groupname:
        gid = getgid(groupname)
        if gid is None:
            ui.warn(_(b'unable to resolve group name: %s\n') % groupname)

    # we use a single stat syscall to test the existence and mode / group bit
    st = None
    try:
        st = os.stat(path)
    except OSError:
        pass
    if st:
        # exists
        if (st.st_mode & 0o2775) != 0o2775 or st.st_gid != gid:
            # permission needs to be fixed
            setstickygroupdir(path, gid, ui.warn)
        return
    # create all missing ancestors under a permissive umask, then fix the
    # group/setgid bits on every directory we created
    oldumask = os.umask(0o002)
    try:
        missingdirs = [path]
        path = os.path.dirname(path)
        while path and not os.path.exists(path):
            missingdirs.append(path)
            path = os.path.dirname(path)
        for path in reversed(missingdirs):
            try:
                os.mkdir(path)
            except FileExistsError:
                # raced with another process creating the same directory
                pass

        for path in missingdirs:
            setstickygroupdir(path, gid, ui.warn)
    finally:
        os.umask(oldumask)
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getusername(ui):
    """Return a short form of the configured username, or b'unknown'.

    Any failure (e.g. no username configured) falls back to b'unknown'.
    """
    try:
        return stringutil.shortuser(ui.username())
    except Exception:
        return b'unknown'
Augie Fackler
|
r40530 | |||
Augie Fackler
|
r43346 | |||
Augie Fackler
|
def getreponame(ui):
    """Return the basename of the ``paths.default`` config, or b"unknown"."""
    reponame = ui.config(b'paths', b'default')
    if not reponame:
        return b"unknown"
    return os.path.basename(reponame)