diff --git a/docs/changelog.rst b/docs/changelog.rst
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -3,6 +3,12 @@
Changelog
=========
+1.1.0 (**XXXX-XX-XX**)
+----------------------
+- git support
+- performance upgrade for cached repos list
+
+
1.0.0 (**2010-10-xx**)
----------------------
diff --git a/rhodecode/__init__.py b/rhodecode/__init__.py
--- a/rhodecode/__init__.py
+++ b/rhodecode/__init__.py
@@ -24,7 +24,7 @@ versioning implementation: http://semver
@author: marcink
"""
-VERSION = (1, 0, 0, 'rc4')
+VERSION = (1, 1, 0, 'beta')
__version__ = '.'.join((str(each) for each in VERSION[:4]))
diff --git a/rhodecode/config/environment.py b/rhodecode/config/environment.py
--- a/rhodecode/config/environment.py
+++ b/rhodecode/config/environment.py
@@ -20,7 +20,7 @@ def load_environment(global_conf, app_co
object
"""
config = PylonsConfig()
-
+
# Pylons paths
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
paths = dict(root=root,
@@ -34,11 +34,11 @@ def load_environment(global_conf, app_co
config['routes.map'] = make_map(config)
config['pylons.app_globals'] = app_globals.Globals(config)
config['pylons.h'] = rhodecode.lib.helpers
-
+
# Setup cache object as early as possible
import pylons
pylons.cache._push_object(config['pylons.app_globals'].cache)
-
+
# Create the Mako TemplateLookup, with the default auto-escaping
config['pylons.app_globals'].mako_lookup = TemplateLookup(
directories=paths['templates'],
@@ -53,8 +53,8 @@ def load_environment(global_conf, app_co
if test:
from rhodecode.lib.utils import create_test_env, create_test_index
create_test_env('/tmp', config)
- create_test_index('/tmp/*', True)
-
+ create_test_index('/tmp', True)
+
#MULTIPLE DB configs
# Setup the SQLAlchemy database engine
if config['debug'] and not test:
@@ -68,12 +68,12 @@ def load_environment(global_conf, app_co
init_model(sa_engine_db1)
#init baseui
config['pylons.app_globals'].baseui = make_ui('db')
-
+
repo2db_mapper(_get_repos_cached_initial(config['pylons.app_globals'], initial))
set_available_permissions(config)
set_base_path(config)
set_rhodecode_config(config)
# CONFIGURATION OPTIONS HERE (note: all config options will override
# any Pylons config options)
-
+
return config
diff --git a/rhodecode/lib/app_globals.py b/rhodecode/lib/app_globals.py
--- a/rhodecode/lib/app_globals.py
+++ b/rhodecode/lib/app_globals.py
@@ -19,13 +19,13 @@ class Globals(object):
self.cache = CacheManager(**parse_cache_config_options(config))
self.available_permissions = None # propagated after init_model
self.baseui = None # propagated after init_model
-
+
@LazyProperty
def paths(self):
if self.baseui:
return self.baseui.configitems('paths')
-
+
@LazyProperty
def base_path(self):
if self.baseui:
- return self.paths[0][1].replace('*', '')
+ return self.paths[0][1]
diff --git a/rhodecode/lib/celerylib/tasks.py b/rhodecode/lib/celerylib/tasks.py
--- a/rhodecode/lib/celerylib/tasks.py
+++ b/rhodecode/lib/celerylib/tasks.py
@@ -8,7 +8,11 @@ from rhodecode.lib.smtp_mailer import Sm
from rhodecode.lib.utils import OrderedDict
from time import mktime
from vcs.backends.hg import MercurialRepository
+from vcs.backends.git import GitRepository
+import os
import traceback
+from vcs.backends import get_repo
+from vcs.utils.helpers import get_scm
try:
import json
@@ -95,8 +99,9 @@ def get_commits_stats(repo_name, ts_min_
commits_by_day_author_aggregate = {}
commits_by_day_aggregate = {}
- repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
- repo = MercurialRepository(repos_path + repo_name)
+ repos_path = get_hg_ui_settings()['paths_root_path']
+ p = os.path.join(repos_path, repo_name)
+ repo = get_repo(get_scm(p)[0], p)
skip_date_limit = True
parse_limit = 250 #limit for single task changeset parsing optimal for
@@ -305,8 +310,10 @@ def __get_codes_stats(repo_name):
's', 'sh', 'tpl', 'txt', 'vim', 'wss', 'xhtml', 'xml', 'xsl', 'xslt', 'yaws']
- repos_path = get_hg_ui_settings()['paths_root_path'].replace('*', '')
- repo = MercurialRepository(repos_path + repo_name)
+ repos_path = get_hg_ui_settings()['paths_root_path']
+ p = os.path.join(repos_path, repo_name)
+ repo = get_repo(get_scm(p)[0], p)
+
tip = repo.get_changeset()
code_stats = {}
diff --git a/rhodecode/lib/db_manage.py b/rhodecode/lib/db_manage.py
--- a/rhodecode/lib/db_manage.py
+++ b/rhodecode/lib/db_manage.py
@@ -162,7 +162,7 @@ class DbManage(object):
paths = RhodeCodeUi()
paths.ui_section = 'paths'
paths.ui_key = '/'
- paths.ui_value = os.path.join(path, '*')
+ paths.ui_value = path
hgsettings1 = RhodeCodeSettings()
diff --git a/rhodecode/lib/helpers.py b/rhodecode/lib/helpers.py
--- a/rhodecode/lib/helpers.py
+++ b/rhodecode/lib/helpers.py
@@ -64,20 +64,20 @@ def recursive_replace(str, replace=' '):
return str
else:
str = str.replace(replace * 2, replace)
- return recursive_replace(str, replace)
+ return recursive_replace(str, replace)
class _ToolTip(object):
-
+
def __call__(self, tooltip_title, trim_at=50):
"""
Special function just to wrap our text into nice formatted autowrapped
text
:param tooltip_title:
"""
-
+
return wrap_paragraphs(escape(tooltip_title), trim_at)\
                 .replace('\n', '<br/>')
-
+
def activate(self):
"""
Adds tooltip mechanism to the given Html all tooltips have to have
@@ -85,7 +85,7 @@ class _ToolTip(object):
Then a tooltip will be generated based on that
All with yui js tooltip
"""
-
+
js = '''
YAHOO.util.Event.onDOMReady(function(){
function toolTipsId(){
@@ -190,19 +190,19 @@ class _ToolTip(object):
});
});
- '''
+ '''
return literal(js)
tooltip = _ToolTip()
class _FilesBreadCrumbs(object):
-
+
def __call__(self, repo_name, rev, paths):
url_l = [link_to(repo_name, url('files_home',
repo_name=repo_name,
revision=rev, f_path=''))]
paths_l = paths.split('/')
-
+
for cnt, p in enumerate(paths_l, 1):
if p != '':
url_l.append(link_to(p, url('files_home',
@@ -236,12 +236,12 @@ def pygmentize_annotation(filenode, **kw
pygmentize function for annotation
:param filenode:
"""
-
+
color_dict = {}
def gen_color():
"""generator for getting 10k of evenly distibuted colors using hsv color
and golden ratio.
- """
+ """
import colorsys
n = 10000
golden_ratio = 0.618033988749895
@@ -252,21 +252,21 @@ def pygmentize_annotation(filenode, **kw
h %= 1
HSV_tuple = [h, 0.95, 0.95]
RGB_tuple = colorsys.hsv_to_rgb(*HSV_tuple)
- yield map(lambda x:str(int(x * 256)), RGB_tuple)
+ yield map(lambda x:str(int(x * 256)), RGB_tuple)
cgenerator = gen_color()
-
+
def get_color_string(cs):
if color_dict.has_key(cs):
col = color_dict[cs]
else:
col = color_dict[cs] = cgenerator.next()
return "color: rgb(%s)! important;" % (', '.join(col))
-
+
def url_func(changeset):
         tooltip_html = "<div style='font-size:12px'>Author:" + \
-            " %s<br/>Date: %s<br/>Message: %s<br/></div>"
-
+            " %s<br/>Date: %s<br/>Message: %s<br/></div>"
+
tooltip_html = tooltip_html % (changeset.author,
changeset.date,
tooltip(changeset.message))
@@ -280,11 +280,11 @@ def pygmentize_annotation(filenode, **kw
class_='tooltip',
tooltip_title=tooltip_html
)
-
+
uri += '\n'
- return uri
+ return uri
return literal(annotate_highlight(filenode, url_func, **kwargs))
-
+
def repo_name_slug(value):
"""Return slug of name of repository
This function is called on each creation/modification
@@ -292,7 +292,7 @@ def repo_name_slug(value):
"""
slug = remove_formatting(value)
slug = strip_tags(slug)
-
+
for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """:
slug = slug.replace(c, '-')
slug = recursive_replace(slug, '-')
@@ -305,7 +305,7 @@ def get_changeset_safe(repo, rev):
if not isinstance(repo, BaseRepository):
raise Exception('You must pass an Repository '
'object as first argument got %s', type(repo))
-
+
try:
cs = repo.get_changeset(rev)
except RepositoryError:
@@ -323,7 +323,7 @@ flash = _Flash()
from mercurial import util
from mercurial.templatefilters import age as _age, person as _person
-age = lambda x:_age(x)
+age = lambda x:x
capitalize = lambda x: x.capitalize()
date = lambda x: util.datestr(x)
email = util.email
@@ -333,8 +333,8 @@ hgdate = lambda x: "%d %d" % x
isodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2')
isodatesec = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2')
localdate = lambda x: (x[0], util.makedate()[1])
-rfc822date = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
-rfc822date_notz = lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S")
+rfc822date = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2")
+rfc822date_notz = lambda x: x#util.datestr(x, "%a, %d %b %Y %H:%M:%S")
rfc3339date = lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2")
time_ago = lambda x: util.datestr(_age(x), "%a, %d %b %Y %H:%M:%S %1%2")
@@ -358,8 +358,8 @@ def gravatar_url(email_address, size=30)
baseurl_nossl = "http://www.gravatar.com/avatar/"
baseurl_ssl = "https://secure.gravatar.com/avatar/"
baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
-
-
+
+
# construct the url
gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
@@ -370,7 +370,7 @@ def safe_unicode(str):
"""safe unicode function. In case of UnicodeDecode error we try to return
unicode with errors replace, if this failes we return unicode with
string_escape decoding """
-
+
try:
u_str = unicode(str)
except UnicodeDecodeError:
@@ -379,5 +379,5 @@ def safe_unicode(str):
except UnicodeDecodeError:
#incase we have a decode error just represent as byte string
u_str = unicode(str(str).encode('string_escape'))
-
+
return u_str
diff --git a/rhodecode/lib/indexers/__init__.py b/rhodecode/lib/indexers/__init__.py
--- a/rhodecode/lib/indexers/__init__.py
+++ b/rhodecode/lib/indexers/__init__.py
@@ -1,4 +1,10 @@
+import os
+import sys
from os.path import dirname as dn, join as jn
+
+#to get the rhodecode import
+sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
+
from rhodecode.config.environment import load_environment
from rhodecode.model.hg import HgModel
from shutil import rmtree
@@ -9,15 +15,10 @@ from whoosh.analysis import RegexTokeniz
from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
from whoosh.index import create_in, open_dir
from whoosh.formats import Characters
-from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
+from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter
-import os
-import sys
import traceback
-#to get the rhodecode import
-sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
-
#LOCATION WE KEEP THE INDEX
IDX_LOCATION = jn(dn(dn(dn(dn(os.path.abspath(__file__))))), 'data', 'index')
@@ -45,9 +46,62 @@ SCHEMA = Schema(owner=TEXT(),
IDX_NAME = 'HG_INDEX'
-FORMATTER = HtmlFormatter('span', between='\n...\n')
+FORMATTER = HtmlFormatter('span', between='\n...\n')
FRAGMENTER = SimpleFragmenter(200)
-
+
+from paste.script import command
+import ConfigParser
+
+class MakeIndex(command.Command):
+
+ max_args = 1
+ min_args = 1
+
+ usage = "CONFIG_FILE"
+ summary = "Creates index for full text search given configuration file"
+ group_name = "Whoosh indexing"
+
+ parser = command.Command.standard_parser(verbose=True)
+# parser.add_option('--repo-location',
+# action='store',
+# dest='repo_location',
+# help="Specifies repositories location to index",
+# )
+ parser.add_option('-f',
+ action='store_true',
+ dest='full_index',
+ help="Specifies that index should be made full i.e"
+ " destroy old and build from scratch",
+ default=False)
+ def command(self):
+ config_name = self.args[0]
+
+ p = config_name.split('/')
+ if len(p) == 1:
+ root = '.'
+ else:
+ root = '/'.join(p[:-1])
+ print root
+ config = ConfigParser.ConfigParser({'here':root})
+ config.read(config_name)
+ print dict(config.items('app:main'))['index_dir']
+ index_location = dict(config.items('app:main'))['index_dir']
+ #return
+
+ #=======================================================================
+ # WHOOSH DAEMON
+ #=======================================================================
+ from rhodecode.lib.pidlock import LockHeld, DaemonLock
+ from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
+ try:
+ l = DaemonLock()
+ WhooshIndexingDaemon(index_location=index_location)\
+ .run(full_index=self.options.full_index)
+ l.release()
+ except LockHeld:
+ sys.exit(1)
+
+
class ResultWrapper(object):
def __init__(self, search_type, searcher, matcher, highlight_items):
self.search_type = search_type
@@ -55,7 +109,7 @@ class ResultWrapper(object):
self.matcher = matcher
self.highlight_items = highlight_items
self.fragment_size = 200 / 2
-
+
@LazyProperty
def doc_ids(self):
docs_id = []
@@ -64,8 +118,8 @@ class ResultWrapper(object):
chunks = [offsets for offsets in self.get_chunks()]
docs_id.append([docnum, chunks])
self.matcher.next()
- return docs_id
-
+ return docs_id
+
def __str__(self):
return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
@@ -91,32 +145,32 @@ class ResultWrapper(object):
slice = []
for docid in self.doc_ids[i:j]:
slice.append(self.get_full_content(docid))
- return slice
-
+ return slice
+
def get_full_content(self, docid):
res = self.searcher.stored_fields(docid[0])
f_path = res['path'][res['path'].find(res['repository']) \
+ len(res['repository']):].lstrip('/')
-
+
content_short = self.get_short_content(res, docid[1])
res.update({'content_short':content_short,
'content_short_hl':self.highlight(content_short),
'f_path':f_path})
-
- return res
-
+
+ return res
+
def get_short_content(self, res, chunks):
-
+
return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
-
+
def get_chunks(self):
"""
Smart function that implements chunking the content
but not overlap chunks so it doesn't highlight the same
close occurrences twice.
- :param matcher:
- :param size:
+ @param matcher:
+ @param size:
"""
memory = [(0, 0)]
for span in self.matcher.spans():
@@ -124,12 +178,12 @@ class ResultWrapper(object):
end = span.endchar or 0
start_offseted = max(0, start - self.fragment_size)
end_offseted = end + self.fragment_size
-
+
if start_offseted < memory[-1][1]:
start_offseted = memory[-1][1]
- memory.append((start_offseted, end_offseted,))
- yield (start_offseted, end_offseted,)
-
+ memory.append((start_offseted, end_offseted,))
+ yield (start_offseted, end_offseted,)
+
def highlight(self, content, top=5):
if self.search_type != 'content':
return ''
@@ -139,4 +193,4 @@ class ResultWrapper(object):
fragmenter=FRAGMENTER,
formatter=FORMATTER,
top=top)
- return hl
+ return hl
diff --git a/rhodecode/lib/indexers/daemon.py b/rhodecode/lib/indexers/daemon.py
--- a/rhodecode/lib/indexers/daemon.py
+++ b/rhodecode/lib/indexers/daemon.py
@@ -32,12 +32,12 @@ from os.path import join as jn
project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
sys.path.append(project_path)
-from rhodecode.lib.pidlock import LockHeld, DaemonLock
+
from rhodecode.model.hg import HgModel
from rhodecode.lib.helpers import safe_unicode
from whoosh.index import create_in, open_dir
from shutil import rmtree
-from rhodecode.lib.indexers import INDEX_EXTENSIONS, IDX_LOCATION, SCHEMA, IDX_NAME
+from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
from time import mktime
from vcs.exceptions import ChangesetError, RepositoryError
@@ -61,25 +61,37 @@ ch.setFormatter(formatter)
# add ch to logger
log.addHandler(ch)
-def scan_paths(root_location):
- return HgModel.repo_scan('/', root_location, None, True)
+def get_repos_location():
+ return HgModel.get_repos_location()
+
class WhooshIndexingDaemon(object):
"""
Deamon for atomic jobs
"""
- def __init__(self, indexname='HG_INDEX', repo_location=None):
+ def __init__(self, indexname='HG_INDEX', index_location=None,
+ repo_location=None):
self.indexname = indexname
+
+ self.index_location = index_location
+ if not index_location:
+ raise Exception('You have to provide index location')
+
self.repo_location = repo_location
- self.repo_paths = scan_paths(self.repo_location)
+ if not repo_location:
+ raise Exception('You have to provide repositories location')
+
+
+
+ self.repo_paths = HgModel.repo_scan('/', self.repo_location, None, True)
self.initial = False
- if not os.path.isdir(IDX_LOCATION):
- os.mkdir(IDX_LOCATION)
+ if not os.path.isdir(self.index_location):
+ os.mkdir(self.index_location)
log.info('Cannot run incremental index since it does not'
' yet exist running full build')
self.initial = True
-
+
def get_paths(self, repo):
"""
recursive walk in root dir and return a set of all path in that dir
@@ -87,27 +99,25 @@ class WhooshIndexingDaemon(object):
"""
index_paths_ = set()
try:
- tip = repo.get_changeset()
-
- for topnode, dirs, files in tip.walk('/'):
+ for topnode, dirs, files in repo.walk('/', 'tip'):
for f in files:
index_paths_.add(jn(repo.path, f.path))
for dir in dirs:
for f in files:
index_paths_.add(jn(repo.path, f.path))
-
+
except RepositoryError:
pass
- return index_paths_
-
+ return index_paths_
+
def get_node(self, repo, path):
n_path = path[len(repo.path) + 1:]
node = repo.get_changeset().get_node(n_path)
return node
-
+
def get_node_mtime(self, node):
return mktime(node.last_changeset.date.timetuple())
-
+
def add_doc(self, writer, path, repo):
"""Adding doc to writer"""
node = self.get_node(repo, path)
@@ -120,63 +130,63 @@ class WhooshIndexingDaemon(object):
log.debug(' >> %s' % path)
#just index file name without it's content
u_content = u''
-
+
writer.add_document(owner=unicode(repo.contact),
repository=safe_unicode(repo.name),
path=safe_unicode(path),
content=u_content,
modtime=self.get_node_mtime(node),
- extension=node.extension)
+ extension=node.extension)
+
-
def build_index(self):
- if os.path.exists(IDX_LOCATION):
+ if os.path.exists(self.index_location):
log.debug('removing previous index')
- rmtree(IDX_LOCATION)
-
- if not os.path.exists(IDX_LOCATION):
- os.mkdir(IDX_LOCATION)
-
- idx = create_in(IDX_LOCATION, SCHEMA, indexname=IDX_NAME)
+ rmtree(self.index_location)
+
+ if not os.path.exists(self.index_location):
+ os.mkdir(self.index_location)
+
+ idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
writer = idx.writer()
-
+
for cnt, repo in enumerate(self.repo_paths.values()):
log.debug('building index @ %s' % repo.path)
-
+
for idx_path in self.get_paths(repo):
self.add_doc(writer, idx_path, repo)
-
+
log.debug('>> COMMITING CHANGES <<')
writer.commit(merge=True)
log.debug('>>> FINISHED BUILDING INDEX <<<')
-
-
+
+
def update_index(self):
log.debug('STARTING INCREMENTAL INDEXING UPDATE')
-
- idx = open_dir(IDX_LOCATION, indexname=self.indexname)
+
+ idx = open_dir(self.index_location, indexname=self.indexname)
# The set of all paths in the index
indexed_paths = set()
# The set of all paths we need to re-index
to_index = set()
-
+
reader = idx.reader()
writer = idx.writer()
-
+
# Loop over the stored fields in the index
for fields in reader.all_stored_fields():
indexed_path = fields['path']
indexed_paths.add(indexed_path)
-
+
repo = self.repo_paths[fields['repository']]
-
+
try:
node = self.get_node(repo, indexed_path)
except ChangesetError:
# This file was deleted since it was indexed
log.debug('removing from index %s' % indexed_path)
writer.delete_by_term('path', indexed_path)
-
+
else:
# Check if this file was changed since it was indexed
indexed_time = fields['modtime']
@@ -187,7 +197,7 @@ class WhooshIndexingDaemon(object):
log.debug('adding to reindex list %s' % indexed_path)
writer.delete_by_term('path', indexed_path)
to_index.add(indexed_path)
-
+
# Loop over the files in the filesystem
# Assume we have a function that gathers the filenames of the
# documents to be indexed
@@ -198,51 +208,14 @@ class WhooshIndexingDaemon(object):
# that wasn't indexed before. So index it!
self.add_doc(writer, path, repo)
log.debug('re indexing %s' % path)
-
+
log.debug('>> COMMITING CHANGES <<')
writer.commit(merge=True)
log.debug('>>> FINISHED REBUILDING INDEX <<<')
-
+
def run(self, full_index=False):
"""Run daemon"""
if full_index or self.initial:
self.build_index()
else:
self.update_index()
-
-if __name__ == "__main__":
- arg = sys.argv[1:]
- if len(arg) != 2:
- sys.stderr.write('Please specify indexing type [full|incremental]'
- 'and path to repositories as script args \n')
- sys.exit()
-
-
- if arg[0] == 'full':
- full_index = True
- elif arg[0] == 'incremental':
- # False means looking just for changes
- full_index = False
- else:
- sys.stdout.write('Please use [full|incremental]'
- ' as script first arg \n')
- sys.exit()
-
- if not os.path.isdir(arg[1]):
- sys.stderr.write('%s is not a valid path \n' % arg[1])
- sys.exit()
- else:
- if arg[1].endswith('/'):
- repo_location = arg[1] + '*'
- else:
- repo_location = arg[1] + '/*'
-
- try:
- l = DaemonLock()
- WhooshIndexingDaemon(repo_location=repo_location)\
- .run(full_index=full_index)
- l.release()
- reload(logging)
- except LockHeld:
- sys.exit(1)
-
diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py
--- a/rhodecode/lib/utils.py
+++ b/rhodecode/lib/utils.py
@@ -16,24 +16,28 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
+from UserDict import DictMixin
+from mercurial import ui, config, hg
+from mercurial.error import RepoError
+from rhodecode.model import meta
+from rhodecode.model.caching_query import FromCache
+from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, \
+ UserLog
+from rhodecode.model.repo import RepoModel
+from rhodecode.model.user import UserModel
+from vcs.backends.base import BaseChangeset
+from vcs.backends.git import GitRepository
+from vcs.backends.hg import MercurialRepository
+from vcs.utils.lazy import LazyProperty
+import datetime
+import logging
+import os
"""
Created on April 18, 2010
Utilities for RhodeCode
@author: marcink
"""
-from rhodecode.model.caching_query import FromCache
-from mercurial import ui, config, hg
-from mercurial.error import RepoError
-from rhodecode.model import meta
-from rhodecode.model.user import UserModel
-from rhodecode.model.repo import RepoModel
-from rhodecode.model.db import Repository, User, RhodeCodeUi, RhodeCodeSettings, UserLog
-from vcs.backends.base import BaseChangeset
-from vcs.utils.lazy import LazyProperty
-import logging
-import datetime
-import os
log = logging.getLogger(__name__)
@@ -96,14 +100,30 @@ def action_logger(user, action, repo, ip
sa.rollback()
log.error('could not log user action:%s', str(e))
-def check_repo_dir(paths):
- repos_path = paths[0][1].split('/')
- if repos_path[-1] in ['*', '**']:
- repos_path = repos_path[:-1]
- if repos_path[0] != '/':
- repos_path[0] = '/'
- if not os.path.isdir(os.path.join(*repos_path)):
- raise Exception('Not a valid repository in %s' % paths[0][1])
+def get_repos(path, recursive=False, initial=False):
+ """
+ Scans given path for repos and return (name,(type,path)) tuple
+ :param prefix:
+ :param path:
+ :param recursive:
+ :param initial:
+ """
+ from vcs.utils.helpers import get_scm
+ from vcs.exceptions import VCSError
+ scm = get_scm(path)
+ if scm:
+ raise Exception('The given path %s should not be a repository got %s',
+ path, scm)
+
+ for dirpath in os.listdir(path):
+ try:
+ yield dirpath, get_scm(os.path.join(path, dirpath))
+ except VCSError:
+ pass
+
+if __name__ == '__main__':
+ get_repos('', '/home/marcink/workspace-python')
+
def check_repo_fast(repo_name, base_path):
if os.path.isdir(os.path.join(base_path, repo_name)):return False
@@ -231,8 +251,6 @@ def make_ui(read_from='file', path=None,
for k, v in cfg.items(section):
baseui.setconfig(section, k, v)
log.debug('settings ui from file[%s]%s:%s', section, k, v)
- if checkpaths:check_repo_dir(cfg.items('paths'))
-
elif read_from == 'db':
hg_ui = get_hg_ui_cached()
@@ -284,7 +302,7 @@ class EmptyChangeset(BaseChangeset):
@LazyProperty
def raw_id(self):
"""
- Returns raw string identifing this changeset, useful for web
+ Returns raw string identifying this changeset, useful for web
representation.
"""
return '0' * 40
@@ -308,16 +326,21 @@ def repo2db_mapper(initial_repo_list, re
"""
sa = meta.Session()
+ rm = RepoModel(sa)
user = sa.query(User).filter(User.admin == True).first()
- rm = RepoModel()
+ for name, repo in initial_repo_list.items():
+ if not rm.get(name, cache=False):
+ log.info('repository %s not found creating default', name)
- for name, repo in initial_repo_list.items():
- if not RepoModel(sa).get(name, cache=False):
- log.info('repository %s not found creating default', name)
+ if isinstance(repo, MercurialRepository):
+ repo_type = 'hg'
+ if isinstance(repo, GitRepository):
+ repo_type = 'git'
form_data = {
'repo_name':name,
+ 'repo_type':repo_type,
'description':repo.description if repo.description != 'unknown' else \
'auto description for %s' % name,
'private':False
@@ -335,7 +358,6 @@ def repo2db_mapper(initial_repo_list, re
meta.Session.remove()
-from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py
--- a/rhodecode/model/db.py
+++ b/rhodecode/model/db.py
@@ -22,9 +22,9 @@ class RhodeCodeUi(Base):
ui_key = Column("ui_key", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
ui_value = Column("ui_value", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
ui_active = Column("ui_active", BOOLEAN(), nullable=True, unique=None, default=True)
-
-
-class User(Base):
+
+
+class User(Base):
__tablename__ = 'users'
__table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'useexisting':True})
user_id = Column("user_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
@@ -36,21 +36,21 @@ class User(Base):
lastname = Column("lastname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
email = Column("email", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
last_login = Column("last_login", DATETIME(timezone=False), nullable=True, unique=None, default=None)
-
+
user_log = relation('UserLog')
user_perms = relation('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id")
-
+
@LazyProperty
def full_contact(self):
return '%s %s <%s>' % (self.name, self.lastname, self.email)
-
+
def __repr__(self):
         return "<User('id:%s:%s')>" % (self.user_id, self.username)
-
+
def update_lastlogin(self):
"""Update user lastlogin"""
import datetime
-
+
try:
session = Session.object_session(self)
self.last_login = datetime.datetime.now()
@@ -58,48 +58,49 @@ class User(Base):
session.commit()
log.debug('updated user %s lastlogin', self.username)
except Exception:
- session.rollback()
-
-
-class UserLog(Base):
+ session.rollback()
+
+
+class UserLog(Base):
__tablename__ = 'user_logs'
__table_args__ = {'useexisting':True}
user_log_id = Column("user_log_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
repository_id = Column("repository_id", INTEGER(length=None, convert_unicode=False, assert_unicode=None), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
repository_name = Column("repository_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
- user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
+ user_ip = Column("user_ip", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
action = Column("action", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
action_date = Column("action_date", DATETIME(timezone=False), nullable=True, unique=None, default=None)
revision = Column('revision', TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
+
user = relation('User')
repository = relation('Repository')
-
+
class Repository(Base):
__tablename__ = 'repositories'
__table_args__ = (UniqueConstraint('repo_name'), {'useexisting':True},)
repo_id = Column("repo_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
repo_name = Column("repo_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
+ repo_type = Column("repo_type", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=False, default=None)
private = Column("private", BOOLEAN(), nullable=True, unique=None, default=None)
description = Column("description", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
fork_id = Column("fork_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=True, unique=False, default=None)
-
+
user = relation('User')
fork = relation('Repository', remote_side=repo_id)
repo_to_perm = relation('RepoToPerm', cascade='all')
-
+
def __repr__(self):
         return "<Repository('%s:%s')>" % (self.repo_id, self.repo_name)
-
+
class Permission(Base):
__tablename__ = 'permissions'
__table_args__ = {'useexisting':True}
permission_id = Column("permission_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
permission_name = Column("permission_name", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
permission_longname = Column("permission_longname", TEXT(length=None, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
-
+
def __repr__(self):
         return "<Permission('%s:%s')>" % (self.permission_id, self.permission_name)
@@ -109,8 +110,8 @@ class RepoToPerm(Base):
repo_to_perm_id = Column("repo_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
- repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
-
+ repository_id = Column("repository_id", INTEGER(), ForeignKey(u'repositories.repo_id'), nullable=False, unique=None, default=None)
+
user = relation('User')
permission = relation('Permission')
repository = relation('Repository')
@@ -121,7 +122,7 @@ class UserToPerm(Base):
user_to_perm_id = Column("user_to_perm_id", INTEGER(), nullable=False, unique=True, default=None, primary_key=True)
user_id = Column("user_id", INTEGER(), ForeignKey(u'users.user_id'), nullable=False, unique=None, default=None)
permission_id = Column("permission_id", INTEGER(), ForeignKey(u'permissions.permission_id'), nullable=False, unique=None, default=None)
-
+
user = relation('User')
permission = relation('Permission')
@@ -134,6 +135,6 @@ class Statistics(Base):
commit_activity = Column("commit_activity", BLOB(), nullable=False)#JSON data
commit_activity_combined = Column("commit_activity_combined", BLOB(), nullable=False)#JSON data
languages = Column("languages", BLOB(), nullable=False)#JSON data
-
+
repository = relation('Repository')
diff --git a/rhodecode/model/forms.py b/rhodecode/model/forms.py
--- a/rhodecode/model/forms.py
+++ b/rhodecode/model/forms.py
@@ -194,16 +194,12 @@ class ValidSettings(formencode.validator
class ValidPath(formencode.validators.FancyValidator):
def to_python(self, value, state):
- isdir = os.path.isdir(value.replace('*', ''))
- if (value.endswith('/*') or value.endswith('/**')) and isdir:
- return value
- elif not isdir:
+
+ if not os.path.isdir(value):
msg = _('This is not a valid path')
- else:
- msg = _('You need to specify * or ** at the end of path (ie. /tmp/*)')
-
- raise formencode.Invalid(msg, value, state,
+ raise formencode.Invalid(msg, value, state,
error_dict={'paths_root_path':msg})
+ return value
def UniqSystemEmail(old_data):
class _UniqSystemEmail(formencode.validators.FancyValidator):
diff --git a/rhodecode/model/hg.py b/rhodecode/model/hg.py
--- a/rhodecode/model/hg.py
+++ b/rhodecode/model/hg.py
@@ -24,7 +24,6 @@ Model for RhodeCode
"""
from beaker.cache import cache_region
from mercurial import ui
-from mercurial.hgweb.hgwebdir_mod import findrepos
from rhodecode.lib import helpers as h
from rhodecode.lib.utils import invalidate_cache
from rhodecode.lib.auth import HasRepoPermissionAny
@@ -33,12 +32,12 @@ from rhodecode.model.db import Repositor
from sqlalchemy.orm import joinedload
from vcs.exceptions import RepositoryError, VCSError
import logging
-import os
import sys
log = logging.getLogger(__name__)
try:
from vcs.backends.hg import MercurialRepository
+ from vcs.backends.git import GitRepository
except ImportError:
sys.stderr.write('You have to import vcs module')
raise Exception('Unable to import vcs')
@@ -47,7 +46,7 @@ def _get_repos_cached_initial(app_global
"""return cached dict with repos
"""
g = app_globals
- return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui, initial)
+ return HgModel().repo_scan(g.paths[0][1], g.baseui, initial)
@cache_region('long_term', 'cached_repo_list')
def _get_repos_cached():
@@ -55,7 +54,7 @@ def _get_repos_cached():
"""
log.info('getting all repositories list')
from pylons import app_globals as g
- return HgModel.repo_scan(g.paths[0][0], g.paths[0][1], g.baseui)
+ return HgModel().repo_scan(g.paths[0][1], g.baseui)
@cache_region('super_short_term', 'cached_repos_switcher_list')
def _get_repos_switcher_cached(cached_repo_list):
@@ -73,42 +72,34 @@ def _full_changelog_cached(repo_name):
return list(reversed(list(HgModel().get_repo(repo_name))))
class HgModel(object):
- """Mercurial Model
+ """
+ Mercurial Model
"""
- def __init__(self):
- pass
+ def __init__(self, sa=None):
+ if not sa:
+ self.sa = meta.Session()
+ else:
+ self.sa = sa
- @staticmethod
- def repo_scan(repos_prefix, repos_path, baseui, initial=False):
+ def repo_scan(self, repos_path, baseui, initial=False):
"""
Listing of repositories in given path. This path should not be a
repository itself. Return a dictionary of repository objects
- :param repos_path: path to directory it could take syntax with
- * or ** for deep recursive displaying repositories
+
+ :param repos_path: path to directory containing repositories
+ :param baseui: mercurial ui instance used when opening repositories
+ :param initial: initial scan
"""
- sa = meta.Session()
- def check_repo_dir(path):
- """Checks the repository
- :param path:
- """
- repos_path = path.split('/')
- if repos_path[-1] in ['*', '**']:
- repos_path = repos_path[:-1]
- if repos_path[0] != '/':
- repos_path[0] = '/'
- if not os.path.isdir(os.path.join(*repos_path)):
- raise RepositoryError('Not a valid repository in %s' % path)
- if not repos_path.endswith('*'):
- raise VCSError('You need to specify * or ** at the end of path '
- 'for recursive scanning')
+ log.info('scanning for repositories in %s', repos_path)
- check_repo_dir(repos_path)
- log.info('scanning for repositories in %s', repos_path)
- repos = findrepos([(repos_prefix, repos_path)])
if not isinstance(baseui, ui.ui):
baseui = ui.ui()
+ from rhodecode.lib.utils import get_repos
+ repos = get_repos(repos_path)
+
+
repos_list = {}
for name, path in repos:
try:
@@ -117,15 +108,19 @@ class HgModel(object):
raise RepositoryError('Duplicate repository name %s found in'
' %s' % (name, path))
else:
+ if path[0] == 'hg':
+ repos_list[name] = MercurialRepository(path[1], baseui=baseui)
+ repos_list[name].name = name
- repos_list[name] = MercurialRepository(path, baseui=baseui)
- repos_list[name].name = name
+ if path[0] == 'git':
+ repos_list[name] = GitRepository(path[1])
+ repos_list[name].name = name
dbrepo = None
if not initial:
#for initial scann on application first run we don't
#have db repos yet.
- dbrepo = sa.query(Repository)\
+ dbrepo = self.sa.query(Repository)\
.options(joinedload(Repository.fork))\
.filter(Repository.repo_name == name)\
.scalar()
@@ -137,16 +132,17 @@ class HgModel(object):
if dbrepo.user:
repos_list[name].contact = dbrepo.user.full_contact
else:
- repos_list[name].contact = sa.query(User)\
+ repos_list[name].contact = self.sa.query(User)\
.filter(User.admin == True).first().full_contact
except OSError:
continue
- meta.Session.remove()
+
return repos_list
def get_repos(self):
for name, repo in _get_repos_cached().items():
- if repo._get_hidden():
+
+ if isinstance(repo, MercurialRepository) and repo._get_hidden():
#skip hidden web repository
continue
diff --git a/rhodecode/templates/shortlog/shortlog_data.html b/rhodecode/templates/shortlog/shortlog_data.html
--- a/rhodecode/templates/shortlog/shortlog_data.html
+++ b/rhodecode/templates/shortlog/shortlog_data.html
@@ -13,7 +13,7 @@
%for cnt,cs in enumerate(c.repo_changesets):
- ${h.age(cs._ctx.date())} - ${h.rfc822date_notz(cs._ctx.date())} |
+ ${h.age(cs.date)} - ${h.rfc822date_notz(cs.date)} |
${h.person(cs.author)} |
r${cs.revision}:${cs.short_id} |
|