test_crawler.py
# -*- coding: utf-8 -*-
"""
rhodecode.tests.test_crawer
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test for crawling a project for memory usage
Added mem_watch script. Test can also walk on file tree. Fixed some path issues
r1334 watch -n1 ./rhodecode/tests/mem_watch
added test for crawling and memory usage
r1332
:created_on: Apr 21, 2010
:author: marcink
:copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
:license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
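
# Usage sketch (assumes a RhodeCode instance serving PROJECT at BASE_URI,
# e.g. one started locally with `paster serve` on its default port 5000):
#
#   python rhodecode/tests/test_crawler.py   # run the crawler
#   watch -n1 ./rhodecode/tests/mem_watch    # watch memory usage meanwhile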
import cookielib
import urllib
import urllib2
import vcs
import time
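# NOTE: `vcs` is the standalone version control library that RhodeCode builds
# on; it provides the repository and changeset objects walked below.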

from os.path import join as jn

BASE_URI = 'http://127.0.0.1:5000/%s'
PROJECT = 'CPython'
PROJECT_PATH = jn('/', 'home', 'marcink', 'hg_repos')

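# A single cookie-aware opener is installed globally, so every request made
# below shares one session, like a single browser crawling the instance.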
cj = cookielib.FileCookieJar('/tmp/rc_test_cookie.txt')
o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
o.addheaders = [
    ('User-agent', 'rhodecode-crawler'),
    ('Accept-Language', 'en-us,en;q=0.5'),
]
urllib2.install_opener(o)


def test_changelog_walk(pages=100):
    total_time = 0
    for i in range(1, pages):
        page = '/'.join((PROJECT, 'changelog',))
        full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i})
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        print 'visited %s size:%s req:%ss' % (full_uri, size, e)

    print 'total_time', total_time
    # range(1, pages) issues pages - 1 requests, so average over that count
    print 'average on req', total_time / float(pages - 1)

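
# Request the changeset page for every changeset in the repository (capped by
# `limit`); in RhodeCode this view renders the changeset's diff, so it is a
# heavier request than a changelog page.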
def test_changeset_walk(limit=None):
    print 'processing', jn(PROJECT_PATH, PROJECT)
    total_time = 0

    repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT))
    cnt = 0
    for i in repo:
        if limit and cnt >= limit:
            break
        cnt += 1
        raw_cs = '/'.join((PROJECT, 'changeset', i.raw_id))

        full_uri = (BASE_URI % raw_cs)
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        print '%s visited %s %s size:%s req:%ss' % (cnt, full_uri, i, size, e)

    print 'total_time', total_time
    print 'average on req', total_time / float(cnt)

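# Gather paths from the tip changeset first: directories are added before the
# files inside them (the OrderedSet preserves that order), then the files
# view is fetched for each collected path, up to `limit` requests.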
def test_files_walk(limit=100):
    print 'processing', jn(PROJECT_PATH, PROJECT)
    total_time = 0
    repo = vcs.get_repo(jn(PROJECT_PATH, PROJECT))

    from rhodecode.lib.oset import OrderedSet

    paths_ = OrderedSet([''])
    try:
        tip = repo.get_changeset('tip')
        for topnode, dirs, files in tip.walk('/'):
            for dir in dirs:
                paths_.add(dir.path)
                for f in dir:
                    paths_.add(f.path)

            for f in files:
                paths_.add(f.path)
    except vcs.exceptions.RepositoryError:
        pass

    cnt = 0
    for p in paths_:
        if limit and cnt >= limit:
            break
        cnt += 1

        file_path = '/'.join((PROJECT, 'files', 'tip', p))
        full_uri = (BASE_URI % file_path)
        s = time.time()
        f = o.open(full_uri)
        size = len(f.read())
        e = time.time() - s
        total_time += e
        print '%s visited %s size:%s req:%ss' % (cnt, full_uri, size, e)

    print 'total_time', total_time
    print 'average on req', total_time / float(cnt)

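# Run the three crawls back to back, pausing briefly between them.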
test_changelog_walk(40)
time.sleep(2)
test_changeset_walk(limit=100)
time.sleep(2)
test_files_walk(100)