##// END OF EJS Templates
subprocess: Change all imports from `subprocess` -> `subprocess32`
Martin Bornhold -
r1007:e9c22488 default
parent child Browse files
Show More
@@ -1,300 +1,300 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Various version Control System version lib (vcs) management abstraction layer
22 Various version Control System version lib (vcs) management abstraction layer
23 for Python. Build with server client architecture.
23 for Python. Build with server client architecture.
24 """
24 """
25
25
26
26
27 VERSION = (0, 5, 0, 'dev')
27 VERSION = (0, 5, 0, 'dev')
28
28
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
29 __version__ = '.'.join((str(each) for each in VERSION[:4]))
30
30
31 __all__ = [
31 __all__ = [
32 'get_version', 'get_vcs_instance', 'get_backend',
32 'get_version', 'get_vcs_instance', 'get_backend',
33 'VCSError', 'RepositoryError', 'CommitError'
33 'VCSError', 'RepositoryError', 'CommitError'
34 ]
34 ]
35
35
36 import atexit
36 import atexit
37 import logging
37 import logging
38 import subprocess
38 import subprocess32
39 import time
39 import time
40 import urlparse
40 import urlparse
41 from cStringIO import StringIO
41 from cStringIO import StringIO
42
42
43 import Pyro4
43 import Pyro4
44 from Pyro4.errors import CommunicationError
44 from Pyro4.errors import CommunicationError
45
45
46 from rhodecode.lib.vcs.conf import settings
46 from rhodecode.lib.vcs.conf import settings
47 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
47 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 VCSError, RepositoryError, CommitError, VCSCommunicationError)
49 VCSError, RepositoryError, CommitError, VCSCommunicationError)
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53 # The pycurl library directly accesses C API functions and is not patched by
53 # The pycurl library directly accesses C API functions and is not patched by
54 # gevent. This will potentially lead to deadlocks due to incompatibility to
54 # gevent. This will potentially lead to deadlocks due to incompatibility to
55 # gevent. Therefore we check if gevent is active and import a gevent compatible
55 # gevent. Therefore we check if gevent is active and import a gevent compatible
56 # wrapper in that case.
56 # wrapper in that case.
57 try:
57 try:
58 from gevent import monkey
58 from gevent import monkey
59 if monkey.is_module_patched('__builtin__'):
59 if monkey.is_module_patched('__builtin__'):
60 import geventcurl as pycurl
60 import geventcurl as pycurl
61 log.debug('Using gevent comapatible pycurl: %s', pycurl)
61 log.debug('Using gevent comapatible pycurl: %s', pycurl)
62 else:
62 else:
63 import pycurl
63 import pycurl
64 except ImportError:
64 except ImportError:
65 import pycurl
65 import pycurl
66
66
67
67
68 def get_version():
68 def get_version():
69 """
69 """
70 Returns shorter version (digit parts only) as string.
70 Returns shorter version (digit parts only) as string.
71 """
71 """
72 return '.'.join((str(each) for each in VERSION[:3]))
72 return '.'.join((str(each) for each in VERSION[:3]))
73
73
74
74
75 def connect_pyro4(server_and_port):
75 def connect_pyro4(server_and_port):
76 from rhodecode.lib.vcs import connection, client
76 from rhodecode.lib.vcs import connection, client
77 from rhodecode.lib.middleware.utils import scm_app
77 from rhodecode.lib.middleware.utils import scm_app
78
78
79 git_remote = client.RequestScopeProxyFactory(
79 git_remote = client.RequestScopeProxyFactory(
80 settings.pyro_remote(settings.PYRO_GIT, server_and_port))
80 settings.pyro_remote(settings.PYRO_GIT, server_and_port))
81 hg_remote = client.RequestScopeProxyFactory(
81 hg_remote = client.RequestScopeProxyFactory(
82 settings.pyro_remote(settings.PYRO_HG, server_and_port))
82 settings.pyro_remote(settings.PYRO_HG, server_and_port))
83 svn_remote = client.RequestScopeProxyFactory(
83 svn_remote = client.RequestScopeProxyFactory(
84 settings.pyro_remote(settings.PYRO_SVN, server_and_port))
84 settings.pyro_remote(settings.PYRO_SVN, server_and_port))
85
85
86 connection.Git = client.RepoMaker(proxy_factory=git_remote)
86 connection.Git = client.RepoMaker(proxy_factory=git_remote)
87 connection.Hg = client.RepoMaker(proxy_factory=hg_remote)
87 connection.Hg = client.RepoMaker(proxy_factory=hg_remote)
88 connection.Svn = client.RepoMaker(proxy_factory=svn_remote)
88 connection.Svn = client.RepoMaker(proxy_factory=svn_remote)
89
89
90 scm_app.GIT_REMOTE_WSGI = Pyro4.Proxy(
90 scm_app.GIT_REMOTE_WSGI = Pyro4.Proxy(
91 settings.pyro_remote(
91 settings.pyro_remote(
92 settings.PYRO_GIT_REMOTE_WSGI, server_and_port))
92 settings.PYRO_GIT_REMOTE_WSGI, server_and_port))
93 scm_app.HG_REMOTE_WSGI = Pyro4.Proxy(
93 scm_app.HG_REMOTE_WSGI = Pyro4.Proxy(
94 settings.pyro_remote(
94 settings.pyro_remote(
95 settings.PYRO_HG_REMOTE_WSGI, server_and_port))
95 settings.PYRO_HG_REMOTE_WSGI, server_and_port))
96
96
97 @atexit.register
97 @atexit.register
98 def free_connection_resources():
98 def free_connection_resources():
99 connection.Git = None
99 connection.Git = None
100 connection.Hg = None
100 connection.Hg = None
101 connection.Svn = None
101 connection.Svn = None
102
102
103
103
104 def connect_http(server_and_port):
104 def connect_http(server_and_port):
105 from rhodecode.lib.vcs import connection, client_http
105 from rhodecode.lib.vcs import connection, client_http
106 from rhodecode.lib.middleware.utils import scm_app
106 from rhodecode.lib.middleware.utils import scm_app
107
107
108 session_factory = client_http.ThreadlocalSessionFactory()
108 session_factory = client_http.ThreadlocalSessionFactory()
109
109
110 connection.Git = client_http.RepoMaker(
110 connection.Git = client_http.RepoMaker(
111 server_and_port, '/git', session_factory)
111 server_and_port, '/git', session_factory)
112 connection.Hg = client_http.RepoMaker(
112 connection.Hg = client_http.RepoMaker(
113 server_and_port, '/hg', session_factory)
113 server_and_port, '/hg', session_factory)
114 connection.Svn = client_http.RepoMaker(
114 connection.Svn = client_http.RepoMaker(
115 server_and_port, '/svn', session_factory)
115 server_and_port, '/svn', session_factory)
116
116
117 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
117 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
118 server_and_port, '/proxy/hg')
118 server_and_port, '/proxy/hg')
119 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
119 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
120 server_and_port, '/proxy/git')
120 server_and_port, '/proxy/git')
121
121
122 @atexit.register
122 @atexit.register
123 def free_connection_resources():
123 def free_connection_resources():
124 connection.Git = None
124 connection.Git = None
125 connection.Hg = None
125 connection.Hg = None
126 connection.Svn = None
126 connection.Svn = None
127
127
128
128
129 def connect_vcs(server_and_port, protocol):
129 def connect_vcs(server_and_port, protocol):
130 """
130 """
131 Initializes the connection to the vcs server.
131 Initializes the connection to the vcs server.
132
132
133 :param server_and_port: str, e.g. "localhost:9900"
133 :param server_and_port: str, e.g. "localhost:9900"
134 :param protocol: str, "pyro4" or "http"
134 :param protocol: str, "pyro4" or "http"
135 """
135 """
136 if protocol == 'pyro4':
136 if protocol == 'pyro4':
137 connect_pyro4(server_and_port)
137 connect_pyro4(server_and_port)
138 elif protocol == 'http':
138 elif protocol == 'http':
139 connect_http(server_and_port)
139 connect_http(server_and_port)
140 else:
140 else:
141 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
141 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
142
142
143
143
144 # TODO: johbo: This function should be moved into our test suite, there is
144 # TODO: johbo: This function should be moved into our test suite, there is
145 # no reason to support starting the vcsserver in Enterprise itself.
145 # no reason to support starting the vcsserver in Enterprise itself.
146 def start_vcs_server(server_and_port, protocol, log_level=None):
146 def start_vcs_server(server_and_port, protocol, log_level=None):
147 """
147 """
148 Starts the vcs server in a subprocess.
148 Starts the vcs server in a subprocess.
149 """
149 """
150 log.info('Starting VCSServer as a sub process with %s protocol', protocol)
150 log.info('Starting VCSServer as a sub process with %s protocol', protocol)
151 if protocol == 'http':
151 if protocol == 'http':
152 return _start_http_vcs_server(server_and_port, log_level)
152 return _start_http_vcs_server(server_and_port, log_level)
153 elif protocol == 'pyro4':
153 elif protocol == 'pyro4':
154 return _start_pyro4_vcs_server(server_and_port, log_level)
154 return _start_pyro4_vcs_server(server_and_port, log_level)
155 else:
155 else:
156 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
156 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
157
157
158
158
159 def _start_pyro4_vcs_server(server_and_port, log_level=None):
159 def _start_pyro4_vcs_server(server_and_port, log_level=None):
160 _try_to_shutdown_running_server(server_and_port, protocol='pyro4')
160 _try_to_shutdown_running_server(server_and_port, protocol='pyro4')
161 host, port = server_and_port.rsplit(":", 1)
161 host, port = server_and_port.rsplit(":", 1)
162 host = host.strip('[]')
162 host = host.strip('[]')
163 args = [
163 args = [
164 'vcsserver', '--port', port, '--host', host, '--locale', 'en_US.UTF-8',
164 'vcsserver', '--port', port, '--host', host, '--locale', 'en_US.UTF-8',
165 '--threadpool', '32']
165 '--threadpool', '32']
166 if log_level:
166 if log_level:
167 args += ['--log-level', log_level]
167 args += ['--log-level', log_level]
168 proc = subprocess.Popen(args)
168 proc = subprocess32.Popen(args)
169
169
170 def cleanup_server_process():
170 def cleanup_server_process():
171 proc.kill()
171 proc.kill()
172 atexit.register(cleanup_server_process)
172 atexit.register(cleanup_server_process)
173
173
174 server = create_vcsserver_proxy(server_and_port, protocol='pyro4')
174 server = create_vcsserver_proxy(server_and_port, protocol='pyro4')
175 _wait_until_vcs_server_is_reachable(server)
175 _wait_until_vcs_server_is_reachable(server)
176
176
177
177
178 def _start_http_vcs_server(server_and_port, log_level=None):
178 def _start_http_vcs_server(server_and_port, log_level=None):
179 # TODO: mikhail: shutdown if an http server already runs
179 # TODO: mikhail: shutdown if an http server already runs
180
180
181 host, port = server_and_port.rsplit(":", 1)
181 host, port = server_and_port.rsplit(":", 1)
182 args = [
182 args = [
183 'pserve', 'rhodecode/tests/vcsserver_http.ini',
183 'pserve', 'rhodecode/tests/vcsserver_http.ini',
184 'http_port=%s' % (port, ), 'http_host=%s' % (host, )]
184 'http_port=%s' % (port, ), 'http_host=%s' % (host, )]
185 proc = subprocess.Popen(args)
185 proc = subprocess32.Popen(args)
186
186
187 def cleanup_server_process():
187 def cleanup_server_process():
188 proc.kill()
188 proc.kill()
189 atexit.register(cleanup_server_process)
189 atexit.register(cleanup_server_process)
190
190
191 server = create_vcsserver_proxy(server_and_port, protocol='http')
191 server = create_vcsserver_proxy(server_and_port, protocol='http')
192 _wait_until_vcs_server_is_reachable(server)
192 _wait_until_vcs_server_is_reachable(server)
193
193
194
194
195 def _wait_until_vcs_server_is_reachable(server, timeout=40):
195 def _wait_until_vcs_server_is_reachable(server, timeout=40):
196 begin = time.time()
196 begin = time.time()
197 while (time.time() - begin) < timeout:
197 while (time.time() - begin) < timeout:
198 try:
198 try:
199 server.ping()
199 server.ping()
200 return
200 return
201 except (VCSCommunicationError, CommunicationError, pycurl.error):
201 except (VCSCommunicationError, CommunicationError, pycurl.error):
202 log.debug('VCSServer not started yet, retry to connect.')
202 log.debug('VCSServer not started yet, retry to connect.')
203 time.sleep(0.5)
203 time.sleep(0.5)
204 raise Exception(
204 raise Exception(
205 'Starting the VCSServer failed or took more than {} '
205 'Starting the VCSServer failed or took more than {} '
206 'seconds.'.format(timeout))
206 'seconds.'.format(timeout))
207
207
208
208
209 def _try_to_shutdown_running_server(server_and_port, protocol):
209 def _try_to_shutdown_running_server(server_and_port, protocol):
210 server = create_vcsserver_proxy(server_and_port, protocol)
210 server = create_vcsserver_proxy(server_and_port, protocol)
211 try:
211 try:
212 server.shutdown()
212 server.shutdown()
213 except (CommunicationError, pycurl.error):
213 except (CommunicationError, pycurl.error):
214 return
214 return
215
215
216 # TODO: Not sure why this is important, but without it the following start
216 # TODO: Not sure why this is important, but without it the following start
217 # of the server fails.
217 # of the server fails.
218 server = create_vcsserver_proxy(server_and_port, protocol)
218 server = create_vcsserver_proxy(server_and_port, protocol)
219 server.ping()
219 server.ping()
220
220
221
221
222 def create_vcsserver_proxy(server_and_port, protocol):
222 def create_vcsserver_proxy(server_and_port, protocol):
223 if protocol == 'pyro4':
223 if protocol == 'pyro4':
224 return _create_vcsserver_proxy_pyro4(server_and_port)
224 return _create_vcsserver_proxy_pyro4(server_and_port)
225 elif protocol == 'http':
225 elif protocol == 'http':
226 return _create_vcsserver_proxy_http(server_and_port)
226 return _create_vcsserver_proxy_http(server_and_port)
227 else:
227 else:
228 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
228 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
229
229
230
230
231 def _create_vcsserver_proxy_pyro4(server_and_port):
231 def _create_vcsserver_proxy_pyro4(server_and_port):
232 server = Pyro4.Proxy(
232 server = Pyro4.Proxy(
233 settings.pyro_remote(settings.PYRO_VCSSERVER, server_and_port))
233 settings.pyro_remote(settings.PYRO_VCSSERVER, server_and_port))
234 return server
234 return server
235
235
236
236
237 def _create_vcsserver_proxy_http(server_and_port):
237 def _create_vcsserver_proxy_http(server_and_port):
238 from rhodecode.lib.vcs import client_http
238 from rhodecode.lib.vcs import client_http
239
239
240 session = _create_http_rpc_session()
240 session = _create_http_rpc_session()
241 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
241 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
242 return client_http.RemoteObject(url, session)
242 return client_http.RemoteObject(url, session)
243
243
244
244
245 class CurlSession(object):
245 class CurlSession(object):
246 """
246 """
247 Modeled so that it provides a subset of the requests interface.
247 Modeled so that it provides a subset of the requests interface.
248
248
249 This has been created so that it does only provide a minimal API for our
249 This has been created so that it does only provide a minimal API for our
250 needs. The parts which it provides are based on the API of the library
250 needs. The parts which it provides are based on the API of the library
251 `requests` which allows us to easily benchmark against it.
251 `requests` which allows us to easily benchmark against it.
252
252
253 Please have a look at the class :class:`requests.Session` when you extend
253 Please have a look at the class :class:`requests.Session` when you extend
254 it.
254 it.
255 """
255 """
256
256
257 def __init__(self):
257 def __init__(self):
258 curl = pycurl.Curl()
258 curl = pycurl.Curl()
259 # TODO: johbo: I did test with 7.19 of libcurl. This version has
259 # TODO: johbo: I did test with 7.19 of libcurl. This version has
260 # trouble with 100 - continue being set in the expect header. This
260 # trouble with 100 - continue being set in the expect header. This
261 # can lead to massive performance drops, switching it off here.
261 # can lead to massive performance drops, switching it off here.
262 curl.setopt(curl.HTTPHEADER, ["Expect:"])
262 curl.setopt(curl.HTTPHEADER, ["Expect:"])
263 curl.setopt(curl.TCP_NODELAY, True)
263 curl.setopt(curl.TCP_NODELAY, True)
264 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
264 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
265 self._curl = curl
265 self._curl = curl
266
266
267 def post(self, url, data, allow_redirects=False):
267 def post(self, url, data, allow_redirects=False):
268 response_buffer = StringIO()
268 response_buffer = StringIO()
269
269
270 curl = self._curl
270 curl = self._curl
271 curl.setopt(curl.URL, url)
271 curl.setopt(curl.URL, url)
272 curl.setopt(curl.POST, True)
272 curl.setopt(curl.POST, True)
273 curl.setopt(curl.POSTFIELDS, data)
273 curl.setopt(curl.POSTFIELDS, data)
274 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
274 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
275 curl.setopt(curl.WRITEDATA, response_buffer)
275 curl.setopt(curl.WRITEDATA, response_buffer)
276 curl.perform()
276 curl.perform()
277
277
278 return CurlResponse(response_buffer)
278 return CurlResponse(response_buffer)
279
279
280
280
281 class CurlResponse(object):
281 class CurlResponse(object):
282 """
282 """
283 The response of a request, modeled after the requests API.
283 The response of a request, modeled after the requests API.
284
284
285 This class provides a subset of the response interface known from the
285 This class provides a subset of the response interface known from the
286 library `requests`. It is intentionally kept similar, so that we can use
286 library `requests`. It is intentionally kept similar, so that we can use
287 `requests` as a drop in replacement for benchmarking purposes.
287 `requests` as a drop in replacement for benchmarking purposes.
288 """
288 """
289
289
290 def __init__(self, response_buffer):
290 def __init__(self, response_buffer):
291 self._response_buffer = response_buffer
291 self._response_buffer = response_buffer
292
292
293 @property
293 @property
294 def content(self):
294 def content(self):
295 return self._response_buffer.getvalue()
295 return self._response_buffer.getvalue()
296
296
297
297
298 def _create_http_rpc_session():
298 def _create_http_rpc_session():
299 session = CurlSession()
299 session = CurlSession()
300 return session
300 return session
@@ -1,1025 +1,1025 b''
1 # (c) 2005 Ian Bicking and contributors; written for Paste
1 # (c) 2005 Ian Bicking and contributors; written for Paste
2 # (http://pythonpaste.org) Licensed under the MIT license:
2 # (http://pythonpaste.org) Licensed under the MIT license:
3 # http://www.opensource.org/licenses/mit-license.php
3 # http://www.opensource.org/licenses/mit-license.php
4 #
4 #
5 # For discussion of daemonizing:
5 # For discussion of daemonizing:
6 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/278731
6 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/278731
7 #
7 #
8 # Code taken also from QP: http://www.mems-exchange.org/software/qp/ From
8 # Code taken also from QP: http://www.mems-exchange.org/software/qp/ From
9 # lib/site.py
9 # lib/site.py
10
10
11 import atexit
11 import atexit
12 import errno
12 import errno
13 import fnmatch
13 import fnmatch
14 import logging
14 import logging
15 import optparse
15 import optparse
16 import os
16 import os
17 import re
17 import re
18 import subprocess
18 import subprocess32
19 import sys
19 import sys
20 import textwrap
20 import textwrap
21 import threading
21 import threading
22 import time
22 import time
23 import traceback
23 import traceback
24
24
25 from logging.config import fileConfig
25 from logging.config import fileConfig
26 import ConfigParser as configparser
26 import ConfigParser as configparser
27 from paste.deploy import loadserver
27 from paste.deploy import loadserver
28 from paste.deploy import loadapp
28 from paste.deploy import loadapp
29
29
30 import rhodecode
30 import rhodecode
31 from rhodecode.lib.compat import kill
31 from rhodecode.lib.compat import kill
32
32
33
33
34 def make_web_build_callback(filename):
34 def make_web_build_callback(filename):
35 p = subprocess.Popen('make web-build', shell=True,
35 p = subprocess32.Popen('make web-build', shell=True,
36 stdout=subprocess.PIPE,
36 stdout=subprocess32.PIPE,
37 stderr=subprocess.PIPE,
37 stderr=subprocess32.PIPE,
38 cwd=os.path.dirname(os.path.dirname(__file__)))
38 cwd=os.path.dirname(os.path.dirname(__file__)))
39 stdout, stderr = p.communicate()
39 stdout, stderr = p.communicate()
40 stdout = ''.join(stdout)
40 stdout = ''.join(stdout)
41 stderr = ''.join(stderr)
41 stderr = ''.join(stderr)
42 if stdout:
42 if stdout:
43 print stdout
43 print stdout
44 if stderr:
44 if stderr:
45 print ('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20))
45 print ('%s %s %s' % ('-' * 20, 'ERRORS', '-' * 20))
46 print stderr
46 print stderr
47
47
48
48
49 MAXFD = 1024
49 MAXFD = 1024
50 HERE = os.path.dirname(os.path.abspath(__file__))
50 HERE = os.path.dirname(os.path.abspath(__file__))
51 SERVER_RUNNING_FILE = None
51 SERVER_RUNNING_FILE = None
52
52
53
53
54 # watch those extra files for changes, server gets restarted if file changes
54 # watch those extra files for changes, server gets restarted if file changes
55 GLOBAL_EXTRA_FILES = {
55 GLOBAL_EXTRA_FILES = {
56 'rhodecode/public/css/*.less': make_web_build_callback,
56 'rhodecode/public/css/*.less': make_web_build_callback,
57 'rhodecode/public/js/src/**/*.js': make_web_build_callback,
57 'rhodecode/public/js/src/**/*.js': make_web_build_callback,
58 }
58 }
59
59
60
60
61
61
62 ## HOOKS - inspired by gunicorn #
62 ## HOOKS - inspired by gunicorn #
63
63
64 def when_ready(server):
64 def when_ready(server):
65 """
65 """
66 Called just after the server is started.
66 Called just after the server is started.
67 """
67 """
68
68
69 def _remove_server_running_file():
69 def _remove_server_running_file():
70 if os.path.isfile(SERVER_RUNNING_FILE):
70 if os.path.isfile(SERVER_RUNNING_FILE):
71 os.remove(SERVER_RUNNING_FILE)
71 os.remove(SERVER_RUNNING_FILE)
72
72
73 if SERVER_RUNNING_FILE:
73 if SERVER_RUNNING_FILE:
74 with open(SERVER_RUNNING_FILE, 'wb') as f:
74 with open(SERVER_RUNNING_FILE, 'wb') as f:
75 f.write(str(os.getpid()))
75 f.write(str(os.getpid()))
76 # register cleanup of that file when server exits
76 # register cleanup of that file when server exits
77 atexit.register(_remove_server_running_file)
77 atexit.register(_remove_server_running_file)
78
78
79
79
80 def setup_logging(config_uri, fileConfig=fileConfig,
80 def setup_logging(config_uri, fileConfig=fileConfig,
81 configparser=configparser):
81 configparser=configparser):
82 """
82 """
83 Set up logging via the logging module's fileConfig function with the
83 Set up logging via the logging module's fileConfig function with the
84 filename specified via ``config_uri`` (a string in the form
84 filename specified via ``config_uri`` (a string in the form
85 ``filename#sectionname``).
85 ``filename#sectionname``).
86
86
87 ConfigParser defaults are specified for the special ``__file__``
87 ConfigParser defaults are specified for the special ``__file__``
88 and ``here`` variables, similar to PasteDeploy config loading.
88 and ``here`` variables, similar to PasteDeploy config loading.
89 """
89 """
90 path, _ = _getpathsec(config_uri, None)
90 path, _ = _getpathsec(config_uri, None)
91 parser = configparser.ConfigParser()
91 parser = configparser.ConfigParser()
92 parser.read([path])
92 parser.read([path])
93 if parser.has_section('loggers'):
93 if parser.has_section('loggers'):
94 config_file = os.path.abspath(path)
94 config_file = os.path.abspath(path)
95 return fileConfig(
95 return fileConfig(
96 config_file,
96 config_file,
97 {'__file__': config_file, 'here': os.path.dirname(config_file)}
97 {'__file__': config_file, 'here': os.path.dirname(config_file)}
98 )
98 )
99
99
100
100
101 def set_rhodecode_is_test(config_uri):
101 def set_rhodecode_is_test(config_uri):
102 """If is_test is defined in the config file sets rhodecode.is_test."""
102 """If is_test is defined in the config file sets rhodecode.is_test."""
103 path, _ = _getpathsec(config_uri, None)
103 path, _ = _getpathsec(config_uri, None)
104 parser = configparser.ConfigParser()
104 parser = configparser.ConfigParser()
105 parser.read(path)
105 parser.read(path)
106 rhodecode.is_test = (
106 rhodecode.is_test = (
107 parser.has_option('app:main', 'is_test') and
107 parser.has_option('app:main', 'is_test') and
108 parser.getboolean('app:main', 'is_test'))
108 parser.getboolean('app:main', 'is_test'))
109
109
110
110
111 def _getpathsec(config_uri, name):
111 def _getpathsec(config_uri, name):
112 if '#' in config_uri:
112 if '#' in config_uri:
113 path, section = config_uri.split('#', 1)
113 path, section = config_uri.split('#', 1)
114 else:
114 else:
115 path, section = config_uri, 'main'
115 path, section = config_uri, 'main'
116 if name:
116 if name:
117 section = name
117 section = name
118 return path, section
118 return path, section
119
119
120
120
121 def parse_vars(args):
121 def parse_vars(args):
122 """
122 """
123 Given variables like ``['a=b', 'c=d']`` turns it into ``{'a':
123 Given variables like ``['a=b', 'c=d']`` turns it into ``{'a':
124 'b', 'c': 'd'}``
124 'b', 'c': 'd'}``
125 """
125 """
126 result = {}
126 result = {}
127 for arg in args:
127 for arg in args:
128 if '=' not in arg:
128 if '=' not in arg:
129 raise ValueError(
129 raise ValueError(
130 'Variable assignment %r invalid (no "=")'
130 'Variable assignment %r invalid (no "=")'
131 % arg)
131 % arg)
132 name, value = arg.split('=', 1)
132 name, value = arg.split('=', 1)
133 result[name] = value
133 result[name] = value
134 return result
134 return result
135
135
136
136
137 def _match_pattern(filename):
137 def _match_pattern(filename):
138 for pattern in GLOBAL_EXTRA_FILES:
138 for pattern in GLOBAL_EXTRA_FILES:
139 if fnmatch.fnmatch(filename, pattern):
139 if fnmatch.fnmatch(filename, pattern):
140 return pattern
140 return pattern
141 return False
141 return False
142
142
143
143
144 def generate_extra_file_list():
144 def generate_extra_file_list():
145
145
146 extra_list = []
146 extra_list = []
147 for root, dirs, files in os.walk(HERE, topdown=True):
147 for root, dirs, files in os.walk(HERE, topdown=True):
148 for fname in files:
148 for fname in files:
149 stripped_src = os.path.join(
149 stripped_src = os.path.join(
150 'rhodecode', os.path.relpath(os.path.join(root, fname), HERE))
150 'rhodecode', os.path.relpath(os.path.join(root, fname), HERE))
151
151
152 if _match_pattern(stripped_src):
152 if _match_pattern(stripped_src):
153 extra_list.append(stripped_src)
153 extra_list.append(stripped_src)
154
154
155 return extra_list
155 return extra_list
156
156
157
157
158 def run_callback_for_pattern(filename):
158 def run_callback_for_pattern(filename):
159 pattern = _match_pattern(filename)
159 pattern = _match_pattern(filename)
160 if pattern:
160 if pattern:
161 _file_callback = GLOBAL_EXTRA_FILES.get(pattern)
161 _file_callback = GLOBAL_EXTRA_FILES.get(pattern)
162 if callable(_file_callback):
162 if callable(_file_callback):
163 _file_callback(filename)
163 _file_callback(filename)
164
164
165
165
166 class DaemonizeException(Exception):
166 class DaemonizeException(Exception):
167 pass
167 pass
168
168
169
169
170 class RcServerCommand(object):
170 class RcServerCommand(object):
171
171
172 usage = '%prog config_uri [start|stop|restart|status] [var=value]'
172 usage = '%prog config_uri [start|stop|restart|status] [var=value]'
173 description = """\
173 description = """\
174 This command serves a web application that uses a PasteDeploy
174 This command serves a web application that uses a PasteDeploy
175 configuration file for the server and application.
175 configuration file for the server and application.
176
176
177 If start/stop/restart is given, then --daemon is implied, and it will
177 If start/stop/restart is given, then --daemon is implied, and it will
178 start (normal operation), stop (--stop-daemon), or do both.
178 start (normal operation), stop (--stop-daemon), or do both.
179
179
180 You can also include variable assignments like 'http_port=8080'
180 You can also include variable assignments like 'http_port=8080'
181 and then use %(http_port)s in your config files.
181 and then use %(http_port)s in your config files.
182 """
182 """
    # Verbosity used when neither -v/--verbose nor -q/--quiet is given.
    default_verbosity = 1

    # Shared command-line interface; ``usage`` and ``description`` are
    # class-level strings defined above this block.
    parser = optparse.OptionParser(
        usage,
        description=textwrap.dedent(description)
    )
    parser.add_option(
        '-n', '--app-name',
        dest='app_name',
        metavar='NAME',
        help="Load the named application (default main)")
    parser.add_option(
        '-s', '--server',
        dest='server',
        metavar='SERVER_TYPE',
        help="Use the named server.")
    parser.add_option(
        '--server-name',
        dest='server_name',
        metavar='SECTION_NAME',
        help=("Use the named server as defined in the configuration file "
              "(default: main)"))
    parser.add_option(
        '--with-vcsserver',
        dest='vcs_server',
        action='store_true',
        help=("Start the vcsserver instance together with the RhodeCode server"))
    # Daemon mode relies on os.fork(), which is unavailable on Windows.
    if hasattr(os, 'fork'):
        parser.add_option(
            '--daemon',
            dest="daemon",
            action="store_true",
            help="Run in daemon (background) mode")
    parser.add_option(
        '--pid-file',
        dest='pid_file',
        metavar='FILENAME',
        help=("Save PID to file (default to pyramid.pid if running in "
              "daemon mode)"))
    parser.add_option(
        '--running-file',
        dest='running_file',
        metavar='RUNNING_FILE',
        help="Create a running file after the server is initalized with "
             "stored PID of process")
    parser.add_option(
        '--log-file',
        dest='log_file',
        metavar='LOG_FILE',
        help="Save output to the given log file (redirects stdout)")
    parser.add_option(
        '--reload',
        dest='reload',
        action='store_true',
        help="Use auto-restart file monitor")
    parser.add_option(
        '--reload-interval',
        dest='reload_interval',
        default=1,
        help=("Seconds between checking files (low number can cause "
              "significant CPU usage)"))
    parser.add_option(
        '--monitor-restart',
        dest='monitor_restart',
        action='store_true',
        help="Auto-restart server if it dies")
    parser.add_option(
        '--status',
        action='store_true',
        dest='show_status',
        help="Show the status of the (presumably daemonized) server")
    parser.add_option(
        '-v', '--verbose',
        default=default_verbosity,
        dest='verbose',
        action='count',
        help="Set verbose level (default "+str(default_verbosity)+")")
    parser.add_option(
        '-q', '--quiet',
        action='store_const',
        const=0,
        dest='verbose',
        help="Suppress verbose output")

    # Privilege dropping relies on os.setuid()/os.setgid().
    if hasattr(os, 'setuid'):
        # I don't think these are available on Windows
        parser.add_option(
            '--user',
            dest='set_user',
            metavar="USERNAME",
            help="Set the user (usually only possible when run as root)")
        parser.add_option(
            '--group',
            dest='set_group',
            metavar="GROUP",
            help="Set the group (usually only possible when run as root)")

    parser.add_option(
        '--stop-daemon',
        dest='stop_daemon',
        action='store_true',
        help=('Stop a daemonized server (given a PID file, or default '
              'pyramid.pid file)'))

    # Matches URL-style app specs such as 'config:' or 'egg:'.
    _scheme_re = re.compile(r'^[a-z][a-z]+:', re.I)

    # Environment flags used to tell a child process whether it should run
    # the reloading file monitor or is being supervised by a monitor parent.
    _reloader_environ_key = 'PYTHON_RELOADER_SHOULD_RUN'
    _monitor_environ_key = 'PASTE_MONITOR_SHOULD_RUN'

    # Recognised positional subcommands (second CLI argument).
    possible_subcommands = ('start', 'stop', 'restart', 'status')
293
293
294 def __init__(self, argv, quiet=False):
294 def __init__(self, argv, quiet=False):
295 self.options, self.args = self.parser.parse_args(argv[1:])
295 self.options, self.args = self.parser.parse_args(argv[1:])
296 if quiet:
296 if quiet:
297 self.options.verbose = 0
297 self.options.verbose = 0
298
298
299 def out(self, msg): # pragma: no cover
299 def out(self, msg): # pragma: no cover
300 if self.options.verbose > 0:
300 if self.options.verbose > 0:
301 print(msg)
301 print(msg)
302
302
303 def get_options(self):
303 def get_options(self):
304 if (len(self.args) > 1
304 if (len(self.args) > 1
305 and self.args[1] in self.possible_subcommands):
305 and self.args[1] in self.possible_subcommands):
306 restvars = self.args[2:]
306 restvars = self.args[2:]
307 else:
307 else:
308 restvars = self.args[1:]
308 restvars = self.args[1:]
309
309
310 return parse_vars(restvars)
310 return parse_vars(restvars)
311
311
    def run(self):  # pragma: no cover
        """Main entry point: start/stop/restart the server or show status.

        Returns an integer exit code (0 on success, non-zero on error)
        or falls through after the server loop ends.  Heavy side effects:
        may fork, drop privileges, write PID/log files, and redirect
        stdout/stderr.
        """
        if self.options.stop_daemon:
            return self.stop_daemon()

        if not hasattr(self.options, 'set_user'):
            # Windows case:
            self.options.set_user = self.options.set_group = None
        # @@: Is this the right stage to set the user at?
        self.change_user_group(
            self.options.set_user, self.options.set_group)

        if not self.args:
            self.out('Please provide configuration file as first argument, '
                     'most likely it should be production.ini')
            return 2
        app_spec = self.args[0]

        # Optional subcommand is the second positional argument.
        if (len(self.args) > 1
                and self.args[1] in self.possible_subcommands):
            cmd = self.args[1]
        else:
            cmd = None

        if self.options.reload:
            if os.environ.get(self._reloader_environ_key):
                # We are the child process: actually install the monitor.
                if self.options.verbose > 1:
                    self.out('Running reloading file monitor')
                install_reloader(int(self.options.reload_interval),
                                 [app_spec] + generate_extra_file_list())
                # if self.requires_config_file:
                #     watch_file(self.args[0])
            else:
                # We are the parent: re-exec ourselves under the monitor.
                return self.restart_with_reloader()

        if cmd not in (None, 'start', 'stop', 'restart', 'status'):
            self.out(
                'Error: must give start|stop|restart (not %s)' % cmd)
            return 2

        if cmd == 'status' or self.options.show_status:
            return self.show_status()

        if cmd == 'restart' or cmd == 'stop':
            result = self.stop_daemon()
            if result:
                if cmd == 'restart':
                    self.out("Could not stop daemon; aborting")
                else:
                    self.out("Could not stop daemon")
                return result
            if cmd == 'stop':
                return result
            # 'restart' implies daemon mode for the subsequent start.
            self.options.daemon = True

        if cmd == 'start':
            self.options.daemon = True

        app_name = self.options.app_name

        vars = self.get_options()

        if self.options.vcs_server:
            vars['vcs.start_server'] = 'true'

        if self.options.running_file:
            global SERVER_RUNNING_FILE
            SERVER_RUNNING_FILE = self.options.running_file

        # Bare filenames are treated as PasteDeploy config: URIs.
        if not self._scheme_re.search(app_spec):
            app_spec = 'config:' + app_spec
        server_name = self.options.server_name
        if self.options.server:
            server_spec = 'egg:pyramid'
            assert server_name is None
            server_name = self.options.server
        else:
            server_spec = app_spec
        base = os.getcwd()

        # Daemon mode forces default PID and log file locations.
        if getattr(self.options, 'daemon', False):
            if not self.options.pid_file:
                self.options.pid_file = 'pyramid.pid'
            if not self.options.log_file:
                self.options.log_file = 'pyramid.log'

        # Ensure the log file is writeable
        if self.options.log_file:
            try:
                writeable_log_file = open(self.options.log_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to log file: %s' % ioe
                raise ValueError(msg)
            writeable_log_file.close()

        # Ensure the pid file is writeable
        if self.options.pid_file:
            try:
                writeable_pid_file = open(self.options.pid_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to pid file: %s' % ioe
                raise ValueError(msg)
            writeable_pid_file.close()

        if getattr(self.options, 'daemon', False):
            try:
                self.daemonize()
            except DaemonizeException as ex:
                if self.options.verbose > 0:
                    self.out(str(ex))
                return 2

        # Monitor-parent mode: re-exec under a supervising parent process.
        if (self.options.monitor_restart
                and not os.environ.get(self._monitor_environ_key)):
            return self.restart_with_monitor()

        if self.options.pid_file:
            self.record_pid(self.options.pid_file)

        if self.options.log_file:
            stdout_log = LazyWriter(self.options.log_file, 'a')
            sys.stdout = stdout_log
            sys.stderr = stdout_log
            logging.basicConfig(stream=stdout_log)

        # Derive the logging-config filename from the app spec.
        log_fn = app_spec
        if log_fn.startswith('config:'):
            log_fn = app_spec[len('config:'):]
        elif log_fn.startswith('egg:'):
            log_fn = None
        if log_fn:
            log_fn = os.path.join(base, log_fn)
            setup_logging(log_fn)
            set_rhodecode_is_test(log_fn)

        server = self.loadserver(server_spec, name=server_name,
                                 relative_to=base, global_conf=vars)
        # starting hooks
        app = self.loadapp(app_spec, name=app_name, relative_to=base,
                           global_conf=vars)

        if self.options.verbose > 0:
            if hasattr(os, 'getpid'):
                msg = 'Starting %s in PID %i.' % (__name__, os.getpid())
            else:
                msg = 'Starting %s.' % (__name__,)
            self.out(msg)
            if SERVER_RUNNING_FILE:
                self.out('PID file written as %s' % (SERVER_RUNNING_FILE, ))
            elif not self.options.pid_file:
                self.out('No PID file written by default.')

        try:
            when_ready(server)
            # Blocks until the server shuts down.
            server(app)
        except (SystemExit, KeyboardInterrupt) as e:
            if self.options.verbose > 1:
                raise
            if str(e):
                msg = ' ' + str(e)
            else:
                msg = ''
            self.out('Exiting%s (-v to see traceback)' % msg)
477
477
478
478
479 def loadapp(self, app_spec, name, relative_to, **kw): # pragma: no cover
479 def loadapp(self, app_spec, name, relative_to, **kw): # pragma: no cover
480 return loadapp(app_spec, name=name, relative_to=relative_to, **kw)
480 return loadapp(app_spec, name=name, relative_to=relative_to, **kw)
481
481
482 def loadserver(self, server_spec, name, relative_to, **kw): # pragma:no cover
482 def loadserver(self, server_spec, name, relative_to, **kw): # pragma:no cover
483 return loadserver(
483 return loadserver(
484 server_spec, name=name, relative_to=relative_to, **kw)
484 server_spec, name=name, relative_to=relative_to, **kw)
485
485
486 def quote_first_command_arg(self, arg): # pragma: no cover
486 def quote_first_command_arg(self, arg): # pragma: no cover
487 """
487 """
488 There's a bug in Windows when running an executable that's
488 There's a bug in Windows when running an executable that's
489 located inside a path with a space in it. This method handles
489 located inside a path with a space in it. This method handles
490 that case, or on non-Windows systems or an executable with no
490 that case, or on non-Windows systems or an executable with no
491 spaces, it just leaves well enough alone.
491 spaces, it just leaves well enough alone.
492 """
492 """
493 if sys.platform != 'win32' or ' ' not in arg:
493 if sys.platform != 'win32' or ' ' not in arg:
494 # Problem does not apply:
494 # Problem does not apply:
495 return arg
495 return arg
496 try:
496 try:
497 import win32api
497 import win32api
498 except ImportError:
498 except ImportError:
499 raise ValueError(
499 raise ValueError(
500 "The executable %r contains a space, and in order to "
500 "The executable %r contains a space, and in order to "
501 "handle this issue you must have the win32api module "
501 "handle this issue you must have the win32api module "
502 "installed" % arg)
502 "installed" % arg)
503 arg = win32api.GetShortPathName(arg)
503 arg = win32api.GetShortPathName(arg)
504 return arg
504 return arg
505
505
    def daemonize(self):  # pragma: no cover
        """Detach from the terminal via the classic double-fork dance.

        Raises DaemonizeException when the PID file already points at a
        live process.  After this returns, the process is a session
        leader with all inherited file descriptors closed and the
        standard streams redirected to /dev/null.
        """
        pid = live_pidfile(self.options.pid_file)
        if pid:
            raise DaemonizeException(
                "Daemon is already running (PID: %s from PID file %s)"
                % (pid, self.options.pid_file))

        if self.options.verbose > 0:
            self.out('Entering daemon mode')
        pid = os.fork()
        if pid:
            # The forked process also has a handle on resources, so we
            # *don't* want proper termination of the process, we just
            # want to exit quick (which os._exit() does)
            os._exit(0)
        # Make this the session leader
        os.setsid()
        # Fork again for good measure!
        pid = os.fork()
        if pid:
            os._exit(0)

        # @@: Should we set the umask and cwd now?

        import resource  # Resource usage information.
        maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
        if maxfd == resource.RLIM_INFINITY:
            maxfd = MAXFD
        # Iterate through and close all file descriptors.
        for fd in range(0, maxfd):
            try:
                os.close(fd)
            except OSError:  # ERROR, fd wasn't open to begin with (ignored)
                pass

        if hasattr(os, "devnull"):
            REDIRECT_TO = os.devnull
        else:
            REDIRECT_TO = "/dev/null"
        os.open(REDIRECT_TO, os.O_RDWR)  # standard input (0)
        # Duplicate standard input to standard output and standard error.
        os.dup2(0, 1)  # standard output (1)
        os.dup2(0, 2)  # standard error (2)
549
549
    def _remove_pid_file(self, written_pid, filename, verbosity):
        """atexit hook: delete *filename* if it still belongs to us.

        ``written_pid`` is the PID that originally wrote the file.  The
        file is left alone when a forked child is exiting, or when some
        other process has rewritten it in the meantime.  If the unlink
        fails, the file is truncated instead so a stale PID isn't left
        behind.
        """
        current_pid = os.getpid()
        if written_pid != current_pid:
            # A forked process must be exiting, not the process that
            # wrote the PID file
            return
        if not os.path.exists(filename):
            return
        with open(filename) as f:
            content = f.read().strip()
        try:
            pid_in_file = int(content)
        except ValueError:
            # Unparseable content: fall through and remove the file anyway.
            pass
        else:
            if pid_in_file != current_pid:
                msg = "PID file %s contains %s, not expected PID %s"
                self.out(msg % (filename, pid_in_file, current_pid))
                return
        if verbosity > 0:
            self.out("Removing PID file %s" % filename)
        try:
            os.unlink(filename)
            return
        except OSError as e:
            # Record, but don't give traceback
            self.out("Cannot remove PID file: (%s)" % e)
        # well, at least lets not leave the invalid PID around...
        try:
            with open(filename, 'w') as f:
                f.write('')
        except OSError as e:
            self.out('Stale PID left in file: %s (%s)' % (filename, e))
        else:
            self.out('Stale PID removed')
585
585
586 def record_pid(self, pid_file):
586 def record_pid(self, pid_file):
587 pid = os.getpid()
587 pid = os.getpid()
588 if self.options.verbose > 1:
588 if self.options.verbose > 1:
589 self.out('Writing PID %s to %s' % (pid, pid_file))
589 self.out('Writing PID %s to %s' % (pid, pid_file))
590 with open(pid_file, 'w') as f:
590 with open(pid_file, 'w') as f:
591 f.write(str(pid))
591 f.write(str(pid))
592 atexit.register(self._remove_pid_file, pid, pid_file, self.options.verbose)
592 atexit.register(self._remove_pid_file, pid, pid_file, self.options.verbose)
593
593
    def stop_daemon(self):  # pragma: no cover
        """Stop the daemonized server identified by the PID file.

        Return codes: 0 stopped successfully, 1 no/invalid/stale PID
        file, 2 stale PID file could not be deleted, 3 process refused
        to die after ~10 SIGTERMs.
        """
        pid_file = self.options.pid_file or 'pyramid.pid'
        if not os.path.exists(pid_file):
            self.out('No PID file exists in %s' % pid_file)
            return 1
        pid = read_pidfile(pid_file)
        if not pid:
            self.out("Not a valid PID file in %s" % pid_file)
            return 1
        pid = live_pidfile(pid_file)
        if not pid:
            # PID exists in the file but no such process is running.
            self.out("PID in %s is not valid (deleting)" % pid_file)
            try:
                os.unlink(pid_file)
            except (OSError, IOError) as e:
                self.out("Could not delete: %s" % e)
                return 2
            return 1
        # Send SIGTERM once a second, up to 10 times, until it dies.
        for j in range(10):
            if not live_pidfile(pid_file):
                break
            import signal
            kill(pid, signal.SIGTERM)
            time.sleep(1)
        else:
            # Loop exhausted without break: process is still alive.
            self.out("failed to kill web process %s" % pid)
            return 3
        if os.path.exists(pid_file):
            os.unlink(pid_file)
        return 0
624
624
625 def show_status(self): # pragma: no cover
625 def show_status(self): # pragma: no cover
626 pid_file = self.options.pid_file or 'pyramid.pid'
626 pid_file = self.options.pid_file or 'pyramid.pid'
627 if not os.path.exists(pid_file):
627 if not os.path.exists(pid_file):
628 self.out('No PID file %s' % pid_file)
628 self.out('No PID file %s' % pid_file)
629 return 1
629 return 1
630 pid = read_pidfile(pid_file)
630 pid = read_pidfile(pid_file)
631 if not pid:
631 if not pid:
632 self.out('No PID in file %s' % pid_file)
632 self.out('No PID in file %s' % pid_file)
633 return 1
633 return 1
634 pid = live_pidfile(pid_file)
634 pid = live_pidfile(pid_file)
635 if not pid:
635 if not pid:
636 self.out('PID %s in %s is not running' % (pid, pid_file))
636 self.out('PID %s in %s is not running' % (pid, pid_file))
637 return 1
637 return 1
638 self.out('Server running in PID %s' % pid)
638 self.out('Server running in PID %s' % pid)
639 return 0
639 return 0
640
640
641 def restart_with_reloader(self): # pragma: no cover
641 def restart_with_reloader(self): # pragma: no cover
642 self.restart_with_monitor(reloader=True)
642 self.restart_with_monitor(reloader=True)
643
643
    def restart_with_monitor(self, reloader=False):  # pragma: no cover
        """Re-exec this command in a supervised child process, restarting
        it as needed.

        With ``reloader=True`` the child runs the file monitor and is
        restarted whenever it exits with code 3; any other exit code is
        returned.  Without it (monitor-parent mode) the child is
        restarted on every exit.  Returns 1 on Ctrl-C.
        """
        if self.options.verbose > 0:
            if reloader:
                self.out('Starting subprocess with file monitor')
            else:
                self.out('Starting subprocess with monitor parent')
        while 1:
            # Re-run the exact same command line in a child process,
            # with an env flag telling it which role it plays.
            args = [self.quote_first_command_arg(sys.executable)] + sys.argv
            new_environ = os.environ.copy()
            if reloader:
                new_environ[self._reloader_environ_key] = 'true'
            else:
                new_environ[self._monitor_environ_key] = 'true'
            proc = None
            try:
                try:
                    _turn_sigterm_into_systemexit()
                    proc = subprocess32.Popen(args, env=new_environ)
                    exit_code = proc.wait()
                    # Cleared so the finally block won't SIGTERM a child
                    # that already exited.
                    proc = None
                except KeyboardInterrupt:
                    self.out('^C caught in monitor process')
                    if self.options.verbose > 1:
                        raise
                    return 1
            finally:
                if proc is not None:
                    import signal
                    try:
                        kill(proc.pid, signal.SIGTERM)
                    except (OSError, IOError):
                        pass

            if reloader:
                # Reloader always exits with code 3; but if we are
                # a monitor, any exit code will restart
                if exit_code != 3:
                    return exit_code
            if self.options.verbose > 0:
                self.out('%s %s %s' % ('-' * 20, 'Restarting', '-' * 20))
684
684
685 def change_user_group(self, user, group): # pragma: no cover
685 def change_user_group(self, user, group): # pragma: no cover
686 if not user and not group:
686 if not user and not group:
687 return
687 return
688 import pwd
688 import pwd
689 import grp
689 import grp
690 uid = gid = None
690 uid = gid = None
691 if group:
691 if group:
692 try:
692 try:
693 gid = int(group)
693 gid = int(group)
694 group = grp.getgrgid(gid).gr_name
694 group = grp.getgrgid(gid).gr_name
695 except ValueError:
695 except ValueError:
696 try:
696 try:
697 entry = grp.getgrnam(group)
697 entry = grp.getgrnam(group)
698 except KeyError:
698 except KeyError:
699 raise ValueError(
699 raise ValueError(
700 "Bad group: %r; no such group exists" % group)
700 "Bad group: %r; no such group exists" % group)
701 gid = entry.gr_gid
701 gid = entry.gr_gid
702 try:
702 try:
703 uid = int(user)
703 uid = int(user)
704 user = pwd.getpwuid(uid).pw_name
704 user = pwd.getpwuid(uid).pw_name
705 except ValueError:
705 except ValueError:
706 try:
706 try:
707 entry = pwd.getpwnam(user)
707 entry = pwd.getpwnam(user)
708 except KeyError:
708 except KeyError:
709 raise ValueError(
709 raise ValueError(
710 "Bad username: %r; no such user exists" % user)
710 "Bad username: %r; no such user exists" % user)
711 if not gid:
711 if not gid:
712 gid = entry.pw_gid
712 gid = entry.pw_gid
713 uid = entry.pw_uid
713 uid = entry.pw_uid
714 if self.options.verbose > 0:
714 if self.options.verbose > 0:
715 self.out('Changing user to %s:%s (%s:%s)' % (
715 self.out('Changing user to %s:%s (%s:%s)' % (
716 user, group or '(unknown)', uid, gid))
716 user, group or '(unknown)', uid, gid))
717 if gid:
717 if gid:
718 os.setgid(gid)
718 os.setgid(gid)
719 if uid:
719 if uid:
720 os.setuid(uid)
720 os.setuid(uid)
721
721
722
722
class LazyWriter(object):

    """
    File-like object that opens a file lazily when it is first written
    to.
    """

    def __init__(self, filename, mode='w'):
        self.filename = filename
        self.mode = mode
        self.fileobj = None
        # Guards against two threads both opening the file on first write.
        self.lock = threading.Lock()

    def open(self):
        """Open the underlying file on first use and return it."""
        if self.fileobj is None:
            with self.lock:
                self.fileobj = open(self.filename, self.mode)
        return self.fileobj

    def close(self):
        """Close the underlying file if it was ever opened."""
        handle = self.fileobj
        if handle is not None:
            handle.close()

    def __del__(self):
        self.close()

    def write(self, text):
        """Write *text*, opening the file if needed, and flush."""
        handle = self.open()
        handle.write(text)
        handle.flush()

    def writelines(self, text):
        """Write a sequence of strings, opening the file if needed."""
        handle = self.open()
        handle.writelines(text)
        handle.flush()

    def flush(self):
        """Flush the underlying file (opens it if not yet open)."""
        self.open().flush()
762
762
763
763
def live_pidfile(pidfile):  # pragma: no cover
    """
    (pidfile:str) -> int | None
    Returns an int found in the named file, if there is one,
    and if there is a running process with that process id.
    Return None if no such process exists.
    """
    pid = read_pidfile(pidfile)
    if not pid:
        return None
    try:
        kill(int(pid), 0)
    except OSError as exc:
        # EPERM means the process exists but is owned by someone else.
        return pid if exc.errno == errno.EPERM else None
    return pid
780
780
781
781
def read_pidfile(filename):
    """Return the integer pid stored in ``filename``.

    Returns None when the file does not exist, cannot be read, or does
    not contain an integer.
    """
    if not os.path.exists(filename):
        return None
    try:
        with open(filename) as pid_file:
            return int(pid_file.read().strip())
    except (ValueError, IOError):
        return None
792
792
793
793
def ensure_port_cleanup(
        bound_addresses, maxtries=30, sleeptime=2):  # pragma: no cover
    """
    Make sure any open ports are closed at interpreter exit.

    Works by registering an atexit hook that connects to each address
    until it gets connection refused. Servers should call like::

        ensure_port_cleanup([80, 443])
    """
    atexit.register(
        _cleanup_ports, bound_addresses,
        maxtries=maxtries, sleeptime=sleeptime)
806
806
807
807
def _cleanup_ports(
        bound_addresses, maxtries=30, sleeptime=2):  # pragma: no cover
    """Wait until every address in ``bound_addresses`` refuses connections.

    Probes each address up to ``maxtries`` times, sleeping ``sleeptime``
    seconds between attempts; a socket error other than ECONNREFUSED is
    re-raised. Raises SystemExit when an address never closes.

    Fix over the original: every probe socket is closed in ``finally``;
    previously only the very last socket was closed, leaking one file
    descriptor per successful connect.
    """
    import socket
    import errno
    for bound_address in bound_addresses:
        for attempt in range(maxtries):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                try:
                    sock.connect(bound_address)
                except socket.error as e:
                    if e.args[0] != errno.ECONNREFUSED:
                        raise
                    break  # connection refused: the port is free
                else:
                    time.sleep(sleeptime)  # still open, try again
            finally:
                sock.close()
        else:
            raise SystemExit('Timeout waiting for port.')
827
827
828
828
def _turn_sigterm_into_systemexit():  # pragma: no cover
    """
    Install a SIGTERM handler that raises SystemExit.

    Silently does nothing when the signal module is unavailable.
    """
    try:
        import signal
    except ImportError:
        return

    def handle_term(signo, frame):
        raise SystemExit

    signal.signal(signal.SIGTERM, handle_term)
840
840
841
841
def install_reloader(poll_interval=1, extra_files=None):  # pragma: no cover
    """
    Install the reloading monitor.

    Starts a daemon thread that polls watched files every
    ``poll_interval`` seconds and terminates the process (exit status 3)
    when one changes, so a wrapper script can restart the server.

    On some platforms server threads may not terminate when the main
    thread does, causing ports to remain open/locked.

    :param poll_interval: seconds between file polls.
    :param extra_files: additional file paths to watch.
    """
    mon = Monitor(poll_interval=poll_interval)
    if extra_files is None:
        extra_files = []
    mon.extra_files.extend(extra_files)
    t = threading.Thread(target=mon.periodic_reload)
    # Thread.setDaemon() is deprecated; set the attribute directly.
    t.daemon = True
    t.start()
858
858
859
859
class classinstancemethod(object):
    """
    Acts like a class method when called from a class, like an
    instance method when called by an instance. The method should
    take two arguments, 'self' and 'cls'; one of these will be None
    depending on how the method was called.
    """

    def __init__(self, func):
        self.func = func
        # Propagate the wrapped function's docstring to the descriptor.
        self.__doc__ = func.__doc__

    def __get__(self, obj, type=None):
        # Descriptor protocol: bind both the instance (may be None for
        # class access) and the class into a callable wrapper.
        return _methodwrapper(self.func, obj=obj, type=type)
874
874
875
875
class _methodwrapper(object):
    """Callable produced by ``classinstancemethod.__get__``: prepends the
    bound instance (or None) and class to the call arguments."""

    def __init__(self, func, obj, type):
        self.func = func
        self.obj = obj
        self.type = type

    def __call__(self, *args, **kw):
        assert 'self' not in kw and 'cls' not in kw, (
            "You cannot use 'self' or 'cls' arguments to a "
            "classinstancemethod")
        return self.func(self.obj, self.type, *args, **kw)
888
888
889
889
class Monitor(object):  # pragma: no cover
    """
    A file monitor and server restarter.

    Use this like:

    ..code-block:: Python

        install_reloader()

    Then make sure your server is installed with a shell script like::

        err=3
        while test "$err" -eq 3 ; do
            python server.py
            err="$?"
        done

    or is run from this .bat file (if you use Windows)::

        @echo off
        :repeat
            python server.py
        if %errorlevel% == 3 goto repeat

    or run a monitoring process in Python (``pserve --reload`` does
    this).

    Use the ``watch_file(filename)`` function to cause a reload/restart for
    other non-Python files (e.g., configuration files). If you have
    a dynamic set of files that grows over time you can use something like::

        def watch_config_files():
            return CONFIG_FILE_CACHE.keys()
        add_file_callback(watch_config_files)

    Then every time the reloader polls files it will call
    ``watch_config_files`` and check all the filenames it returns.
    """
    # All live Monitor instances; module-level watch_file/add_file_callback
    # fan out to every one of them.
    instances = []
    global_extra_files = []
    global_file_callbacks = []

    def __init__(self, poll_interval):
        self.module_mtimes = {}
        self.keep_running = True
        self.poll_interval = poll_interval
        self.extra_files = list(self.global_extra_files)
        self.instances.append(self)
        self.file_callbacks = list(self.global_file_callbacks)

    def _exit(self):
        # use os._exit() here and not sys.exit() since within a
        # thread sys.exit() just closes the given thread and
        # won't kill the process; note os._exit does not call
        # any atexit callbacks, nor does it do finally blocks,
        # flush open files, etc. In other words, it is rude.
        os._exit(3)

    def periodic_reload(self):
        """Poll forever; terminate the process when a watched file changes."""
        while True:
            if not self.check_reload():
                self._exit()
                break
            time.sleep(self.poll_interval)

    def check_reload(self):
        """Return False if any watched file changed since the last poll."""
        filenames = list(self.extra_files)
        for file_callback in self.file_callbacks:
            try:
                filenames.extend(file_callback())
            # Fixed: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt raised inside a callback.
            except Exception:
                print(
                    "Error calling reloader callback %r:" % file_callback)
                traceback.print_exc()
        for module in list(sys.modules.values()):
            try:
                filename = module.__file__
            except (AttributeError, ImportError):
                continue
            if filename is not None:
                filenames.append(filename)

        for filename in filenames:
            try:
                stat = os.stat(filename)
                if stat:
                    mtime = stat.st_mtime
                else:
                    mtime = 0
            except (OSError, IOError):
                continue
            # For compiled files, track the newer of .pyc and its source.
            if filename.endswith('.pyc') and os.path.exists(filename[:-1]):
                mtime = max(os.stat(filename[:-1]).st_mtime, mtime)
            if filename not in self.module_mtimes:
                self.module_mtimes[filename] = mtime
            elif self.module_mtimes[filename] < mtime:
                print("%s changed; reloading..." % filename)
                run_callback_for_pattern(filename)
                return False
        return True

    def watch_file(self, cls, filename):
        """Watch the named file for changes"""
        filename = os.path.abspath(filename)
        if self is None:
            # Called on the class: register with every live instance and
            # remember globally for instances created later.
            for instance in cls.instances:
                instance.watch_file(filename)
            cls.global_extra_files.append(filename)
        else:
            self.extra_files.append(filename)

    watch_file = classinstancemethod(watch_file)

    def add_file_callback(self, cls, callback):
        """Add a callback -- a function that takes no parameters -- that will
        return a list of filenames to watch for changes."""
        if self is None:
            for instance in cls.instances:
                instance.add_file_callback(callback)
            cls.global_file_callbacks.append(callback)
        else:
            self.file_callbacks.append(callback)

    add_file_callback = classinstancemethod(add_file_callback)
1015
1015
# Module-level conveniences: these classinstancemethod descriptors, accessed
# through the class, dispatch to all live Monitor instances.
watch_file = Monitor.watch_file
add_file_callback = Monitor.add_file_callback
1018
1018
1019
1019
def main(argv=sys.argv, quiet=False):
    # Entry point: build the server command from argv and run it.
    # NOTE(review): ``argv=sys.argv`` is bound once at import time —
    # confirm callers never expect later sys.argv mutations to be seen.
    command = RcServerCommand(argv, quiet=quiet)
    return command.run()
1023
1023
if __name__ == '__main__':  # pragma: no cover
    # ``main()`` may return None for success; coerce that to exit code 0.
    sys.exit(main() or 0)
@@ -1,273 +1,273 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from subprocess import Popen, PIPE
21 from subprocess32 import Popen, PIPE
22 import os
22 import os
23 import shutil
23 import shutil
24 import sys
24 import sys
25 import tempfile
25 import tempfile
26
26
27 import pytest
27 import pytest
28 from sqlalchemy.engine import url
28 from sqlalchemy.engine import url
29
29
30 from rhodecode.tests.fixture import TestINI
30 from rhodecode.tests.fixture import TestINI
31
31
32
32
def _get_dbs_from_metafunc(metafunc):
    """Return the database backends a test function should run against.

    A ``pytest.mark.dbs`` marker on the test function takes precedence;
    otherwise fall back to the ``--dbs`` command line option.
    """
    if hasattr(metafunc.function, 'dbs'):
        # Supported backends by this test function, created from
        # pytest.mark.dbs
        return metafunc.function.dbs.args
    return metafunc.config.getoption('--dbs')
41
42
42
def pytest_generate_tests(metafunc):
    """Parametrize ``db_backend_name`` from the ``--dbs`` option.

    Only backends both requested on the command line and supported by the
    test function are generated; tests with no remaining backend are
    marked for removal in ``pytest_collection_modifyitems``.
    """
    if 'db_backend' not in metafunc.fixturenames:
        return
    requested_backends = set(metafunc.config.getoption('--dbs'))
    backends = requested_backends.intersection(
        _get_dbs_from_metafunc(metafunc))
    # TODO: johbo: Disabling a backend did not work out with
    # parametrization, find better way to achieve this.
    if not backends:
        metafunc.function._skip = True
    metafunc.parametrize('db_backend_name', backends)
54
55
55
def pytest_collection_modifyitems(session, config, items):
    """Remove collected items whose test function was flagged ``_skip``."""
    items[:] = [
        item for item in items if not getattr(item.obj, '_skip', False)]
60
61
61
@pytest.fixture
def db_backend(
        request, db_backend_name, pylons_config, tmpdir_factory):
    """Build the DB backend helper for the parametrized backend name.

    An explicit ``--<backend>-connection-string`` option overrides the
    backend's default connection string.
    """
    option_name = '--{}-connection-string'.format(db_backend_name)
    connection_string = request.config.getoption(option_name) or None

    backend_class = _get_backend(db_backend_name)
    return backend_class(
        config_file=pylons_config,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        connection_string=connection_string)
74
75
75
def _get_backend(backend_type):
    """Map a backend name to its DBBackend subclass.

    Raises KeyError for unknown backend types.
    """
    backends = {
        'sqlite': SQLiteDBBackend,
        'postgres': PostgresDBBackend,
        'mysql': MySQLDBBackend,
        '': EmptyDBBackend,
    }
    return backends[backend_type]
83
83
84
84
class DBBackend(object):
    """Base helper driving ``paster`` setup/upgrade against one database.

    Subclasses supply the connection string format and the shell commands
    used to create, drop and import database dumps.
    """
    # Directory holding the per-backend fixture dumps.
    _store = os.path.dirname(os.path.abspath(__file__))
    # Backend identifier ('sqlite', 'mysql', 'postgres', ''); set by subclass.
    _type = None
    _base_ini_config = [{'app:main': {'vcs.start_server': 'false'}}]
    _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
    _base_db_name = 'rhodecode_test_db_backend'

    def __init__(
            self, config_file, db_name=None, basetemp=None,
            connection_string=None):
        self.fixture_store = os.path.join(self._store, self._type)
        self.db_name = db_name or self._base_db_name
        self._base_ini_file = config_file
        self.stderr = ''
        self.stdout = ''
        self._basetemp = basetemp or tempfile.gettempdir()
        self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
        # Goes through the property setter below, which also derives
        # user/password/host.
        self.connection_string = connection_string

    @property
    def connection_string(self):
        return self._connection_string

    @connection_string.setter
    def connection_string(self, new_connection_string):
        # Fall back to the backend default; otherwise substitute this
        # instance's db name into the supplied template string.
        if not new_connection_string:
            new_connection_string = self.get_default_connection_string()
        else:
            new_connection_string = new_connection_string.format(
                db_name=self.db_name)
        url_parts = url.make_url(new_connection_string)
        self._connection_string = new_connection_string
        self.user = url_parts.username
        self.password = url_parts.password
        self.host = url_parts.host

    def get_default_connection_string(self):
        # Must be provided by each concrete backend.
        raise NotImplementedError('default connection_string is required.')

    def execute(self, cmd, env=None, *args):
        """
        Runs command on the system with given ``args``.
        """
        command = cmd + ' ' + ' '.join(args)
        sys.stdout.write(command)

        # Tell Python to use UTF-8 encoding out stdout
        _env = os.environ.copy()
        _env['PYTHONIOENCODING'] = 'UTF-8'
        if env:
            _env.update(env)
        # NOTE(review): shell=True with an interpolated command string —
        # acceptable for trusted test fixtures only.
        self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
        self.stdout, self.stderr = self.p.communicate()
        sys.stdout.write('COMMAND:'+command+'\n')
        sys.stdout.write(self.stdout)
        return self.stdout, self.stderr

    def assert_returncode_success(self):
        # stderr of the last command is the failure message.
        assert self.p.returncode == 0, self.stderr

    def setup_rhodecode_db(self, ini_params=None, env=None):
        # Create a fresh RhodeCode database via ``paster setup-rhodecode``
        # using a temporary ini file pointing at this backend.
        if not ini_params:
            ini_params = self._base_ini_config

        ini_params.extend(self._db_url)
        with TestINI(self._base_ini_file, ini_params,
                     self._type, destroy=True) as _ini_file:
            if not os.path.isdir(self._repos_location):
                os.makedirs(self._repos_location)
            self.execute(
                "paster setup-rhodecode {0} --user=marcink "
                "--email=marcin@rhodeocode.com --password={1} "
                "--repos={2} --force-yes".format(
                    _ini_file, 'qweqwe', self._repos_location), env=env)

    def upgrade_database(self, ini_params=None):
        # Run ``paster upgrade-db`` against the configured database.
        if not ini_params:
            ini_params = self._base_ini_config
        ini_params.extend(self._db_url)

        test_ini = TestINI(
            self._base_ini_file, ini_params, self._type, destroy=True)
        with test_ini as ini_file:
            if not os.path.isdir(self._repos_location):
                os.makedirs(self._repos_location)
            self.execute(
                "paster upgrade-db {} --force-yes".format(ini_file))

    def setup_db(self):
        # Subclass hook: create the database itself.
        raise NotImplementedError

    def teardown_db(self):
        # Subclass hook: drop the database.
        raise NotImplementedError

    def import_dump(self, dumpname):
        # Subclass hook: load a fixture dump from self.fixture_store.
        raise NotImplementedError
182
182
183
183
class EmptyDBBackend(DBBackend):
    """No-op backend used when tests run without a real database."""
    _type = ''

    def setup_db(self):
        """Nothing to create."""

    def teardown_db(self):
        """Nothing to drop."""

    def import_dump(self, dumpname):
        """Nothing to import."""

    def assert_returncode_success(self):
        # No command was executed, so there is no return code to verify.
        assert True
198
198
199
199
class SQLiteDBBackend(DBBackend):
    # SQLite backend: the database is a single file in the base temp dir.
    _type = 'sqlite'

    def get_default_connection_string(self):
        return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)

    def setup_db(self):
        # dump schema for tests
        # cp -v $TEST_DB_NAME
        # Point the ini at this backend; the sqlite file itself is created
        # lazily by SQLAlchemy on first use.
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url': self.connection_string}}]

    def import_dump(self, dumpname):
        # Importing a dump is just copying the fixture sqlite file in place.
        dump = os.path.join(self.fixture_store, dumpname)
        shutil.copy(
            dump,
            os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self)))

    def teardown_db(self):
        self.execute("rm -rf {}.sqlite".format(
            os.path.join(self._basetemp, self.db_name)))
221
221
222
222
class MySQLDBBackend(DBBackend):
    # MySQL backend: drives the ``mysql`` CLI via DBBackend.execute().
    _type = 'mysql'

    def get_default_connection_string(self):
        return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)

    def setup_db(self):
        # dump schema for tests
        # mysqldump -uroot -pqweqwe $TEST_DB_NAME
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url': self.connection_string}}]
        self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
            self.user, self.password, self.db_name))

    def import_dump(self, dumpname):
        # Pipe the SQL fixture dump into the freshly created database.
        dump = os.path.join(self.fixture_store, dumpname)
        self.execute("mysql -u{} -p{} {} < {}".format(
            self.user, self.password, self.db_name, dump))

    def teardown_db(self):
        self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
            self.user, self.password, self.db_name))
245
245
246
246
class PostgresDBBackend(DBBackend):
    # PostgreSQL backend: drives ``psql`` via DBBackend.execute(), passing
    # the password through the PGPASSWORD environment variable.
    _type = 'postgres'

    def get_default_connection_string(self):
        return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)

    def setup_db(self):
        # dump schema for tests
        # pg_dump -U postgres -h localhost $TEST_DB_NAME
        self._db_url = [{'app:main': {
            'sqlalchemy.db1.url':
                self.connection_string}}]
        self.execute("PGPASSWORD={} psql -U {} -h localhost "
                     "-c 'create database '{}';'".format(
                         self.password, self.user, self.db_name))

    def teardown_db(self):
        self.execute("PGPASSWORD={} psql -U {} -h localhost "
                     "-c 'drop database if exists '{}';'".format(
                         self.password, self.user, self.db_name))

    def import_dump(self, dumpname):
        # Replay the SQL fixture dump in a single transaction (-1).
        dump = os.path.join(self.fixture_store, dumpname)
        self.execute(
            "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
            "-f {}".format(
                self.password, self.user, self.db_name, dump))
@@ -1,136 +1,136 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Checking the chunked data transfer via HTTP
22 Checking the chunked data transfer via HTTP
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import subprocess
27 import subprocess32
28
28
29 import pytest
29 import pytest
30 import requests
30 import requests
31
31
32 from rhodecode.lib.middleware.utils import scm_app_http
32 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.tests.utils import wait_for_url
33 from rhodecode.tests.utils import wait_for_url
34
34
35
35
36 def test_does_chunked_end_to_end_transfer(scm_app):
36 def test_does_chunked_end_to_end_transfer(scm_app):
37 response = requests.post(scm_app, data='', stream=True)
37 response = requests.post(scm_app, data='', stream=True)
38 assert response.headers['Transfer-Encoding'] == 'chunked'
38 assert response.headers['Transfer-Encoding'] == 'chunked'
39 times = [time.time() for chunk in response.raw.read_chunked()]
39 times = [time.time() for chunk in response.raw.read_chunked()]
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
41
41
42
42
43 @pytest.fixture
43 @pytest.fixture
44 def echo_app_chunking(request, available_port_factory):
44 def echo_app_chunking(request, available_port_factory):
45 """
45 """
46 Run the EchoApp via Waitress in a subprocess.
46 Run the EchoApp via Waitress in a subprocess.
47
47
48 Return the URL endpoint to reach the app.
48 Return the URL endpoint to reach the app.
49 """
49 """
50 port = available_port_factory()
50 port = available_port_factory()
51 command = (
51 command = (
52 'waitress-serve --send-bytes 1 --port {port} --call '
52 'waitress-serve --send-bytes 1 --port {port} --call '
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
54 ':create_echo_app')
54 ':create_echo_app')
55 command = command.format(port=port)
55 command = command.format(port=port)
56 proc = subprocess.Popen(command.split(' '), bufsize=0)
56 proc = subprocess32.Popen(command.split(' '), bufsize=0)
57 echo_app_url = 'http://localhost:' + str(port)
57 echo_app_url = 'http://localhost:' + str(port)
58
58
59 @request.addfinalizer
59 @request.addfinalizer
60 def stop_echo_app():
60 def stop_echo_app():
61 proc.kill()
61 proc.kill()
62
62
63 return echo_app_url
63 return echo_app_url
64
64
65
65
66 @pytest.fixture
66 @pytest.fixture
67 def scm_app(request, available_port_factory, echo_app_chunking):
67 def scm_app(request, available_port_factory, echo_app_chunking):
68 """
68 """
69 Run the scm_app in Waitress.
69 Run the scm_app in Waitress.
70
70
71 Returns the URL endpoint where this app can be reached.
71 Returns the URL endpoint where this app can be reached.
72 """
72 """
73 port = available_port_factory()
73 port = available_port_factory()
74 command = (
74 command = (
75 'waitress-serve --send-bytes 1 --port {port} --call '
75 'waitress-serve --send-bytes 1 --port {port} --call '
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
77 ':create_scm_app')
77 ':create_scm_app')
78 command = command.format(port=port)
78 command = command.format(port=port)
79 env = os.environ.copy()
79 env = os.environ.copy()
80 env["RC_ECHO_URL"] = echo_app_chunking
80 env["RC_ECHO_URL"] = echo_app_chunking
81 proc = subprocess.Popen(command.split(' '), bufsize=0, env=env)
81 proc = subprocess32.Popen(command.split(' '), bufsize=0, env=env)
82 scm_app_url = 'http://localhost:' + str(port)
82 scm_app_url = 'http://localhost:' + str(port)
83 wait_for_url(scm_app_url)
83 wait_for_url(scm_app_url)
84
84
85 @request.addfinalizer
85 @request.addfinalizer
86 def stop_echo_app():
86 def stop_echo_app():
87 proc.kill()
87 proc.kill()
88
88
89 return scm_app_url
89 return scm_app_url
90
90
91
91
92 class EchoApp(object):
92 class EchoApp(object):
93 """
93 """
94 Stub WSGI application which returns a chunked response to every request.
94 Stub WSGI application which returns a chunked response to every request.
95 """
95 """
96
96
97 def __init__(self, repo_path, repo_name, config):
97 def __init__(self, repo_path, repo_name, config):
98 self._repo_path = repo_path
98 self._repo_path = repo_path
99
99
100 def __call__(self, environ, start_response):
100 def __call__(self, environ, start_response):
101 environ['wsgi.input'].read()
101 environ['wsgi.input'].read()
102 status = '200 OK'
102 status = '200 OK'
103 headers = []
103 headers = []
104 start_response(status, headers)
104 start_response(status, headers)
105 return result_generator()
105 return result_generator()
106
106
107
107
108 def result_generator():
108 def result_generator():
109 """
109 """
110 Simulate chunked results.
110 Simulate chunked results.
111
111
112 The intended usage is to simulate a chunked response as we would get it
112 The intended usage is to simulate a chunked response as we would get it
113 out of a vcs operation during a call to "hg clone".
113 out of a vcs operation during a call to "hg clone".
114 """
114 """
115 yield 'waiting 2 seconds'
115 yield 'waiting 2 seconds'
116 # Wait long enough so that the first chunk can go out
116 # Wait long enough so that the first chunk can go out
117 time.sleep(2)
117 time.sleep(2)
118 yield 'final chunk'
118 yield 'final chunk'
119 # Another small wait, otherwise they go together
119 # Another small wait, otherwise they go together
120 time.sleep(0.1)
120 time.sleep(0.1)
121
121
122
122
123 def create_echo_app():
123 def create_echo_app():
124 """
124 """
125 Create EchoApp filled with stub data.
125 Create EchoApp filled with stub data.
126 """
126 """
127 return EchoApp('stub_path', 'repo_name', {})
127 return EchoApp('stub_path', 'repo_name', {})
128
128
129
129
130 def create_scm_app():
130 def create_scm_app():
131 """
131 """
132 Create a scm_app hooked up to speak to EchoApp.
132 Create a scm_app hooked up to speak to EchoApp.
133 """
133 """
134 echo_app_url = os.environ["RC_ECHO_URL"]
134 echo_app_url = os.environ["RC_ECHO_URL"]
135 return scm_app_http.VcsHttpProxy(
135 return scm_app_http.VcsHttpProxy(
136 echo_app_url, 'stub_path', 'stub_name', None)
136 echo_app_url, 'stub_path', 'stub_name', None)
@@ -1,463 +1,463 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module to test the performance of pull, push and clone operations.
22 Module to test the performance of pull, push and clone operations.
23
23
24 It works by replaying a group of commits to the repo.
24 It works by replaying a group of commits to the repo.
25 """
25 """
26
26
27 import argparse
27 import argparse
28 import collections
28 import collections
29 import ConfigParser
29 import ConfigParser
30 import functools
30 import functools
31 import itertools
31 import itertools
32 import os
32 import os
33 import pprint
33 import pprint
34 import shutil
34 import shutil
35 import subprocess
35 import subprocess32
36 import sys
36 import sys
37 import time
37 import time
38
38
39 import api
39 import api
40
40
41
41
42 def mean(container):
42 def mean(container):
43 """Return the mean of the container."""
43 """Return the mean of the container."""
44 if not container:
44 if not container:
45 return -1.0
45 return -1.0
46 return sum(container) / len(container)
46 return sum(container) / len(container)
47
47
48
48
49 def keep_cwd(f):
49 def keep_cwd(f):
50 """Decorator that keeps track of the starting working directory."""
50 """Decorator that keeps track of the starting working directory."""
51 @functools.wraps(f)
51 @functools.wraps(f)
52 def wrapped_f(*args, **kwargs):
52 def wrapped_f(*args, **kwargs):
53 cur_dir = os.getcwd()
53 cur_dir = os.getcwd()
54 try:
54 try:
55 return f(*args, **kwargs)
55 return f(*args, **kwargs)
56 finally:
56 finally:
57 os.chdir(cur_dir)
57 os.chdir(cur_dir)
58
58
59 return wrapped_f
59 return wrapped_f
60
60
61
61
62 def timed(f):
62 def timed(f):
63 """Decorator that returns the time it took to execute the function."""
63 """Decorator that returns the time it took to execute the function."""
64 @functools.wraps(f)
64 @functools.wraps(f)
65 def wrapped_f(*args, **kwargs):
65 def wrapped_f(*args, **kwargs):
66 start_time = time.time()
66 start_time = time.time()
67 try:
67 try:
68 f(*args, **kwargs)
68 f(*args, **kwargs)
69 finally:
69 finally:
70 return time.time() - start_time
70 return time.time() - start_time
71
71
72 return wrapped_f
72 return wrapped_f
73
73
74
74
75 def execute(*popenargs, **kwargs):
75 def execute(*popenargs, **kwargs):
76 """Extension of subprocess.check_output to support writing to stdin."""
76 """Extension of subprocess.check_output to support writing to stdin."""
77 input = kwargs.pop('stdin', None)
77 input = kwargs.pop('stdin', None)
78 stdin = None
78 stdin = None
79 if input:
79 if input:
80 stdin = subprocess.PIPE
80 stdin = subprocess32.PIPE
81 #if 'stderr' not in kwargs:
81 #if 'stderr' not in kwargs:
82 # kwargs['stderr'] = subprocess.PIPE
82 # kwargs['stderr'] = subprocess32.PIPE
83 if 'stdout' in kwargs:
83 if 'stdout' in kwargs:
84 raise ValueError('stdout argument not allowed, it will be overridden.')
84 raise ValueError('stdout argument not allowed, it will be overridden.')
85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
85 process = subprocess32.Popen(stdin=stdin, stdout=subprocess32.PIPE,
86 *popenargs, **kwargs)
86 *popenargs, **kwargs)
87 output, error = process.communicate(input=input)
87 output, error = process.communicate(input=input)
88 retcode = process.poll()
88 retcode = process.poll()
89 if retcode:
89 if retcode:
90 cmd = kwargs.get("args")
90 cmd = kwargs.get("args")
91 if cmd is None:
91 if cmd is None:
92 cmd = popenargs[0]
92 cmd = popenargs[0]
93 print cmd, output, error
93 print cmd, output, error
94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
94 raise subprocess32.CalledProcessError(retcode, cmd, output=output)
95 return output
95 return output
96
96
97
97
98 def get_repo_name(repo_url):
98 def get_repo_name(repo_url):
99 """Extract the repo name from its url."""
99 """Extract the repo name from its url."""
100 repo_url = repo_url.rstrip('/')
100 repo_url = repo_url.rstrip('/')
101 return repo_url.split('/')[-1].split('.')[0]
101 return repo_url.split('/')[-1].split('.')[0]
102
102
103
103
104 class TestPerformanceBase(object):
104 class TestPerformanceBase(object):
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
106 skip_commits):
106 skip_commits):
107 self.repo_url = repo_url
107 self.repo_url = repo_url
108 self.repo_name = get_repo_name(self.repo_url)
108 self.repo_name = get_repo_name(self.repo_url)
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
110 self.base_dir = os.path.abspath(base_dir)
110 self.base_dir = os.path.abspath(base_dir)
111 self.n_commits = n_commits
111 self.n_commits = n_commits
112 self.max_commits = max_commits
112 self.max_commits = max_commits
113 self.skip_commits = skip_commits
113 self.skip_commits = skip_commits
114 self.push_times = []
114 self.push_times = []
115 self.pull_times = []
115 self.pull_times = []
116 self.empty_pull_times = []
116 self.empty_pull_times = []
117 self.clone_time = -1.0
117 self.clone_time = -1.0
118 self.last_commit = None
118 self.last_commit = None
119
119
120 self.cloned_repo = ''
120 self.cloned_repo = ''
121 self.pull_repo = ''
121 self.pull_repo = ''
122 self.orig_repo = ''
122 self.orig_repo = ''
123
123
124 def run(self):
124 def run(self):
125 try:
125 try:
126 self.test()
126 self.test()
127 except Exception as error:
127 except Exception as error:
128 print error
128 print error
129 finally:
129 finally:
130 self.cleanup()
130 self.cleanup()
131
131
132 print 'Clone time :', self.clone_time
132 print 'Clone time :', self.clone_time
133 print 'Push time :', mean(self.push_times)
133 print 'Push time :', mean(self.push_times)
134 print 'Pull time :', mean(self.pull_times)
134 print 'Pull time :', mean(self.pull_times)
135 print 'Empty pull time:', mean(self.empty_pull_times)
135 print 'Empty pull time:', mean(self.empty_pull_times)
136
136
137 return {
137 return {
138 'clone': self.clone_time,
138 'clone': self.clone_time,
139 'push': mean(self.push_times),
139 'push': mean(self.push_times),
140 'pull': mean(self.pull_times),
140 'pull': mean(self.pull_times),
141 'empty_pull': mean(self.empty_pull_times),
141 'empty_pull': mean(self.empty_pull_times),
142 }
142 }
143
143
144 @keep_cwd
144 @keep_cwd
145 def test(self):
145 def test(self):
146 os.chdir(self.base_dir)
146 os.chdir(self.base_dir)
147
147
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
149 if not os.path.exists(self.orig_repo):
149 if not os.path.exists(self.orig_repo):
150 self.clone_repo(self.repo_url, default_only=True)
150 self.clone_repo(self.repo_url, default_only=True)
151
151
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
153
153
154 self.add_remote(self.orig_repo, upstream_url)
154 self.add_remote(self.orig_repo, upstream_url)
155
155
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
157 self.clone_repo(upstream_url, self.pull_repo)
157 self.clone_repo(upstream_url, self.pull_repo)
158
158
159 commits = self.get_commits(self.orig_repo)
159 commits = self.get_commits(self.orig_repo)
160 self.last_commit = commits[-1]
160 self.last_commit = commits[-1]
161 if self.skip_commits:
161 if self.skip_commits:
162 self.push(
162 self.push(
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 commits = commits[self.skip_commits:self.max_commits]
164 commits = commits[self.skip_commits:self.max_commits]
165
165
166 print 'Working with %d commits' % len(commits)
166 print 'Working with %d commits' % len(commits)
167 for i in xrange(self.n_commits - 1, len(commits), self.n_commits):
167 for i in xrange(self.n_commits - 1, len(commits), self.n_commits):
168 commit = commits[i]
168 commit = commits[i]
169 print 'Processing commit %s (%d)' % (commit, i + 1)
169 print 'Processing commit %s (%d)' % (commit, i + 1)
170 self.push_times.append(
170 self.push_times.append(
171 self.push(self.orig_repo, commit, 'upstream'))
171 self.push(self.orig_repo, commit, 'upstream'))
172 self.check_remote_last_commit_is(commit, upstream_url)
172 self.check_remote_last_commit_is(commit, upstream_url)
173
173
174 self.pull_times.append(self.pull(self.pull_repo))
174 self.pull_times.append(self.pull(self.pull_repo))
175 self.check_local_last_commit_is(commit, self.pull_repo)
175 self.check_local_last_commit_is(commit, self.pull_repo)
176
176
177 self.empty_pull_times.append(self.pull(self.pull_repo))
177 self.empty_pull_times.append(self.pull(self.pull_repo))
178
178
179 self.cloned_repo = os.path.join(self.base_dir,
179 self.cloned_repo = os.path.join(self.base_dir,
180 '%s_clone' % self.repo_name)
180 '%s_clone' % self.repo_name)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
182
182
183 def cleanup(self):
183 def cleanup(self):
184 try:
184 try:
185 self.delete_repo(self.upstream_repo_name)
185 self.delete_repo(self.upstream_repo_name)
186 except api.ApiError:
186 except api.ApiError:
187 # Continue in case we could not delete the repo. Maybe we did not
187 # Continue in case we could not delete the repo. Maybe we did not
188 # create it in the first place.
188 # create it in the first place.
189 pass
189 pass
190
190
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
193
193
194 if os.path.exists(self.orig_repo):
194 if os.path.exists(self.orig_repo):
195 self.remove_remote(self.orig_repo)
195 self.remove_remote(self.orig_repo)
196
196
197
197
198 class RhodeCodeMixin(object):
198 class RhodeCodeMixin(object):
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
200 def __init__(self, api_key):
200 def __init__(self, api_key):
201 self.api = api.RCApi(api_key=api_key)
201 self.api = api.RCApi(api_key=api_key)
202
202
203 def create_repo(self, repo_name, repo_type):
203 def create_repo(self, repo_name, repo_type):
204 return self.api.create_repo(repo_name, repo_type,
204 return self.api.create_repo(repo_name, repo_type,
205 'Repo for perfomance testing')
205 'Repo for perfomance testing')
206
206
207 def delete_repo(self, repo_name):
207 def delete_repo(self, repo_name):
208 return self.api.delete_repo(repo_name)
208 return self.api.delete_repo(repo_name)
209
209
210
210
211 class GitMixin(object):
211 class GitMixin(object):
212 """Mixin providing the git operations."""
212 """Mixin providing the git operations."""
213 @timed
213 @timed
214 def clone_repo(self, repo_url, destination=None, default_only=False):
214 def clone_repo(self, repo_url, destination=None, default_only=False):
215 args = ['git', 'clone']
215 args = ['git', 'clone']
216 if default_only:
216 if default_only:
217 args.extend(['--branch', 'master', '--single-branch'])
217 args.extend(['--branch', 'master', '--single-branch'])
218 args.append(repo_url)
218 args.append(repo_url)
219 if destination:
219 if destination:
220 args.append(destination)
220 args.append(destination)
221 execute(args)
221 execute(args)
222
222
223 @keep_cwd
223 @keep_cwd
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
225 self.remove_remote(repo, remote_name)
225 self.remove_remote(repo, remote_name)
226 os.chdir(repo)
226 os.chdir(repo)
227 execute(['git', 'remote', 'add', remote_name, remote_url])
227 execute(['git', 'remote', 'add', remote_name, remote_url])
228
228
229 @keep_cwd
229 @keep_cwd
230 def remove_remote(self, repo, remote_name='upstream'):
230 def remove_remote(self, repo, remote_name='upstream'):
231 os.chdir(repo)
231 os.chdir(repo)
232 remotes = execute(['git', 'remote']).split('\n')
232 remotes = execute(['git', 'remote']).split('\n')
233 if remote_name in remotes:
233 if remote_name in remotes:
234 execute(['git', 'remote', 'remove', remote_name])
234 execute(['git', 'remote', 'remove', remote_name])
235
235
236 @keep_cwd
236 @keep_cwd
237 def get_commits(self, repo, branch='master'):
237 def get_commits(self, repo, branch='master'):
238 os.chdir(repo)
238 os.chdir(repo)
239 commits_list = execute(
239 commits_list = execute(
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
241 return commits_list.strip().split('\n')[::-1]
241 return commits_list.strip().split('\n')[::-1]
242
242
243 @timed
243 @timed
244 def push(self, repo, commit, remote_name=None):
244 def push(self, repo, commit, remote_name=None):
245 os.chdir(repo)
245 os.chdir(repo)
246 try:
246 try:
247 execute(['git', 'reset', '--soft', commit])
247 execute(['git', 'reset', '--soft', commit])
248 args = ['git', 'push']
248 args = ['git', 'push']
249 if remote_name:
249 if remote_name:
250 args.append(remote_name)
250 args.append(remote_name)
251 execute(args)
251 execute(args)
252 finally:
252 finally:
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
254
254
255 @timed
255 @timed
256 def pull(self, repo):
256 def pull(self, repo):
257 os.chdir(repo)
257 os.chdir(repo)
258 execute(['git', 'pull'])
258 execute(['git', 'pull'])
259
259
260 def _remote_last_commit(self, repo_url):
260 def _remote_last_commit(self, repo_url):
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
262 return output.split()[0]
262 return output.split()[0]
263
263
264 def check_remote_last_commit_is(self, commit, repo_url):
264 def check_remote_last_commit_is(self, commit, repo_url):
265 last_remote_commit = self._remote_last_commit(repo_url)
265 last_remote_commit = self._remote_last_commit(repo_url)
266 if last_remote_commit != commit:
266 if last_remote_commit != commit:
267 raise Exception('Push did not work, expected commit %s but got %s' %
267 raise Exception('Push did not work, expected commit %s but got %s' %
268 (commit, last_remote_commit))
268 (commit, last_remote_commit))
269
269
270 @keep_cwd
270 @keep_cwd
271 def _local_last_commit(self, repo):
271 def _local_last_commit(self, repo):
272 os.chdir(repo)
272 os.chdir(repo)
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
274
274
275 def check_local_last_commit_is(self, commit, repo):
275 def check_local_last_commit_is(self, commit, repo):
276 last_local_commit = self._local_last_commit(repo)
276 last_local_commit = self._local_last_commit(repo)
277 if last_local_commit != commit:
277 if last_local_commit != commit:
278 raise Exception('Pull did not work, expected commit %s but got %s' %
278 raise Exception('Pull did not work, expected commit %s but got %s' %
279 (commit, last_local_commit))
279 (commit, last_local_commit))
280
280
281
281
282 class HgMixin(object):
282 class HgMixin(object):
283 """Mixin providing the mercurial operations."""
283 """Mixin providing the mercurial operations."""
284 @timed
284 @timed
285 def clone_repo(self, repo_url, destination=None, default_only=False):
285 def clone_repo(self, repo_url, destination=None, default_only=False):
286 args = ['hg', 'clone']
286 args = ['hg', 'clone']
287 if default_only:
287 if default_only:
288 args.extend(['--branch', 'default'])
288 args.extend(['--branch', 'default'])
289 args.append(repo_url)
289 args.append(repo_url)
290 if destination:
290 if destination:
291 args.append(destination)
291 args.append(destination)
292 execute(args)
292 execute(args)
293
293
294 @keep_cwd
294 @keep_cwd
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
296 self.remove_remote(repo, remote_name)
296 self.remove_remote(repo, remote_name)
297 os.chdir(repo)
297 os.chdir(repo)
298 hgrc = ConfigParser.RawConfigParser()
298 hgrc = ConfigParser.RawConfigParser()
299 hgrc.read('.hg/hgrc')
299 hgrc.read('.hg/hgrc')
300 hgrc.set('paths', remote_name, remote_url)
300 hgrc.set('paths', remote_name, remote_url)
301 with open('.hg/hgrc', 'w') as f:
301 with open('.hg/hgrc', 'w') as f:
302 hgrc.write(f)
302 hgrc.write(f)
303
303
304 @keep_cwd
304 @keep_cwd
305 def remove_remote(self, repo, remote_name='upstream'):
305 def remove_remote(self, repo, remote_name='upstream'):
306 os.chdir(repo)
306 os.chdir(repo)
307 hgrc = ConfigParser.RawConfigParser()
307 hgrc = ConfigParser.RawConfigParser()
308 hgrc.read('.hg/hgrc')
308 hgrc.read('.hg/hgrc')
309 hgrc.remove_option('paths', remote_name)
309 hgrc.remove_option('paths', remote_name)
310 with open('.hg/hgrc', 'w') as f:
310 with open('.hg/hgrc', 'w') as f:
311 hgrc.write(f)
311 hgrc.write(f)
312
312
313 @keep_cwd
313 @keep_cwd
314 def get_commits(self, repo, branch='default'):
314 def get_commits(self, repo, branch='default'):
315 os.chdir(repo)
315 os.chdir(repo)
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
318 '{node}\n', '--follow-first'])
318 '{node}\n', '--follow-first'])
319 return commits_list.strip().split('\n')[::-1]
319 return commits_list.strip().split('\n')[::-1]
320
320
321 @timed
321 @timed
322 def push(self, repo, commit, remote_name=None):
322 def push(self, repo, commit, remote_name=None):
323 os.chdir(repo)
323 os.chdir(repo)
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
325 if remote_name:
325 if remote_name:
326 args.append(remote_name)
326 args.append(remote_name)
327 execute(args)
327 execute(args)
328
328
329 @timed
329 @timed
330 def pull(self, repo):
330 def pull(self, repo):
331 os.chdir(repo)
331 os.chdir(repo)
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
333
333
334 def _remote_last_commit(self, repo_url):
334 def _remote_last_commit(self, repo_url):
335 return execute(['hg', 'identify', repo_url])[:12]
335 return execute(['hg', 'identify', repo_url])[:12]
336
336
337 def check_remote_last_commit_is(self, commit, repo_url):
337 def check_remote_last_commit_is(self, commit, repo_url):
338 last_remote_commit = self._remote_last_commit(repo_url)
338 last_remote_commit = self._remote_last_commit(repo_url)
339 if not commit.startswith(last_remote_commit):
339 if not commit.startswith(last_remote_commit):
340 raise Exception('Push did not work, expected commit %s but got %s' %
340 raise Exception('Push did not work, expected commit %s but got %s' %
341 (commit, last_remote_commit))
341 (commit, last_remote_commit))
342
342
343 @keep_cwd
343 @keep_cwd
344 def _local_last_commit(self, repo):
344 def _local_last_commit(self, repo):
345 os.chdir(repo)
345 os.chdir(repo)
346 return execute(['hg', 'identify'])[:12]
346 return execute(['hg', 'identify'])[:12]
347
347
348 def check_local_last_commit_is(self, commit, repo):
348 def check_local_last_commit_is(self, commit, repo):
349 last_local_commit = self._local_last_commit(repo)
349 last_local_commit = self._local_last_commit(repo)
350 if not commit.startswith(last_local_commit):
350 if not commit.startswith(last_local_commit):
351 raise Exception('Pull did not work, expected commit %s but got %s' %
351 raise Exception('Pull did not work, expected commit %s but got %s' %
352 (commit, last_local_commit))
352 (commit, last_local_commit))
353
353
354
354
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
357 api_key):
357 api_key):
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
359 max_commits, skip_commits)
359 max_commits, skip_commits)
360 RhodeCodeMixin.__init__(self, api_key)
360 RhodeCodeMixin.__init__(self, api_key)
361 self.repo_type = 'git'
361 self.repo_type = 'git'
362
362
363
363
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
366 api_key):
366 api_key):
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
368 max_commits, skip_commits)
368 max_commits, skip_commits)
369 RhodeCodeMixin.__init__(self, api_key)
369 RhodeCodeMixin.__init__(self, api_key)
370 self.repo_type = 'hg'
370 self.repo_type = 'hg'
371
371
372
372
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
374 api_key):
374 api_key):
375 max_commits = min(10 * step,
375 max_commits = min(10 * step,
376 int((max_commits - skip_commits) / step) * step)
376 int((max_commits - skip_commits) / step) * step)
377 max_commits += skip_commits
377 max_commits += skip_commits
378 if repo_type == 'git':
378 if repo_type == 'git':
379 return GitTestPerformance(
379 return GitTestPerformance(
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
381 elif repo_type == 'hg':
381 elif repo_type == 'hg':
382 return HgTestPerformance(
382 return HgTestPerformance(
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
384
384
385
385
386 def main(argv):
386 def main(argv):
387 parser = argparse.ArgumentParser(
387 parser = argparse.ArgumentParser(
388 description='Performance tests for push/pull/clone for git and ' +
388 description='Performance tests for push/pull/clone for git and ' +
389 'mercurial repos.')
389 'mercurial repos.')
390 parser.add_argument(
390 parser.add_argument(
391 '--tests', dest='tests', action='store', required=False, default='all',
391 '--tests', dest='tests', action='store', required=False, default='all',
392 help='The tests to run. Default: all. But could be any comma ' +
392 help='The tests to run. Default: all. But could be any comma ' +
393 'separated list with python, hg, kernel or git')
393 'separated list with python, hg, kernel or git')
394 parser.add_argument(
394 parser.add_argument(
395 '--sizes', dest='sizes', action='store', required=False,
395 '--sizes', dest='sizes', action='store', required=False,
396 default='1,10,100,1000,2500',
396 default='1,10,100,1000,2500',
397 help='The sizes to use. Default: 1,10,100,1000,2500')
397 help='The sizes to use. Default: 1,10,100,1000,2500')
398 parser.add_argument(
398 parser.add_argument(
399 '--dir', dest='dir', action='store', required=True,
399 '--dir', dest='dir', action='store', required=True,
400 help='The dir where to store the repos')
400 help='The dir where to store the repos')
401 parser.add_argument(
401 parser.add_argument(
402 '--api-key', dest='api_key', action='store', required=True,
402 '--api-key', dest='api_key', action='store', required=True,
403 help='The api key of RhodeCode')
403 help='The api key of RhodeCode')
404 options = parser.parse_args(argv[1:])
404 options = parser.parse_args(argv[1:])
405 print options
405 print options
406
406
407 test_config = {
407 test_config = {
408 'python': {
408 'python': {
409 'url': 'https://hg.python.org/cpython/',
409 'url': 'https://hg.python.org/cpython/',
410 'limit': 23322,
410 'limit': 23322,
411 'type': 'hg',
411 'type': 'hg',
412 # Do not time the first commit, as it is HUGE!
412 # Do not time the first commit, as it is HUGE!
413 'skip': 1,
413 'skip': 1,
414 },
414 },
415 'hg': {
415 'hg': {
416 'url': 'http://selenic.com/hg',
416 'url': 'http://selenic.com/hg',
417 'limit': 14396,
417 'limit': 14396,
418 'type': 'hg',
418 'type': 'hg',
419 },
419 },
420 'kernel': {
420 'kernel': {
421 'url': 'https://github.com/torvalds/linux.git',
421 'url': 'https://github.com/torvalds/linux.git',
422 'limit': 46271,
422 'limit': 46271,
423 'type': 'git',
423 'type': 'git',
424 },
424 },
425 'git': {
425 'git': {
426 'url': 'https://github.com/git/git.git',
426 'url': 'https://github.com/git/git.git',
427 'limit': 13525,
427 'limit': 13525,
428 'type': 'git',
428 'type': 'git',
429 }
429 }
430
430
431 }
431 }
432
432
433 test_names = options.tests.split(',')
433 test_names = options.tests.split(',')
434 if test_names == ['all']:
434 if test_names == ['all']:
435 test_names = test_config.keys()
435 test_names = test_config.keys()
436 if not set(test_names) <= set(test_config.keys()):
436 if not set(test_names) <= set(test_config.keys()):
437 print ('Invalid tests: only %s are valid but specified %s' %
437 print ('Invalid tests: only %s are valid but specified %s' %
438 (test_config.keys(), test_names))
438 (test_config.keys(), test_names))
439 return 1
439 return 1
440
440
441 sizes = options.sizes.split(',')
441 sizes = options.sizes.split(',')
442 sizes = map(int, sizes)
442 sizes = map(int, sizes)
443
443
444 base_dir = options.dir
444 base_dir = options.dir
445 api_key = options.api_key
445 api_key = options.api_key
446 results = collections.defaultdict(dict)
446 results = collections.defaultdict(dict)
447 for test_name, size in itertools.product(test_names, sizes):
447 for test_name, size in itertools.product(test_names, sizes):
448 test = get_test(base_dir,
448 test = get_test(base_dir,
449 test_config[test_name]['url'],
449 test_config[test_name]['url'],
450 test_config[test_name]['type'],
450 test_config[test_name]['type'],
451 size,
451 size,
452 test_config[test_name]['limit'],
452 test_config[test_name]['limit'],
453 test_config[test_name].get('skip', 0),
453 test_config[test_name].get('skip', 0),
454 api_key)
454 api_key)
455 print '*' * 80
455 print '*' * 80
456 print 'Running performance test: %s with size %d' % (test_name, size)
456 print 'Running performance test: %s with size %d' % (test_name, size)
457 print '*' * 80
457 print '*' * 80
458 results[test_name][size] = test.run()
458 results[test_name][size] = test.run()
459 pprint.pprint(dict(results))
459 pprint.pprint(dict(results))
460
460
461
461
462 if __name__ == '__main__':
462 if __name__ == '__main__':
463 sys.exit(main(sys.argv))
463 sys.exit(main(sys.argv))
@@ -1,155 +1,155 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 This is a standalone script which will start VCS and RC.
22 This is a standalone script which will start VCS and RC.
23
23
24 Performance numbers will be written on each interval to:
24 Performance numbers will be written on each interval to:
25 vcs_profileX.csv
25 vcs_profileX.csv
26 rc_profileX.csv
26 rc_profileX.csv
27
27
28 To stop the script by press Ctrl-C
28 To stop the script by press Ctrl-C
29 """
29 """
30
30
31 import datetime
31 import datetime
32 import os
32 import os
33 import psutil
33 import psutil
34 import subprocess
34 import subprocess32
35 import sys
35 import sys
36 import time
36 import time
37 import traceback
37 import traceback
38 import urllib
38 import urllib
39
39
40 PROFILING_INTERVAL = 5
40 PROFILING_INTERVAL = 5
41 RC_WEBSITE = "http://localhost:5001/"
41 RC_WEBSITE = "http://localhost:5001/"
42
42
43
43
44 def get_file(prefix):
44 def get_file(prefix):
45 out_file = None
45 out_file = None
46 for i in xrange(100):
46 for i in xrange(100):
47 file_path = "%s_profile%.3d.csv" % (prefix, i)
47 file_path = "%s_profile%.3d.csv" % (prefix, i)
48 if os.path.exists(file_path):
48 if os.path.exists(file_path):
49 continue
49 continue
50 out_file = open(file_path, "w")
50 out_file = open(file_path, "w")
51 out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n")
51 out_file.write("Time; CPU %; Memory (MB); Total FDs; Dulwich FDs; Threads\n")
52 break
52 break
53 return out_file
53 return out_file
54
54
55
55
56 def dump_system():
56 def dump_system():
57 print "System Overview..."
57 print "System Overview..."
58 print "\nCPU Count: %d (%d real)" % \
58 print "\nCPU Count: %d (%d real)" % \
59 (psutil.cpu_count(), psutil.cpu_count(logical=False))
59 (psutil.cpu_count(), psutil.cpu_count(logical=False))
60 print "\nDisk:"
60 print "\nDisk:"
61 print psutil.disk_usage(os.sep)
61 print psutil.disk_usage(os.sep)
62 print "\nMemory:"
62 print "\nMemory:"
63 print psutil.virtual_memory()
63 print psutil.virtual_memory()
64 print "\nMemory (swap):"
64 print "\nMemory (swap):"
65 print psutil.swap_memory()
65 print psutil.swap_memory()
66
66
67
67
68 def count_dulwich_fds(proc):
68 def count_dulwich_fds(proc):
69 p = subprocess.Popen(["lsof", "-p", proc.pid], stdout=subprocess.PIPE)
69 p = subprocess32.Popen(["lsof", "-p", proc.pid], stdout=subprocess32.PIPE)
70 out, err = p.communicate()
70 out, err = p.communicate()
71
71
72 count = 0
72 count = 0
73 for line in out.splitlines():
73 for line in out.splitlines():
74 content = line.split()
74 content = line.split()
75 # http://git-scm.com/book/en/Git-Internals-Packfiles
75 # http://git-scm.com/book/en/Git-Internals-Packfiles
76 if content[-1].endswith(".idx"):
76 if content[-1].endswith(".idx"):
77 count += 1
77 count += 1
78
78
79 return count
79 return count
80
80
81 def dump_process(pid, out_file):
81 def dump_process(pid, out_file):
82 now = datetime.datetime.now()
82 now = datetime.datetime.now()
83 cpu = pid.cpu_percent()
83 cpu = pid.cpu_percent()
84 mem = pid.memory_info()
84 mem = pid.memory_info()
85 fds = pid.num_fds()
85 fds = pid.num_fds()
86 dulwich_fds = count_dulwich_fds(pid)
86 dulwich_fds = count_dulwich_fds(pid)
87 threads = pid.num_threads()
87 threads = pid.num_threads()
88
88
89 content = [now.strftime('%m/%d/%y %H:%M:%S'),
89 content = [now.strftime('%m/%d/%y %H:%M:%S'),
90 cpu,
90 cpu,
91 "%.2f" % (mem[0]/1024.0/1024.0),
91 "%.2f" % (mem[0]/1024.0/1024.0),
92 fds, dulwich_fds, threads]
92 fds, dulwich_fds, threads]
93 out_file.write("; ".join([str(item) for item in content]))
93 out_file.write("; ".join([str(item) for item in content]))
94 out_file.write("\n")
94 out_file.write("\n")
95
95
96
96
97 # Open output files
97 # Open output files
98 vcs_out = get_file("vcs")
98 vcs_out = get_file("vcs")
99 if vcs_out is None:
99 if vcs_out is None:
100 print "Unable to enumerate output file for VCS"
100 print "Unable to enumerate output file for VCS"
101 sys.exit(1)
101 sys.exit(1)
102 rc_out = get_file("rc")
102 rc_out = get_file("rc")
103 if rc_out is None:
103 if rc_out is None:
104 print "Unable to enumerate output file for RC"
104 print "Unable to enumerate output file for RC"
105 sys.exit(1)
105 sys.exit(1)
106
106
107 # Show system information
107 # Show system information
108 dump_system()
108 dump_system()
109
109
110 print "\nStarting VCS..."
110 print "\nStarting VCS..."
111 vcs = psutil.Popen(["vcsserver"])
111 vcs = psutil.Popen(["vcsserver"])
112 time.sleep(1)
112 time.sleep(1)
113 if not vcs.is_running():
113 if not vcs.is_running():
114 print "VCS - Failed to start"
114 print "VCS - Failed to start"
115 sys.exit(1)
115 sys.exit(1)
116 print "VCS - Ok"
116 print "VCS - Ok"
117
117
118 print "\nStarting RhodeCode..."
118 print "\nStarting RhodeCode..."
119 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
119 rc = psutil.Popen("RC_VCSSERVER_TEST_DISABLE=1 paster serve test.ini",
120 shell=True, stdin=subprocess.PIPE)
120 shell=True, stdin=subprocess32.PIPE)
121 time.sleep(1)
121 time.sleep(1)
122 if not rc.is_running():
122 if not rc.is_running():
123 print "RC - Failed to start"
123 print "RC - Failed to start"
124 vcs.terminate()
124 vcs.terminate()
125 sys.exit(1)
125 sys.exit(1)
126
126
127 # Send command to create the databases
127 # Send command to create the databases
128 rc.stdin.write("y\n")
128 rc.stdin.write("y\n")
129
129
130 # Verify that the website is up
130 # Verify that the website is up
131 time.sleep(4)
131 time.sleep(4)
132 try:
132 try:
133 urllib.urlopen(RC_WEBSITE)
133 urllib.urlopen(RC_WEBSITE)
134 except IOError:
134 except IOError:
135 print "RC - Website not started"
135 print "RC - Website not started"
136 vcs.terminate()
136 vcs.terminate()
137 sys.exit(1)
137 sys.exit(1)
138 print "RC - Ok"
138 print "RC - Ok"
139
139
140 print "\nProfiling...\n%s\n" % ("-"*80)
140 print "\nProfiling...\n%s\n" % ("-"*80)
141 while True:
141 while True:
142 try:
142 try:
143 dump_process(vcs, vcs_out)
143 dump_process(vcs, vcs_out)
144 dump_process(rc, rc_out)
144 dump_process(rc, rc_out)
145 time.sleep(PROFILING_INTERVAL)
145 time.sleep(PROFILING_INTERVAL)
146 except Exception:
146 except Exception:
147 print traceback.format_exc()
147 print traceback.format_exc()
148 break
148 break
149
149
150 # Finalize the profiling
150 # Finalize the profiling
151 vcs_out.close()
151 vcs_out.close()
152 rc_out.close()
152 rc_out.close()
153
153
154 vcs.terminate()
154 vcs.terminate()
155 rc.terminate()
155 rc.terminate()
@@ -1,305 +1,305 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 VCS Performance measurement tool
22 VCS Performance measurement tool
23
23
24 Usage:
24 Usage:
25
25
26 - Check that required vcs keys can be found in ~/.hgrc and ~/.netrc
26 - Check that required vcs keys can be found in ~/.hgrc and ~/.netrc
27
27
28 - Start a local instance of RhodeCode Enterprise
28 - Start a local instance of RhodeCode Enterprise
29
29
30 - Launch the script:
30 - Launch the script:
31
31
32 TMPDIR=/tmp python vcs_performance.py \
32 TMPDIR=/tmp python vcs_performance.py \
33 --host=http://vm:5000 \
33 --host=http://vm:5000 \
34 --api-key=55c4a33688577da24183dcac5fde4dddfdbf18dc \
34 --api-key=55c4a33688577da24183dcac5fde4dddfdbf18dc \
35 --commits=10 --repositories=100 --log-level=info
35 --commits=10 --repositories=100 --log-level=info
36 """
36 """
37
37
38 import argparse
38 import argparse
39 import functools
39 import functools
40 import logging
40 import logging
41 import os
41 import os
42 import shutil
42 import shutil
43 import subprocess
43 import subprocess32
44 import tempfile
44 import tempfile
45 import time
45 import time
46 from itertools import chain
46 from itertools import chain
47
47
48 from api import RCApi, ApiError
48 from api import RCApi, ApiError
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 def timed(f):
54 def timed(f):
55 """Decorator that returns the time it took to execute the function."""
55 """Decorator that returns the time it took to execute the function."""
56 @functools.wraps(f)
56 @functools.wraps(f)
57 def wrapped_f(*args, **kwargs):
57 def wrapped_f(*args, **kwargs):
58 start_time = time.time()
58 start_time = time.time()
59 try:
59 try:
60 f(*args, **kwargs)
60 f(*args, **kwargs)
61 finally:
61 finally:
62 return time.time() - start_time
62 return time.time() - start_time
63
63
64 return wrapped_f
64 return wrapped_f
65
65
66
66
67 def mean(container):
67 def mean(container):
68 """Return the mean of the container."""
68 """Return the mean of the container."""
69 if not container:
69 if not container:
70 return -1.0
70 return -1.0
71 return sum(container) / len(container)
71 return sum(container) / len(container)
72
72
73
73
74 class Config(object):
74 class Config(object):
75 args = None
75 args = None
76
76
77 def __init__(self):
77 def __init__(self):
78 parser = argparse.ArgumentParser(description='Runs VCS load tests')
78 parser = argparse.ArgumentParser(description='Runs VCS load tests')
79 parser.add_argument(
79 parser.add_argument(
80 '--host', dest='host', action='store', required=True,
80 '--host', dest='host', action='store', required=True,
81 help='RhodeCode Enterprise host')
81 help='RhodeCode Enterprise host')
82 parser.add_argument(
82 parser.add_argument(
83 '--api-key', dest='api_key', action='store', required=True,
83 '--api-key', dest='api_key', action='store', required=True,
84 help='API Key')
84 help='API Key')
85 parser.add_argument(
85 parser.add_argument(
86 '--file-size', dest='file_size', action='store', required=False,
86 '--file-size', dest='file_size', action='store', required=False,
87 default=1, type=int, help='File size in MB')
87 default=1, type=int, help='File size in MB')
88 parser.add_argument(
88 parser.add_argument(
89 '--repositories', dest='repositories', action='store',
89 '--repositories', dest='repositories', action='store',
90 required=False, default=1, type=int,
90 required=False, default=1, type=int,
91 help='Number of repositories')
91 help='Number of repositories')
92 parser.add_argument(
92 parser.add_argument(
93 '--commits', dest='commits', action='store', required=False,
93 '--commits', dest='commits', action='store', required=False,
94 default=1, type=int, help='Number of commits')
94 default=1, type=int, help='Number of commits')
95 parser.add_argument(
95 parser.add_argument(
96 '--log-level', dest='log_level', action='store', required=False,
96 '--log-level', dest='log_level', action='store', required=False,
97 default='error', help='Logging level')
97 default='error', help='Logging level')
98 self.args = parser.parse_args()
98 self.args = parser.parse_args()
99
99
100 def __getattr__(self, attr):
100 def __getattr__(self, attr):
101 return getattr(self.args, attr)
101 return getattr(self.args, attr)
102
102
103
103
104 class Repository(object):
104 class Repository(object):
105 FILE_NAME_TEMPLATE = "test_{:09d}.bin"
105 FILE_NAME_TEMPLATE = "test_{:09d}.bin"
106
106
107 def __init__(self, name, base_path, api):
107 def __init__(self, name, base_path, api):
108 self.name = name
108 self.name = name
109 self.path = os.path.join(base_path, name)
109 self.path = os.path.join(base_path, name)
110 self.api = api
110 self.api = api
111
111
112 def create(self):
112 def create(self):
113 self._create_filesystem_repo(self.path)
113 self._create_filesystem_repo(self.path)
114 try:
114 try:
115 self.url = self.api.create_repo(
115 self.url = self.api.create_repo(
116 self.name, self.TYPE, 'Performance tests')
116 self.name, self.TYPE, 'Performance tests')
117 except ApiError as e:
117 except ApiError as e:
118 log.error('api: {}'.format(e))
118 log.error('api: {}'.format(e))
119
119
120 def delete(self):
120 def delete(self):
121 self._delete_filesystem_repo()
121 self._delete_filesystem_repo()
122 try:
122 try:
123 self.api.delete_repo(self.name)
123 self.api.delete_repo(self.name)
124 except ApiError as e:
124 except ApiError as e:
125 log.error('api: {}'.format(e))
125 log.error('api: {}'.format(e))
126
126
127 def create_commits(self, number, file_size):
127 def create_commits(self, number, file_size):
128 for i in xrange(number):
128 for i in xrange(number):
129 file_name = self.FILE_NAME_TEMPLATE.format(i)
129 file_name = self.FILE_NAME_TEMPLATE.format(i)
130 log.debug("Create commit {}".format(file_name))
130 log.debug("Create commit {}".format(file_name))
131 self._create_file(file_name, file_size)
131 self._create_file(file_name, file_size)
132 self._create_commit(file_name)
132 self._create_commit(file_name)
133
133
134 @timed
134 @timed
135 def push(self):
135 def push(self):
136 raise NotImplementedError()
136 raise NotImplementedError()
137
137
138 @timed
138 @timed
139 def clone(self, destination_path):
139 def clone(self, destination_path):
140 raise NotImplementedError()
140 raise NotImplementedError()
141
141
142 @timed
142 @timed
143 def pull(self):
143 def pull(self):
144 raise NotImplementedError()
144 raise NotImplementedError()
145
145
146 def _run(self, *args):
146 def _run(self, *args):
147 command = [self.BASE_COMMAND] + list(args)
147 command = [self.BASE_COMMAND] + list(args)
148 process = subprocess.Popen(
148 process = subprocess32.Popen(
149 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
149 command, stdout=subprocess32.PIPE, stderr=subprocess32.PIPE)
150 return process.communicate()
150 return process.communicate()
151
151
152 def _create_file(self, name, size):
152 def _create_file(self, name, size):
153 file_name = os.path.join(self.path, name)
153 file_name = os.path.join(self.path, name)
154 with open(file_name, 'wb') as f:
154 with open(file_name, 'wb') as f:
155 f.write(os.urandom(1024))
155 f.write(os.urandom(1024))
156
156
157 def _delete_filesystem_repo(self):
157 def _delete_filesystem_repo(self):
158 shutil.rmtree(self.path)
158 shutil.rmtree(self.path)
159
159
160 def _create_filesystem_repo(self, path):
160 def _create_filesystem_repo(self, path):
161 raise NotImplementedError()
161 raise NotImplementedError()
162
162
163 def _create_commit(self, file_name):
163 def _create_commit(self, file_name):
164 raise NotImplementedError()
164 raise NotImplementedError()
165
165
166
166
167 class GitRepository(Repository):
167 class GitRepository(Repository):
168 TYPE = 'git'
168 TYPE = 'git'
169 BASE_COMMAND = 'git'
169 BASE_COMMAND = 'git'
170
170
171 @timed
171 @timed
172 def push(self):
172 def push(self):
173 os.chdir(self.path)
173 os.chdir(self.path)
174 self._run('push', '--set-upstream', self.url, 'master')
174 self._run('push', '--set-upstream', self.url, 'master')
175
175
176 @timed
176 @timed
177 def clone(self, destination_path):
177 def clone(self, destination_path):
178 self._run('clone', self.url, os.path.join(destination_path, self.name))
178 self._run('clone', self.url, os.path.join(destination_path, self.name))
179
179
180 @timed
180 @timed
181 def pull(self, destination_path):
181 def pull(self, destination_path):
182 path = os.path.join(destination_path, self.name)
182 path = os.path.join(destination_path, self.name)
183 self._create_filesystem_repo(path)
183 self._create_filesystem_repo(path)
184 os.chdir(path)
184 os.chdir(path)
185 self._run('remote', 'add', 'origin', self.url)
185 self._run('remote', 'add', 'origin', self.url)
186 self._run('pull', 'origin', 'master')
186 self._run('pull', 'origin', 'master')
187
187
188 def _create_filesystem_repo(self, path):
188 def _create_filesystem_repo(self, path):
189 self._run('init', path)
189 self._run('init', path)
190
190
191 def _create_commit(self, file_name):
191 def _create_commit(self, file_name):
192 os.chdir(self.path)
192 os.chdir(self.path)
193 self._run('add', file_name)
193 self._run('add', file_name)
194 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
194 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
195
195
196
196
197 class HgRepository(Repository):
197 class HgRepository(Repository):
198 TYPE = 'hg'
198 TYPE = 'hg'
199 BASE_COMMAND = 'hg'
199 BASE_COMMAND = 'hg'
200
200
201 @timed
201 @timed
202 def push(self):
202 def push(self):
203 os.chdir(self.path)
203 os.chdir(self.path)
204 self._run('push', self.url)
204 self._run('push', self.url)
205
205
206 @timed
206 @timed
207 def clone(self, destination_path):
207 def clone(self, destination_path):
208 self._run('clone', self.url, os.path.join(destination_path, self.name))
208 self._run('clone', self.url, os.path.join(destination_path, self.name))
209
209
210 @timed
210 @timed
211 def pull(self, destination_path):
211 def pull(self, destination_path):
212 path = os.path.join(destination_path, self.name)
212 path = os.path.join(destination_path, self.name)
213 self._create_filesystem_repo(path)
213 self._create_filesystem_repo(path)
214 os.chdir(path)
214 os.chdir(path)
215 self._run('pull', '-r', 'tip', self.url)
215 self._run('pull', '-r', 'tip', self.url)
216
216
217 def _create_filesystem_repo(self, path):
217 def _create_filesystem_repo(self, path):
218 self._run('init', path)
218 self._run('init', path)
219
219
220 def _create_commit(self, file_name):
220 def _create_commit(self, file_name):
221 os.chdir(self.path)
221 os.chdir(self.path)
222 self._run('add', file_name)
222 self._run('add', file_name)
223 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
223 self._run('commit', file_name, '-m', '"Add {}"'.format(file_name))
224
224
225
225
226 class Benchmark(object):
226 class Benchmark(object):
227 REPO_CLASSES = {
227 REPO_CLASSES = {
228 'git': GitRepository,
228 'git': GitRepository,
229 'hg': HgRepository
229 'hg': HgRepository
230 }
230 }
231 REPO_NAME = '{}_performance_{:03d}'
231 REPO_NAME = '{}_performance_{:03d}'
232
232
233 def __init__(self, config):
233 def __init__(self, config):
234 self.api = RCApi(api_key=config.api_key, rc_endpoint=config.host)
234 self.api = RCApi(api_key=config.api_key, rc_endpoint=config.host)
235 self.source_path = tempfile.mkdtemp(suffix='vcsperformance')
235 self.source_path = tempfile.mkdtemp(suffix='vcsperformance')
236
236
237 self.config = config
237 self.config = config
238 self.git_repos = []
238 self.git_repos = []
239 self.hg_repos = []
239 self.hg_repos = []
240
240
241 self._set_log_level()
241 self._set_log_level()
242
242
243 def start(self):
243 def start(self):
244 self._create_repos()
244 self._create_repos()
245 repos = {
245 repos = {
246 'git': self.git_repos,
246 'git': self.git_repos,
247 'hg': self.hg_repos
247 'hg': self.hg_repos
248 }
248 }
249
249
250 clone_destination_path = tempfile.mkdtemp(suffix='clone')
250 clone_destination_path = tempfile.mkdtemp(suffix='clone')
251 pull_destination_path = tempfile.mkdtemp(suffix='pull')
251 pull_destination_path = tempfile.mkdtemp(suffix='pull')
252 operations = [
252 operations = [
253 ('push', ),
253 ('push', ),
254 ('clone', clone_destination_path),
254 ('clone', clone_destination_path),
255 ('pull', pull_destination_path)
255 ('pull', pull_destination_path)
256 ]
256 ]
257
257
258 for operation in operations:
258 for operation in operations:
259 for type_ in repos:
259 for type_ in repos:
260 times = self._measure(repos[type_], *operation)
260 times = self._measure(repos[type_], *operation)
261 print("Mean {} {} time: {:.3f} sec.".format(
261 print("Mean {} {} time: {:.3f} sec.".format(
262 type_, operation[0], mean(times)))
262 type_, operation[0], mean(times)))
263
263
264 def cleanup(self):
264 def cleanup(self):
265 log.info("Cleaning up...")
265 log.info("Cleaning up...")
266 for repo in chain(self.git_repos, self.hg_repos):
266 for repo in chain(self.git_repos, self.hg_repos):
267 repo.delete()
267 repo.delete()
268
268
269 def _measure(self, repos, operation, *args):
269 def _measure(self, repos, operation, *args):
270 times = []
270 times = []
271 for repo in repos:
271 for repo in repos:
272 method = getattr(repo, operation)
272 method = getattr(repo, operation)
273 times.append(method(*args))
273 times.append(method(*args))
274 return times
274 return times
275
275
276 def _create_repos(self):
276 def _create_repos(self):
277 log.info("Creating repositories...")
277 log.info("Creating repositories...")
278 for i in xrange(self.config.repositories):
278 for i in xrange(self.config.repositories):
279 self.git_repos.append(self._create_repo('git', i))
279 self.git_repos.append(self._create_repo('git', i))
280 self.hg_repos.append(self._create_repo('hg', i))
280 self.hg_repos.append(self._create_repo('hg', i))
281
281
282 def _create_repo(self, type_, id_):
282 def _create_repo(self, type_, id_):
283 RepoClass = self.REPO_CLASSES[type_]
283 RepoClass = self.REPO_CLASSES[type_]
284 repo = RepoClass(
284 repo = RepoClass(
285 self.REPO_NAME.format(type_, id_), self.source_path, self.api)
285 self.REPO_NAME.format(type_, id_), self.source_path, self.api)
286 repo.create()
286 repo.create()
287 repo.create_commits(self.config.commits, self.config.file_size)
287 repo.create_commits(self.config.commits, self.config.file_size)
288 return repo
288 return repo
289
289
290 def _set_log_level(self):
290 def _set_log_level(self):
291 try:
291 try:
292 log_level = getattr(logging, config.log_level.upper())
292 log_level = getattr(logging, config.log_level.upper())
293 except:
293 except:
294 log_level = logging.ERROR
294 log_level = logging.ERROR
295 handler = logging.StreamHandler()
295 handler = logging.StreamHandler()
296 log.addHandler(handler)
296 log.addHandler(handler)
297 log.setLevel(log_level)
297 log.setLevel(log_level)
298
298
299 if __name__ == '__main__':
299 if __name__ == '__main__':
300 config = Config()
300 config = Config()
301 benchmark = Benchmark(config)
301 benchmark = Benchmark(config)
302 try:
302 try:
303 benchmark.start()
303 benchmark.start()
304 finally:
304 finally:
305 benchmark.cleanup()
305 benchmark.cleanup()
@@ -1,147 +1,147 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base for test suite for making push/pull operations.
22 Base for test suite for making push/pull operations.
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 to redirect things to stderr instead of stdout.
27 to redirect things to stderr instead of stdout.
28 """
28 """
29
29
30 from os.path import join as jn
30 from os.path import join as jn
31 from subprocess import Popen, PIPE
31 from subprocess32 import Popen, PIPE
32 import logging
32 import logging
33 import os
33 import os
34 import tempfile
34 import tempfile
35
35
36 from rhodecode.tests import GIT_REPO, HG_REPO
36 from rhodecode.tests import GIT_REPO, HG_REPO
37
37
38 DEBUG = True
38 DEBUG = True
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 REPO_GROUP = 'a_repo_group'
40 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 class Command(object):
47 class Command(object):
48
48
49 def __init__(self, cwd):
49 def __init__(self, cwd):
50 self.cwd = cwd
50 self.cwd = cwd
51 self.process = None
51 self.process = None
52
52
53 def execute(self, cmd, *args):
53 def execute(self, cmd, *args):
54 """
54 """
55 Runs command on the system with given ``args``.
55 Runs command on the system with given ``args``.
56 """
56 """
57
57
58 command = cmd + ' ' + ' '.join(args)
58 command = cmd + ' ' + ' '.join(args)
59 if DEBUG:
59 if DEBUG:
60 log.debug('*** CMD %s ***' % (command,))
60 log.debug('*** CMD %s ***' % (command,))
61
61
62 env = dict(os.environ)
62 env = dict(os.environ)
63 # Delete coverage variables, as they make the test fail for Mercurial
63 # Delete coverage variables, as they make the test fail for Mercurial
64 for key in env.keys():
64 for key in env.keys():
65 if key.startswith('COV_CORE_'):
65 if key.startswith('COV_CORE_'):
66 del env[key]
66 del env[key]
67
67
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 cwd=self.cwd, env=env)
69 cwd=self.cwd, env=env)
70 stdout, stderr = self.process.communicate()
70 stdout, stderr = self.process.communicate()
71 if DEBUG:
71 if DEBUG:
72 log.debug('STDOUT:%s' % (stdout,))
72 log.debug('STDOUT:%s' % (stdout,))
73 log.debug('STDERR:%s' % (stderr,))
73 log.debug('STDERR:%s' % (stderr,))
74 return stdout, stderr
74 return stdout, stderr
75
75
76 def assert_returncode_success(self):
76 def assert_returncode_success(self):
77 assert self.process.returncode == 0
77 assert self.process.returncode == 0
78
78
79
79
80 def _add_files_and_push(vcs, dest, clone_url=None, **kwargs):
80 def _add_files_and_push(vcs, dest, clone_url=None, **kwargs):
81 """
81 """
82 Generate some files, add it to DEST repo and push back
82 Generate some files, add it to DEST repo and push back
83 vcs is git or hg and defines what VCS we want to make those files for
83 vcs is git or hg and defines what VCS we want to make those files for
84 """
84 """
85 # commit some stuff into this repo
85 # commit some stuff into this repo
86 cwd = path = jn(dest)
86 cwd = path = jn(dest)
87 added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next())
87 added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next())
88 Command(cwd).execute('touch %s' % added_file)
88 Command(cwd).execute('touch %s' % added_file)
89 Command(cwd).execute('%s add %s' % (vcs, added_file))
89 Command(cwd).execute('%s add %s' % (vcs, added_file))
90 author_str = 'Marcin KuΕΊminski <me@email.com>'
90 author_str = 'Marcin KuΕΊminski <me@email.com>'
91
91
92 git_ident = "git config user.name {} && git config user.email {}".format(
92 git_ident = "git config user.name {} && git config user.email {}".format(
93 'Marcin KuΕΊminski', 'me@email.com')
93 'Marcin KuΕΊminski', 'me@email.com')
94
94
95 for i in xrange(kwargs.get('files_no', 3)):
95 for i in xrange(kwargs.get('files_no', 3)):
96 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
96 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
97 Command(cwd).execute(cmd)
97 Command(cwd).execute(cmd)
98 if vcs == 'hg':
98 if vcs == 'hg':
99 cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % (
99 cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % (
100 i, author_str, added_file
100 i, author_str, added_file
101 )
101 )
102 elif vcs == 'git':
102 elif vcs == 'git':
103 cmd = """%s && git commit -m 'commited new %s' %s""" % (
103 cmd = """%s && git commit -m 'commited new %s' %s""" % (
104 git_ident, i, added_file)
104 git_ident, i, added_file)
105 Command(cwd).execute(cmd)
105 Command(cwd).execute(cmd)
106
106
107 # PUSH it back
107 # PUSH it back
108 stdout = stderr = None
108 stdout = stderr = None
109 if vcs == 'hg':
109 if vcs == 'hg':
110 stdout, stderr = Command(cwd).execute(
110 stdout, stderr = Command(cwd).execute(
111 'hg push --verbose', clone_url)
111 'hg push --verbose', clone_url)
112 elif vcs == 'git':
112 elif vcs == 'git':
113 stdout, stderr = Command(cwd).execute(
113 stdout, stderr = Command(cwd).execute(
114 """%s && git push --verbose %s master""" % (
114 """%s && git push --verbose %s master""" % (
115 git_ident, clone_url))
115 git_ident, clone_url))
116
116
117 return stdout, stderr
117 return stdout, stderr
118
118
119
119
120 def _check_proper_git_push(
120 def _check_proper_git_push(
121 stdout, stderr, branch='master', should_set_default_branch=False):
121 stdout, stderr, branch='master', should_set_default_branch=False):
122 # Note: Git is writing most information to stderr intentionally
122 # Note: Git is writing most information to stderr intentionally
123 assert 'fatal' not in stderr
123 assert 'fatal' not in stderr
124 assert 'rejected' not in stderr
124 assert 'rejected' not in stderr
125 assert 'Pushing to' in stderr
125 assert 'Pushing to' in stderr
126 assert '%s -> %s' % (branch, branch) in stderr
126 assert '%s -> %s' % (branch, branch) in stderr
127
127
128 if should_set_default_branch:
128 if should_set_default_branch:
129 assert "Setting default branch to %s" % branch in stderr
129 assert "Setting default branch to %s" % branch in stderr
130 else:
130 else:
131 assert "Setting default branch" not in stderr
131 assert "Setting default branch" not in stderr
132
132
133
133
134 def _check_proper_clone(stdout, stderr, vcs):
134 def _check_proper_clone(stdout, stderr, vcs):
135 if vcs == 'hg':
135 if vcs == 'hg':
136 assert 'requesting all changes' in stdout
136 assert 'requesting all changes' in stdout
137 assert 'adding changesets' in stdout
137 assert 'adding changesets' in stdout
138 assert 'adding manifests' in stdout
138 assert 'adding manifests' in stdout
139 assert 'adding file changes' in stdout
139 assert 'adding file changes' in stdout
140
140
141 assert stderr == ''
141 assert stderr == ''
142
142
143 if vcs == 'git':
143 if vcs == 'git':
144 assert '' == stdout
144 assert '' == stdout
145 assert 'Cloning into' in stderr
145 assert 'Cloning into' in stderr
146 assert 'abort:' not in stderr
146 assert 'abort:' not in stderr
147 assert 'fatal:' not in stderr
147 assert 'fatal:' not in stderr
@@ -1,257 +1,257 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 py.test config for test suite for making push/pull operations.
22 py.test config for test suite for making push/pull operations.
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 to redirect things to stderr instead of stdout.
27 to redirect things to stderr instead of stdout.
28 """
28 """
29
29
30 import ConfigParser
30 import ConfigParser
31 import os
31 import os
32 import subprocess
32 import subprocess32
33 import tempfile
33 import tempfile
34 import textwrap
34 import textwrap
35 import pytest
35 import pytest
36
36
37 import rhodecode
37 import rhodecode
38 from rhodecode.model.db import Repository
38 from rhodecode.model.db import Repository
39 from rhodecode.model.meta import Session
39 from rhodecode.model.meta import Session
40 from rhodecode.model.settings import SettingsModel
40 from rhodecode.model.settings import SettingsModel
41 from rhodecode.tests import (
41 from rhodecode.tests import (
42 GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,)
42 GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,)
43 from rhodecode.tests.fixture import Fixture
43 from rhodecode.tests.fixture import Fixture
44 from rhodecode.tests.utils import (
44 from rhodecode.tests.utils import (
45 set_anonymous_access, is_url_reachable, wait_for_url)
45 set_anonymous_access, is_url_reachable, wait_for_url)
46
46
47 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
47 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
48 REPO_GROUP = 'a_repo_group'
48 REPO_GROUP = 'a_repo_group'
49 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
49 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
50 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
50 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
51
51
52
52
53 def assert_no_running_instance(url):
53 def assert_no_running_instance(url):
54 if is_url_reachable(url):
54 if is_url_reachable(url):
55 print("Hint: Usually this means another instance of Enterprise "
55 print("Hint: Usually this means another instance of Enterprise "
56 "is running in the background.")
56 "is running in the background.")
57 pytest.fail(
57 pytest.fail(
58 "Port is not free at %s, cannot start web interface" % url)
58 "Port is not free at %s, cannot start web interface" % url)
59
59
60
60
61 def get_host_url(pylons_config):
61 def get_host_url(pylons_config):
62 """Construct the host url using the port in the test configuration."""
62 """Construct the host url using the port in the test configuration."""
63 config = ConfigParser.ConfigParser()
63 config = ConfigParser.ConfigParser()
64 config.read(pylons_config)
64 config.read(pylons_config)
65
65
66 return '127.0.0.1:%s' % config.get('server:main', 'port')
66 return '127.0.0.1:%s' % config.get('server:main', 'port')
67
67
68
68
69 class RcWebServer(object):
69 class RcWebServer(object):
70 """
70 """
71 Represents a running RCE web server used as a test fixture.
71 Represents a running RCE web server used as a test fixture.
72 """
72 """
73 def __init__(self, pylons_config):
73 def __init__(self, pylons_config):
74 self.pylons_config = pylons_config
74 self.pylons_config = pylons_config
75
75
76 def repo_clone_url(self, repo_name, **kwargs):
76 def repo_clone_url(self, repo_name, **kwargs):
77 params = {
77 params = {
78 'user': TEST_USER_ADMIN_LOGIN,
78 'user': TEST_USER_ADMIN_LOGIN,
79 'passwd': TEST_USER_ADMIN_PASS,
79 'passwd': TEST_USER_ADMIN_PASS,
80 'host': get_host_url(self.pylons_config),
80 'host': get_host_url(self.pylons_config),
81 'cloned_repo': repo_name,
81 'cloned_repo': repo_name,
82 }
82 }
83 params.update(**kwargs)
83 params.update(**kwargs)
84 _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
84 _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
85 return _url
85 return _url
86
86
87
87
88 @pytest.fixture(scope="module")
88 @pytest.fixture(scope="module")
89 def rcextensions(request, pylonsapp, tmpdir_factory):
89 def rcextensions(request, pylonsapp, tmpdir_factory):
90 """
90 """
91 Installs a testing rcextensions pack to ensure they work as expected.
91 Installs a testing rcextensions pack to ensure they work as expected.
92 """
92 """
93 init_content = textwrap.dedent("""
93 init_content = textwrap.dedent("""
94 # Forward import the example rcextensions to make it
94 # Forward import the example rcextensions to make it
95 # active for our tests.
95 # active for our tests.
96 from rhodecode.tests.other.example_rcextensions import *
96 from rhodecode.tests.other.example_rcextensions import *
97 """)
97 """)
98
98
99 # Note: rcextensions are looked up based on the path of the ini file
99 # Note: rcextensions are looked up based on the path of the ini file
100 root_path = tmpdir_factory.getbasetemp()
100 root_path = tmpdir_factory.getbasetemp()
101 rcextensions_path = root_path.join('rcextensions')
101 rcextensions_path = root_path.join('rcextensions')
102 init_path = rcextensions_path.join('__init__.py')
102 init_path = rcextensions_path.join('__init__.py')
103
103
104 if rcextensions_path.check():
104 if rcextensions_path.check():
105 pytest.fail(
105 pytest.fail(
106 "Path for rcextensions already exists, please clean up before "
106 "Path for rcextensions already exists, please clean up before "
107 "test run this path: %s" % (rcextensions_path, ))
107 "test run this path: %s" % (rcextensions_path, ))
108 return
108 return
109
109
110 request.addfinalizer(rcextensions_path.remove)
110 request.addfinalizer(rcextensions_path.remove)
111 init_path.write_binary(init_content, ensure=True)
111 init_path.write_binary(init_content, ensure=True)
112
112
113
113
114 @pytest.fixture(scope="module")
114 @pytest.fixture(scope="module")
115 def repos(request, pylonsapp):
115 def repos(request, pylonsapp):
116 """Create a copy of each test repo in a repo group."""
116 """Create a copy of each test repo in a repo group."""
117 fixture = Fixture()
117 fixture = Fixture()
118 repo_group = fixture.create_repo_group(REPO_GROUP)
118 repo_group = fixture.create_repo_group(REPO_GROUP)
119 repo_group_id = repo_group.group_id
119 repo_group_id = repo_group.group_id
120 fixture.create_fork(HG_REPO, HG_REPO,
120 fixture.create_fork(HG_REPO, HG_REPO,
121 repo_name_full=HG_REPO_WITH_GROUP,
121 repo_name_full=HG_REPO_WITH_GROUP,
122 repo_group=repo_group_id)
122 repo_group=repo_group_id)
123 fixture.create_fork(GIT_REPO, GIT_REPO,
123 fixture.create_fork(GIT_REPO, GIT_REPO,
124 repo_name_full=GIT_REPO_WITH_GROUP,
124 repo_name_full=GIT_REPO_WITH_GROUP,
125 repo_group=repo_group_id)
125 repo_group=repo_group_id)
126
126
127 @request.addfinalizer
127 @request.addfinalizer
128 def cleanup():
128 def cleanup():
129 fixture.destroy_repo(HG_REPO_WITH_GROUP)
129 fixture.destroy_repo(HG_REPO_WITH_GROUP)
130 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
130 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
131 fixture.destroy_repo_group(repo_group_id)
131 fixture.destroy_repo_group(repo_group_id)
132
132
133
133
134 @pytest.fixture(scope="module")
134 @pytest.fixture(scope="module")
135 def rc_web_server_config(pylons_config):
135 def rc_web_server_config(pylons_config):
136 """
136 """
137 Configuration file used for the fixture `rc_web_server`.
137 Configuration file used for the fixture `rc_web_server`.
138 """
138 """
139 return pylons_config
139 return pylons_config
140
140
141
141
142 @pytest.fixture(scope="module")
142 @pytest.fixture(scope="module")
143 def rc_web_server(
143 def rc_web_server(
144 request, pylonsapp, rc_web_server_config, repos, rcextensions):
144 request, pylonsapp, rc_web_server_config, repos, rcextensions):
145 """
145 """
146 Run the web server as a subprocess.
146 Run the web server as a subprocess.
147
147
148 Since we have already a running vcsserver, this is not spawned again.
148 Since we have already a running vcsserver, this is not spawned again.
149 """
149 """
150 env = os.environ.copy()
150 env = os.environ.copy()
151 env['RC_NO_TMP_PATH'] = '1'
151 env['RC_NO_TMP_PATH'] = '1'
152
152
153 server_out = open(RC_LOG, 'w')
153 server_out = open(RC_LOG, 'w')
154
154
155 # TODO: Would be great to capture the output and err of the subprocess
155 # TODO: Would be great to capture the output and err of the subprocess
156 # and make it available in a section of the py.test report in case of an
156 # and make it available in a section of the py.test report in case of an
157 # error.
157 # error.
158
158
159 host_url = 'http://' + get_host_url(rc_web_server_config)
159 host_url = 'http://' + get_host_url(rc_web_server_config)
160 assert_no_running_instance(host_url)
160 assert_no_running_instance(host_url)
161 command = ['rcserver', rc_web_server_config]
161 command = ['rcserver', rc_web_server_config]
162
162
163 print('Starting rcserver: {}'.format(host_url))
163 print('Starting rcserver: {}'.format(host_url))
164 print('Command: {}'.format(command))
164 print('Command: {}'.format(command))
165 print('Logfile: {}'.format(RC_LOG))
165 print('Logfile: {}'.format(RC_LOG))
166
166
167 proc = subprocess.Popen(
167 proc = subprocess32.Popen(
168 command, bufsize=0, env=env, stdout=server_out, stderr=server_out)
168 command, bufsize=0, env=env, stdout=server_out, stderr=server_out)
169
169
170 wait_for_url(host_url, timeout=30)
170 wait_for_url(host_url, timeout=30)
171
171
172 @request.addfinalizer
172 @request.addfinalizer
173 def stop_web_server():
173 def stop_web_server():
174 # TODO: Find out how to integrate with the reporting of py.test to
174 # TODO: Find out how to integrate with the reporting of py.test to
175 # make this information available.
175 # make this information available.
176 print "\nServer log file written to %s" % (RC_LOG, )
176 print "\nServer log file written to %s" % (RC_LOG, )
177 proc.kill()
177 proc.kill()
178 server_out.close()
178 server_out.close()
179
179
180 return RcWebServer(rc_web_server_config)
180 return RcWebServer(rc_web_server_config)
181
181
182
182
183 @pytest.fixture(scope='class', autouse=True)
183 @pytest.fixture(scope='class', autouse=True)
184 def disable_anonymous_user_access(pylonsapp):
184 def disable_anonymous_user_access(pylonsapp):
185 set_anonymous_access(False)
185 set_anonymous_access(False)
186
186
187
187
188 @pytest.fixture
188 @pytest.fixture
189 def disable_locking(pylonsapp):
189 def disable_locking(pylonsapp):
190 r = Repository.get_by_repo_name(GIT_REPO)
190 r = Repository.get_by_repo_name(GIT_REPO)
191 Repository.unlock(r)
191 Repository.unlock(r)
192 r.enable_locking = False
192 r.enable_locking = False
193 Session().add(r)
193 Session().add(r)
194 Session().commit()
194 Session().commit()
195
195
196 r = Repository.get_by_repo_name(HG_REPO)
196 r = Repository.get_by_repo_name(HG_REPO)
197 Repository.unlock(r)
197 Repository.unlock(r)
198 r.enable_locking = False
198 r.enable_locking = False
199 Session().add(r)
199 Session().add(r)
200 Session().commit()
200 Session().commit()
201
201
202
202
203 @pytest.fixture
203 @pytest.fixture
204 def enable_auth_plugins(request, pylonsapp, csrf_token):
204 def enable_auth_plugins(request, pylonsapp, csrf_token):
205 """
205 """
206 Return a factory object that when called, allows to control which
206 Return a factory object that when called, allows to control which
207 authentication plugins are enabled.
207 authentication plugins are enabled.
208 """
208 """
209 def _enable_plugins(plugins_list, override=None):
209 def _enable_plugins(plugins_list, override=None):
210 override = override or {}
210 override = override or {}
211 params = {
211 params = {
212 'auth_plugins': ','.join(plugins_list),
212 'auth_plugins': ','.join(plugins_list),
213 'csrf_token': csrf_token,
213 'csrf_token': csrf_token,
214 }
214 }
215
215
216 for module in plugins_list:
216 for module in plugins_list:
217 plugin = rhodecode.authentication.base.loadplugin(module)
217 plugin = rhodecode.authentication.base.loadplugin(module)
218 plugin_name = plugin.name
218 plugin_name = plugin.name
219 enabled_plugin = 'auth_%s_enabled' % plugin_name
219 enabled_plugin = 'auth_%s_enabled' % plugin_name
220 cache_ttl = 'auth_%s_cache_ttl' % plugin_name
220 cache_ttl = 'auth_%s_cache_ttl' % plugin_name
221
221
222 # default params that are needed for each plugin,
222 # default params that are needed for each plugin,
223 # `enabled` and `cache_ttl`
223 # `enabled` and `cache_ttl`
224 params.update({
224 params.update({
225 enabled_plugin: True,
225 enabled_plugin: True,
226 cache_ttl: 0
226 cache_ttl: 0
227 })
227 })
228 if override.get:
228 if override.get:
229 params.update(override.get(module, {}))
229 params.update(override.get(module, {}))
230
230
231 validated_params = params
231 validated_params = params
232 for k, v in validated_params.items():
232 for k, v in validated_params.items():
233 setting = SettingsModel().create_or_update_setting(k, v)
233 setting = SettingsModel().create_or_update_setting(k, v)
234 Session().add(setting)
234 Session().add(setting)
235 Session().commit()
235 Session().commit()
236
236
237 def cleanup():
237 def cleanup():
238 _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
238 _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
239
239
240 request.addfinalizer(cleanup)
240 request.addfinalizer(cleanup)
241
241
242 return _enable_plugins
242 return _enable_plugins
243
243
244
244
245 @pytest.fixture
245 @pytest.fixture
246 def fs_repo_only(request, rhodecode_fixtures):
246 def fs_repo_only(request, rhodecode_fixtures):
247 def fs_repo_fabric(repo_name, repo_type):
247 def fs_repo_fabric(repo_name, repo_type):
248 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
248 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
249 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
249 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
250
250
251 def cleanup():
251 def cleanup():
252 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
252 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
253 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
253 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
254
254
255 request.addfinalizer(cleanup)
255 request.addfinalizer(cleanup)
256
256
257 return fs_repo_fabric
257 return fs_repo_fabric
@@ -1,1791 +1,1791 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32
32
33 import mock
33 import mock
34 import pyramid.testing
34 import pyramid.testing
35 import pytest
35 import pytest
36 import colander
36 import colander
37 import requests
37 import requests
38 from webtest.app import TestApp
38 from webtest.app import TestApp
39
39
40 import rhodecode
40 import rhodecode
41 from rhodecode.model.changeset_status import ChangesetStatusModel
41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 from rhodecode.model.comment import ChangesetCommentsModel
42 from rhodecode.model.comment import ChangesetCommentsModel
43 from rhodecode.model.db import (
43 from rhodecode.model.db import (
44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration)
45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.pull_request import PullRequestModel
47 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.repo import RepoModel
48 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo_group import RepoGroupModel
49 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.user import UserModel
50 from rhodecode.model.user import UserModel
51 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.user_group import UserGroupModel
52 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.integration import IntegrationModel
53 from rhodecode.model.integration import IntegrationModel
54 from rhodecode.integrations import integration_type_registry
54 from rhodecode.integrations import integration_type_registry
55 from rhodecode.integrations.types.base import IntegrationTypeBase
55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 from rhodecode.lib.utils import repo2db_mapper
56 from rhodecode.lib.utils import repo2db_mapper
57 from rhodecode.lib.vcs import create_vcsserver_proxy
57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 from rhodecode.lib.vcs.backends import get_backend
58 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.nodes import FileNode
59 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.tests import (
60 from rhodecode.tests import (
61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_REGULAR_PASS)
63 TEST_USER_REGULAR_PASS)
64 from rhodecode.tests.fixture import Fixture
64 from rhodecode.tests.fixture import Fixture
65
65
66
66
67 def _split_comma(value):
67 def _split_comma(value):
68 return value.split(',')
68 return value.split(',')
69
69
70
70
71 def pytest_addoption(parser):
71 def pytest_addoption(parser):
72 parser.addoption(
72 parser.addoption(
73 '--keep-tmp-path', action='store_true',
73 '--keep-tmp-path', action='store_true',
74 help="Keep the test temporary directories")
74 help="Keep the test temporary directories")
75 parser.addoption(
75 parser.addoption(
76 '--backends', action='store', type=_split_comma,
76 '--backends', action='store', type=_split_comma,
77 default=['git', 'hg', 'svn'],
77 default=['git', 'hg', 'svn'],
78 help="Select which backends to test for backend specific tests.")
78 help="Select which backends to test for backend specific tests.")
79 parser.addoption(
79 parser.addoption(
80 '--dbs', action='store', type=_split_comma,
80 '--dbs', action='store', type=_split_comma,
81 default=['sqlite'],
81 default=['sqlite'],
82 help="Select which database to test for database specific tests. "
82 help="Select which database to test for database specific tests. "
83 "Possible options are sqlite,postgres,mysql")
83 "Possible options are sqlite,postgres,mysql")
84 parser.addoption(
84 parser.addoption(
85 '--appenlight', '--ae', action='store_true',
85 '--appenlight', '--ae', action='store_true',
86 help="Track statistics in appenlight.")
86 help="Track statistics in appenlight.")
87 parser.addoption(
87 parser.addoption(
88 '--appenlight-api-key', '--ae-key',
88 '--appenlight-api-key', '--ae-key',
89 help="API key for Appenlight.")
89 help="API key for Appenlight.")
90 parser.addoption(
90 parser.addoption(
91 '--appenlight-url', '--ae-url',
91 '--appenlight-url', '--ae-url',
92 default="https://ae.rhodecode.com",
92 default="https://ae.rhodecode.com",
93 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
93 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
94 parser.addoption(
94 parser.addoption(
95 '--sqlite-connection-string', action='store',
95 '--sqlite-connection-string', action='store',
96 default='', help="Connection string for the dbs tests with SQLite")
96 default='', help="Connection string for the dbs tests with SQLite")
97 parser.addoption(
97 parser.addoption(
98 '--postgres-connection-string', action='store',
98 '--postgres-connection-string', action='store',
99 default='', help="Connection string for the dbs tests with Postgres")
99 default='', help="Connection string for the dbs tests with Postgres")
100 parser.addoption(
100 parser.addoption(
101 '--mysql-connection-string', action='store',
101 '--mysql-connection-string', action='store',
102 default='', help="Connection string for the dbs tests with MySQL")
102 default='', help="Connection string for the dbs tests with MySQL")
103 parser.addoption(
103 parser.addoption(
104 '--repeat', type=int, default=100,
104 '--repeat', type=int, default=100,
105 help="Number of repetitions in performance tests.")
105 help="Number of repetitions in performance tests.")
106
106
107
107
108 def pytest_configure(config):
108 def pytest_configure(config):
109 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
109 # Appy the kombu patch early on, needed for test discovery on Python 2.7.11
110 from rhodecode.config import patches
110 from rhodecode.config import patches
111 patches.kombu_1_5_1_python_2_7_11()
111 patches.kombu_1_5_1_python_2_7_11()
112
112
113
113
114 def pytest_collection_modifyitems(session, config, items):
114 def pytest_collection_modifyitems(session, config, items):
115 # nottest marked, compare nose, used for transition from nose to pytest
115 # nottest marked, compare nose, used for transition from nose to pytest
116 remaining = [
116 remaining = [
117 i for i in items if getattr(i.obj, '__test__', True)]
117 i for i in items if getattr(i.obj, '__test__', True)]
118 items[:] = remaining
118 items[:] = remaining
119
119
120
120
121 def pytest_generate_tests(metafunc):
121 def pytest_generate_tests(metafunc):
122 # Support test generation based on --backend parameter
122 # Support test generation based on --backend parameter
123 if 'backend_alias' in metafunc.fixturenames:
123 if 'backend_alias' in metafunc.fixturenames:
124 backends = get_backends_from_metafunc(metafunc)
124 backends = get_backends_from_metafunc(metafunc)
125 scope = None
125 scope = None
126 if not backends:
126 if not backends:
127 pytest.skip("Not enabled for any of selected backends")
127 pytest.skip("Not enabled for any of selected backends")
128 metafunc.parametrize('backend_alias', backends, scope=scope)
128 metafunc.parametrize('backend_alias', backends, scope=scope)
129 elif hasattr(metafunc.function, 'backends'):
129 elif hasattr(metafunc.function, 'backends'):
130 backends = get_backends_from_metafunc(metafunc)
130 backends = get_backends_from_metafunc(metafunc)
131 if not backends:
131 if not backends:
132 pytest.skip("Not enabled for any of selected backends")
132 pytest.skip("Not enabled for any of selected backends")
133
133
134
134
135 def get_backends_from_metafunc(metafunc):
135 def get_backends_from_metafunc(metafunc):
136 requested_backends = set(metafunc.config.getoption('--backends'))
136 requested_backends = set(metafunc.config.getoption('--backends'))
137 if hasattr(metafunc.function, 'backends'):
137 if hasattr(metafunc.function, 'backends'):
138 # Supported backends by this test function, created from
138 # Supported backends by this test function, created from
139 # pytest.mark.backends
139 # pytest.mark.backends
140 backends = metafunc.function.backends.args
140 backends = metafunc.function.backends.args
141 elif hasattr(metafunc.cls, 'backend_alias'):
141 elif hasattr(metafunc.cls, 'backend_alias'):
142 # Support class attribute "backend_alias", this is mainly
142 # Support class attribute "backend_alias", this is mainly
143 # for legacy reasons for tests not yet using pytest.mark.backends
143 # for legacy reasons for tests not yet using pytest.mark.backends
144 backends = [metafunc.cls.backend_alias]
144 backends = [metafunc.cls.backend_alias]
145 else:
145 else:
146 backends = metafunc.config.getoption('--backends')
146 backends = metafunc.config.getoption('--backends')
147 return requested_backends.intersection(backends)
147 return requested_backends.intersection(backends)
148
148
149
149
150 @pytest.fixture(scope='session', autouse=True)
150 @pytest.fixture(scope='session', autouse=True)
151 def activate_example_rcextensions(request):
151 def activate_example_rcextensions(request):
152 """
152 """
153 Patch in an example rcextensions module which verifies passed in kwargs.
153 Patch in an example rcextensions module which verifies passed in kwargs.
154 """
154 """
155 from rhodecode.tests.other import example_rcextensions
155 from rhodecode.tests.other import example_rcextensions
156
156
157 old_extensions = rhodecode.EXTENSIONS
157 old_extensions = rhodecode.EXTENSIONS
158 rhodecode.EXTENSIONS = example_rcextensions
158 rhodecode.EXTENSIONS = example_rcextensions
159
159
160 @request.addfinalizer
160 @request.addfinalizer
161 def cleanup():
161 def cleanup():
162 rhodecode.EXTENSIONS = old_extensions
162 rhodecode.EXTENSIONS = old_extensions
163
163
164
164
165 @pytest.fixture
165 @pytest.fixture
166 def capture_rcextensions():
166 def capture_rcextensions():
167 """
167 """
168 Returns the recorded calls to entry points in rcextensions.
168 Returns the recorded calls to entry points in rcextensions.
169 """
169 """
170 calls = rhodecode.EXTENSIONS.calls
170 calls = rhodecode.EXTENSIONS.calls
171 calls.clear()
171 calls.clear()
172 # Note: At this moment, it is still the empty dict, but that will
172 # Note: At this moment, it is still the empty dict, but that will
173 # be filled during the test run and since it is a reference this
173 # be filled during the test run and since it is a reference this
174 # is enough to make it work.
174 # is enough to make it work.
175 return calls
175 return calls
176
176
177
177
178 @pytest.fixture(scope='session')
178 @pytest.fixture(scope='session')
179 def http_environ_session():
179 def http_environ_session():
180 """
180 """
181 Allow to use "http_environ" in session scope.
181 Allow to use "http_environ" in session scope.
182 """
182 """
183 return http_environ(
183 return http_environ(
184 http_host_stub=http_host_stub())
184 http_host_stub=http_host_stub())
185
185
186
186
187 @pytest.fixture
187 @pytest.fixture
188 def http_host_stub():
188 def http_host_stub():
189 """
189 """
190 Value of HTTP_HOST in the test run.
190 Value of HTTP_HOST in the test run.
191 """
191 """
192 return 'test.example.com:80'
192 return 'test.example.com:80'
193
193
194
194
195 @pytest.fixture
195 @pytest.fixture
196 def http_environ(http_host_stub):
196 def http_environ(http_host_stub):
197 """
197 """
198 HTTP extra environ keys.
198 HTTP extra environ keys.
199
199
200 User by the test application and as well for setting up the pylons
200 User by the test application and as well for setting up the pylons
201 environment. In the case of the fixture "app" it should be possible
201 environment. In the case of the fixture "app" it should be possible
202 to override this for a specific test case.
202 to override this for a specific test case.
203 """
203 """
204 return {
204 return {
205 'SERVER_NAME': http_host_stub.split(':')[0],
205 'SERVER_NAME': http_host_stub.split(':')[0],
206 'SERVER_PORT': http_host_stub.split(':')[1],
206 'SERVER_PORT': http_host_stub.split(':')[1],
207 'HTTP_HOST': http_host_stub,
207 'HTTP_HOST': http_host_stub,
208 }
208 }
209
209
210
210
211 @pytest.fixture(scope='function')
211 @pytest.fixture(scope='function')
212 def app(request, pylonsapp, http_environ):
212 def app(request, pylonsapp, http_environ):
213 app = TestApp(
213 app = TestApp(
214 pylonsapp,
214 pylonsapp,
215 extra_environ=http_environ)
215 extra_environ=http_environ)
216 if request.cls:
216 if request.cls:
217 request.cls.app = app
217 request.cls.app = app
218 return app
218 return app
219
219
220
220
221 @pytest.fixture(scope='session')
221 @pytest.fixture(scope='session')
222 def app_settings(pylonsapp, pylons_config):
222 def app_settings(pylonsapp, pylons_config):
223 """
223 """
224 Settings dictionary used to create the app.
224 Settings dictionary used to create the app.
225
225
226 Parses the ini file and passes the result through the sanitize and apply
226 Parses the ini file and passes the result through the sanitize and apply
227 defaults mechanism in `rhodecode.config.middleware`.
227 defaults mechanism in `rhodecode.config.middleware`.
228 """
228 """
229 from paste.deploy.loadwsgi import loadcontext, APP
229 from paste.deploy.loadwsgi import loadcontext, APP
230 from rhodecode.config.middleware import (
230 from rhodecode.config.middleware import (
231 sanitize_settings_and_apply_defaults)
231 sanitize_settings_and_apply_defaults)
232 context = loadcontext(APP, 'config:' + pylons_config)
232 context = loadcontext(APP, 'config:' + pylons_config)
233 settings = sanitize_settings_and_apply_defaults(context.config())
233 settings = sanitize_settings_and_apply_defaults(context.config())
234 return settings
234 return settings
235
235
236
236
237 @pytest.fixture(scope='session')
237 @pytest.fixture(scope='session')
238 def db(app_settings):
238 def db(app_settings):
239 """
239 """
240 Initializes the database connection.
240 Initializes the database connection.
241
241
242 It uses the same settings which are used to create the ``pylonsapp`` or
242 It uses the same settings which are used to create the ``pylonsapp`` or
243 ``app`` fixtures.
243 ``app`` fixtures.
244 """
244 """
245 from rhodecode.config.utils import initialize_database
245 from rhodecode.config.utils import initialize_database
246 initialize_database(app_settings)
246 initialize_database(app_settings)
247
247
248
248
249 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
249 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
250
250
251
251
252 def _autologin_user(app, *args):
252 def _autologin_user(app, *args):
253 session = login_user_session(app, *args)
253 session = login_user_session(app, *args)
254 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
254 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
255 return LoginData(csrf_token, session['rhodecode_user'])
255 return LoginData(csrf_token, session['rhodecode_user'])
256
256
257
257
258 @pytest.fixture
258 @pytest.fixture
259 def autologin_user(app):
259 def autologin_user(app):
260 """
260 """
261 Utility fixture which makes sure that the admin user is logged in
261 Utility fixture which makes sure that the admin user is logged in
262 """
262 """
263 return _autologin_user(app)
263 return _autologin_user(app)
264
264
265
265
266 @pytest.fixture
266 @pytest.fixture
267 def autologin_regular_user(app):
267 def autologin_regular_user(app):
268 """
268 """
269 Utility fixture which makes sure that the regular user is logged in
269 Utility fixture which makes sure that the regular user is logged in
270 """
270 """
271 return _autologin_user(
271 return _autologin_user(
272 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
272 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
273
273
274
274
275 @pytest.fixture(scope='function')
275 @pytest.fixture(scope='function')
276 def csrf_token(request, autologin_user):
276 def csrf_token(request, autologin_user):
277 return autologin_user.csrf_token
277 return autologin_user.csrf_token
278
278
279
279
280 @pytest.fixture(scope='function')
280 @pytest.fixture(scope='function')
281 def xhr_header(request):
281 def xhr_header(request):
282 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
282 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
283
283
284
284
285 @pytest.fixture
285 @pytest.fixture
286 def real_crypto_backend(monkeypatch):
286 def real_crypto_backend(monkeypatch):
287 """
287 """
288 Switch the production crypto backend on for this test.
288 Switch the production crypto backend on for this test.
289
289
290 During the test run the crypto backend is replaced with a faster
290 During the test run the crypto backend is replaced with a faster
291 implementation based on the MD5 algorithm.
291 implementation based on the MD5 algorithm.
292 """
292 """
293 monkeypatch.setattr(rhodecode, 'is_test', False)
293 monkeypatch.setattr(rhodecode, 'is_test', False)
294
294
295
295
296 @pytest.fixture(scope='class')
296 @pytest.fixture(scope='class')
297 def index_location(request, pylonsapp):
297 def index_location(request, pylonsapp):
298 index_location = pylonsapp.config['app_conf']['search.location']
298 index_location = pylonsapp.config['app_conf']['search.location']
299 if request.cls:
299 if request.cls:
300 request.cls.index_location = index_location
300 request.cls.index_location = index_location
301 return index_location
301 return index_location
302
302
303
303
304 @pytest.fixture(scope='session', autouse=True)
304 @pytest.fixture(scope='session', autouse=True)
305 def tests_tmp_path(request):
305 def tests_tmp_path(request):
306 """
306 """
307 Create temporary directory to be used during the test session.
307 Create temporary directory to be used during the test session.
308 """
308 """
309 if not os.path.exists(TESTS_TMP_PATH):
309 if not os.path.exists(TESTS_TMP_PATH):
310 os.makedirs(TESTS_TMP_PATH)
310 os.makedirs(TESTS_TMP_PATH)
311
311
312 if not request.config.getoption('--keep-tmp-path'):
312 if not request.config.getoption('--keep-tmp-path'):
313 @request.addfinalizer
313 @request.addfinalizer
314 def remove_tmp_path():
314 def remove_tmp_path():
315 shutil.rmtree(TESTS_TMP_PATH)
315 shutil.rmtree(TESTS_TMP_PATH)
316
316
317 return TESTS_TMP_PATH
317 return TESTS_TMP_PATH
318
318
319
319
320 @pytest.fixture(scope='session', autouse=True)
320 @pytest.fixture(scope='session', autouse=True)
321 def patch_pyro_request_scope_proxy_factory(request):
321 def patch_pyro_request_scope_proxy_factory(request):
322 """
322 """
323 Patch the pyro proxy factory to always use the same dummy request object
323 Patch the pyro proxy factory to always use the same dummy request object
324 when under test. This will return the same pyro proxy on every call.
324 when under test. This will return the same pyro proxy on every call.
325 """
325 """
326 dummy_request = pyramid.testing.DummyRequest()
326 dummy_request = pyramid.testing.DummyRequest()
327
327
328 def mocked_call(self, request=None):
328 def mocked_call(self, request=None):
329 return self.getProxy(request=dummy_request)
329 return self.getProxy(request=dummy_request)
330
330
331 patcher = mock.patch(
331 patcher = mock.patch(
332 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
332 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
333 new=mocked_call)
333 new=mocked_call)
334 patcher.start()
334 patcher.start()
335
335
336 @request.addfinalizer
336 @request.addfinalizer
337 def undo_patching():
337 def undo_patching():
338 patcher.stop()
338 patcher.stop()
339
339
340
340
341 @pytest.fixture
341 @pytest.fixture
342 def test_repo_group(request):
342 def test_repo_group(request):
343 """
343 """
344 Create a temporary repository group, and destroy it after
344 Create a temporary repository group, and destroy it after
345 usage automatically
345 usage automatically
346 """
346 """
347 fixture = Fixture()
347 fixture = Fixture()
348 repogroupid = 'test_repo_group_%s' % int(time.time())
348 repogroupid = 'test_repo_group_%s' % int(time.time())
349 repo_group = fixture.create_repo_group(repogroupid)
349 repo_group = fixture.create_repo_group(repogroupid)
350
350
351 def _cleanup():
351 def _cleanup():
352 fixture.destroy_repo_group(repogroupid)
352 fixture.destroy_repo_group(repogroupid)
353
353
354 request.addfinalizer(_cleanup)
354 request.addfinalizer(_cleanup)
355 return repo_group
355 return repo_group
356
356
357
357
358 @pytest.fixture
358 @pytest.fixture
359 def test_user_group(request):
359 def test_user_group(request):
360 """
360 """
361 Create a temporary user group, and destroy it after
361 Create a temporary user group, and destroy it after
362 usage automatically
362 usage automatically
363 """
363 """
364 fixture = Fixture()
364 fixture = Fixture()
365 usergroupid = 'test_user_group_%s' % int(time.time())
365 usergroupid = 'test_user_group_%s' % int(time.time())
366 user_group = fixture.create_user_group(usergroupid)
366 user_group = fixture.create_user_group(usergroupid)
367
367
368 def _cleanup():
368 def _cleanup():
369 fixture.destroy_user_group(user_group)
369 fixture.destroy_user_group(user_group)
370
370
371 request.addfinalizer(_cleanup)
371 request.addfinalizer(_cleanup)
372 return user_group
372 return user_group
373
373
374
374
375 @pytest.fixture(scope='session')
375 @pytest.fixture(scope='session')
376 def test_repo(request):
376 def test_repo(request):
377 container = TestRepoContainer()
377 container = TestRepoContainer()
378 request.addfinalizer(container._cleanup)
378 request.addfinalizer(container._cleanup)
379 return container
379 return container
380
380
381
381
382 class TestRepoContainer(object):
382 class TestRepoContainer(object):
383 """
383 """
384 Container for test repositories which are used read only.
384 Container for test repositories which are used read only.
385
385
386 Repositories will be created on demand and re-used during the lifetime
386 Repositories will be created on demand and re-used during the lifetime
387 of this object.
387 of this object.
388
388
389 Usage to get the svn test repository "minimal"::
389 Usage to get the svn test repository "minimal"::
390
390
391 test_repo = TestContainer()
391 test_repo = TestContainer()
392 repo = test_repo('minimal', 'svn')
392 repo = test_repo('minimal', 'svn')
393
393
394 """
394 """
395
395
396 dump_extractors = {
396 dump_extractors = {
397 'git': utils.extract_git_repo_from_dump,
397 'git': utils.extract_git_repo_from_dump,
398 'hg': utils.extract_hg_repo_from_dump,
398 'hg': utils.extract_hg_repo_from_dump,
399 'svn': utils.extract_svn_repo_from_dump,
399 'svn': utils.extract_svn_repo_from_dump,
400 }
400 }
401
401
402 def __init__(self):
402 def __init__(self):
403 self._cleanup_repos = []
403 self._cleanup_repos = []
404 self._fixture = Fixture()
404 self._fixture = Fixture()
405 self._repos = {}
405 self._repos = {}
406
406
407 def __call__(self, dump_name, backend_alias):
407 def __call__(self, dump_name, backend_alias):
408 key = (dump_name, backend_alias)
408 key = (dump_name, backend_alias)
409 if key not in self._repos:
409 if key not in self._repos:
410 repo = self._create_repo(dump_name, backend_alias)
410 repo = self._create_repo(dump_name, backend_alias)
411 self._repos[key] = repo.repo_id
411 self._repos[key] = repo.repo_id
412 return Repository.get(self._repos[key])
412 return Repository.get(self._repos[key])
413
413
414 def _create_repo(self, dump_name, backend_alias):
414 def _create_repo(self, dump_name, backend_alias):
415 repo_name = '%s-%s' % (backend_alias, dump_name)
415 repo_name = '%s-%s' % (backend_alias, dump_name)
416 backend_class = get_backend(backend_alias)
416 backend_class = get_backend(backend_alias)
417 dump_extractor = self.dump_extractors[backend_alias]
417 dump_extractor = self.dump_extractors[backend_alias]
418 repo_path = dump_extractor(dump_name, repo_name)
418 repo_path = dump_extractor(dump_name, repo_name)
419 vcs_repo = backend_class(repo_path)
419 vcs_repo = backend_class(repo_path)
420 repo2db_mapper({repo_name: vcs_repo})
420 repo2db_mapper({repo_name: vcs_repo})
421 repo = RepoModel().get_by_repo_name(repo_name)
421 repo = RepoModel().get_by_repo_name(repo_name)
422 self._cleanup_repos.append(repo_name)
422 self._cleanup_repos.append(repo_name)
423 return repo
423 return repo
424
424
425 def _cleanup(self):
425 def _cleanup(self):
426 for repo_name in reversed(self._cleanup_repos):
426 for repo_name in reversed(self._cleanup_repos):
427 self._fixture.destroy_repo(repo_name)
427 self._fixture.destroy_repo(repo_name)
428
428
429
429
430 @pytest.fixture
430 @pytest.fixture
431 def backend(request, backend_alias, pylonsapp, test_repo):
431 def backend(request, backend_alias, pylonsapp, test_repo):
432 """
432 """
433 Parametrized fixture which represents a single backend implementation.
433 Parametrized fixture which represents a single backend implementation.
434
434
435 It respects the option `--backends` to focus the test run on specific
435 It respects the option `--backends` to focus the test run on specific
436 backend implementations.
436 backend implementations.
437
437
438 It also supports `pytest.mark.xfail_backends` to mark tests as failing
438 It also supports `pytest.mark.xfail_backends` to mark tests as failing
439 for specific backends. This is intended as a utility for incremental
439 for specific backends. This is intended as a utility for incremental
440 development of a new backend implementation.
440 development of a new backend implementation.
441 """
441 """
442 if backend_alias not in request.config.getoption('--backends'):
442 if backend_alias not in request.config.getoption('--backends'):
443 pytest.skip("Backend %s not selected." % (backend_alias, ))
443 pytest.skip("Backend %s not selected." % (backend_alias, ))
444
444
445 utils.check_xfail_backends(request.node, backend_alias)
445 utils.check_xfail_backends(request.node, backend_alias)
446 utils.check_skip_backends(request.node, backend_alias)
446 utils.check_skip_backends(request.node, backend_alias)
447
447
448 repo_name = 'vcs_test_%s' % (backend_alias, )
448 repo_name = 'vcs_test_%s' % (backend_alias, )
449 backend = Backend(
449 backend = Backend(
450 alias=backend_alias,
450 alias=backend_alias,
451 repo_name=repo_name,
451 repo_name=repo_name,
452 test_name=request.node.name,
452 test_name=request.node.name,
453 test_repo_container=test_repo)
453 test_repo_container=test_repo)
454 request.addfinalizer(backend.cleanup)
454 request.addfinalizer(backend.cleanup)
455 return backend
455 return backend
456
456
457
457
458 @pytest.fixture
458 @pytest.fixture
459 def backend_git(request, pylonsapp, test_repo):
459 def backend_git(request, pylonsapp, test_repo):
460 return backend(request, 'git', pylonsapp, test_repo)
460 return backend(request, 'git', pylonsapp, test_repo)
461
461
462
462
463 @pytest.fixture
463 @pytest.fixture
464 def backend_hg(request, pylonsapp, test_repo):
464 def backend_hg(request, pylonsapp, test_repo):
465 return backend(request, 'hg', pylonsapp, test_repo)
465 return backend(request, 'hg', pylonsapp, test_repo)
466
466
467
467
468 @pytest.fixture
468 @pytest.fixture
469 def backend_svn(request, pylonsapp, test_repo):
469 def backend_svn(request, pylonsapp, test_repo):
470 return backend(request, 'svn', pylonsapp, test_repo)
470 return backend(request, 'svn', pylonsapp, test_repo)
471
471
472
472
473 @pytest.fixture
473 @pytest.fixture
474 def backend_random(backend_git):
474 def backend_random(backend_git):
475 """
475 """
476 Use this to express that your tests need "a backend.
476 Use this to express that your tests need "a backend.
477
477
478 A few of our tests need a backend, so that we can run the code. This
478 A few of our tests need a backend, so that we can run the code. This
479 fixture is intended to be used for such cases. It will pick one of the
479 fixture is intended to be used for such cases. It will pick one of the
480 backends and run the tests.
480 backends and run the tests.
481
481
482 The fixture `backend` would run the test multiple times for each
482 The fixture `backend` would run the test multiple times for each
483 available backend which is a pure waste of time if the test is
483 available backend which is a pure waste of time if the test is
484 independent of the backend type.
484 independent of the backend type.
485 """
485 """
486 # TODO: johbo: Change this to pick a random backend
486 # TODO: johbo: Change this to pick a random backend
487 return backend_git
487 return backend_git
488
488
489
489
490 @pytest.fixture
490 @pytest.fixture
491 def backend_stub(backend_git):
491 def backend_stub(backend_git):
492 """
492 """
493 Use this to express that your tests need a backend stub
493 Use this to express that your tests need a backend stub
494
494
495 TODO: mikhail: Implement a real stub logic instead of returning
495 TODO: mikhail: Implement a real stub logic instead of returning
496 a git backend
496 a git backend
497 """
497 """
498 return backend_git
498 return backend_git
499
499
500
500
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub.

    Delegates to `backend_stub` and hands back a freshly created repository.
    """
    return backend_stub.create_repo()
507
507
508
508
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    # Class-level defaults kept for backward compatibility; per-instance
    # state is initialized in `__init__` (see note there).
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # NOTE: `_master_repo`/`_commit_ids` used to exist only as class
        # attributes; the mutable dict was therefore shared between all
        # Backend instances. Shadow them with instance attributes so each
        # instance tracks its own commits and master repository.
        self._master_repo = None
        self._commit_ids = {}
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Resolve a named test repository for this backend type; created at
        # most once per test session by the container.
        return self._test_repo_container(key, self.alias)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        # Default to `number_of_commits` generated commits when no explicit
        # commit descriptions are given.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # Disable hooks so the test pull does not trigger side effects.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repository and record the fork for cleanup."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Reserve and return a fresh repository name (scheduled for cleanup)."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name plus the
        # number of repositories created so far.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` with `content` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Flip the `enable_downloads` flag on the current repository."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order so forks go before their origin.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
676
676
677
677
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    test_repo_name = 'vcs_test_%s' % (backend_alias, )
    test_repo_path = os.path.join(tests_tmp_path, test_repo_name)
    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=test_repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
704
704
705
705
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for a `vcsbackend` fixed to the git backend."""
    return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
709
709
710
710
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for a `vcsbackend` fixed to the mercurial backend."""
    return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
714
714
715
715
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    """Shortcut for a `vcsbackend` fixed to the subversion backend."""
    return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
719
719
720
720
@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    Unlike `vcsbackend`, which repeats the test once per available vcs
    backend, this fixture runs it only once — use it when the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git
732
732
733
733
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
742
742
743
743
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Look up a named test repository and return its vcs-level instance.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        return get_backend(self.alias)(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        """Create (optionally clone) a vcs repository and record it for cleanup."""
        new_name = self._next_repo_name()
        self._repo_path = get_new_dir(new_name)
        backend_class = get_backend(self.alias)
        clone_source = _clone_repo.path if _clone_repo else None
        new_repo = backend_class(
            self._repo_path, create=True, src_url=clone_source)
        self._cleanup_repos.append(new_repo)

        # When no explicit commits are given, synthesize `number_of_commits`
        # simple message-only commits.
        commit_descriptions = commits or [
            {'message': 'Commit %s of %s' % (x, new_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(new_repo, commit_descriptions)
        return new_repo

    def clone_repo(self, repo):
        """Return a fresh clone of `repo`, recorded for cleanup."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Remove every repository directory created during the test.
        for created_repo in self._cleanup_repos:
            shutil.rmtree(created_repo.path)

    def new_repo_path(self):
        """Reserve a fresh repository directory and return its path."""
        self._repo_path = get_new_dir(self._next_repo_name())
        return self._repo_path

    def _next_repo_name(self):
        # Unique, filesystem-safe name: sanitized test name plus a counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single file into `repo` via its in-memory commit API."""
        in_memory = repo.in_memory_commit
        in_memory.add(FileNode(filename, content=content))
        in_memory.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` into the current repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
820
820
821
821
822 def _add_commits_to_repo(vcs_repo, commits):
822 def _add_commits_to_repo(vcs_repo, commits):
823 commit_ids = {}
823 commit_ids = {}
824 if not commits:
824 if not commits:
825 return commit_ids
825 return commit_ids
826
826
827 imc = vcs_repo.in_memory_commit
827 imc = vcs_repo.in_memory_commit
828 commit = None
828 commit = None
829
829
830 for idx, commit in enumerate(commits):
830 for idx, commit in enumerate(commits):
831 message = unicode(commit.get('message', 'Commit %s' % idx))
831 message = unicode(commit.get('message', 'Commit %s' % idx))
832
832
833 for node in commit.get('added', []):
833 for node in commit.get('added', []):
834 imc.add(FileNode(node.path, content=node.content))
834 imc.add(FileNode(node.path, content=node.content))
835 for node in commit.get('changed', []):
835 for node in commit.get('changed', []):
836 imc.change(FileNode(node.path, content=node.content))
836 imc.change(FileNode(node.path, content=node.content))
837 for node in commit.get('removed', []):
837 for node in commit.get('removed', []):
838 imc.remove(FileNode(node.path))
838 imc.remove(FileNode(node.path))
839
839
840 parents = [
840 parents = [
841 vcs_repo.get_commit(commit_id=commit_ids[p])
841 vcs_repo.get_commit(commit_id=commit_ids[p])
842 for p in commit.get('parents', [])]
842 for p in commit.get('parents', [])]
843
843
844 operations = ('added', 'changed', 'removed')
844 operations = ('added', 'changed', 'removed')
845 if not any((commit.get(o) for o in operations)):
845 if not any((commit.get(o) for o in operations)):
846 imc.add(FileNode('file_%s' % idx, content=message))
846 imc.add(FileNode('file_%s' % idx, content=message))
847
847
848 commit = imc.commit(
848 commit = imc.commit(
849 message=message,
849 message=message,
850 author=unicode(commit.get('author', 'Automatic')),
850 author=unicode(commit.get('author', 'Automatic')),
851 date=commit.get('date'),
851 date=commit.get('date'),
852 branch=commit.get('branch'),
852 branch=commit.get('branch'),
853 parents=parents)
853 parents=parents)
854
854
855 commit_ids[commit.message] = commit.raw_id
855 commit_ids[commit.message] = commit.raw_id
856
856
857 return commit_ids
857 return commit_ids
858
858
859
859
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository

    Returns a :class:`RepoServer` whose spawned servers are terminated when
    the test finishes.
    """
    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server
869
869
870
870
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL of the last served repository; `None` until `serve` is called.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start `svnserve` for `vcsrepo`; only svn repositories are supported."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        server_proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(server_proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        # Terminate every server process started by `serve`.
        for server_proc in self._cleanup_servers:
            server_proc.terminate()
896
896
897
897
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    util = PRTestUtility(backend)

    @request.addfinalizer
    def cleanup():
        util.cleanup()

    return util
916
916
917
917
918 class PRTestUtility(object):
918 class PRTestUtility(object):
919
919
920 pull_request = None
920 pull_request = None
921 pull_request_id = None
921 pull_request_id = None
922 mergeable_patcher = None
922 mergeable_patcher = None
923 mergeable_mock = None
923 mergeable_mock = None
924 notification_patcher = None
924 notification_patcher = None
925
925
926 def __init__(self, backend):
926 def __init__(self, backend):
927 self.backend = backend
927 self.backend = backend
928
928
929 def create_pull_request(
929 def create_pull_request(
930 self, commits=None, target_head=None, source_head=None,
930 self, commits=None, target_head=None, source_head=None,
931 revisions=None, approved=False, author=None, mergeable=False,
931 revisions=None, approved=False, author=None, mergeable=False,
932 enable_notifications=True, name_suffix=u'', reviewers=None,
932 enable_notifications=True, name_suffix=u'', reviewers=None,
933 title=u"Test", description=u"Description"):
933 title=u"Test", description=u"Description"):
934 self.set_mergeable(mergeable)
934 self.set_mergeable(mergeable)
935 if not enable_notifications:
935 if not enable_notifications:
936 # mock notification side effect
936 # mock notification side effect
937 self.notification_patcher = mock.patch(
937 self.notification_patcher = mock.patch(
938 'rhodecode.model.notification.NotificationModel.create')
938 'rhodecode.model.notification.NotificationModel.create')
939 self.notification_patcher.start()
939 self.notification_patcher.start()
940
940
941 if not self.pull_request:
941 if not self.pull_request:
942 if not commits:
942 if not commits:
943 commits = [
943 commits = [
944 {'message': 'c1'},
944 {'message': 'c1'},
945 {'message': 'c2'},
945 {'message': 'c2'},
946 {'message': 'c3'},
946 {'message': 'c3'},
947 ]
947 ]
948 target_head = 'c1'
948 target_head = 'c1'
949 source_head = 'c2'
949 source_head = 'c2'
950 revisions = ['c2']
950 revisions = ['c2']
951
951
952 self.commit_ids = self.backend.create_master_repo(commits)
952 self.commit_ids = self.backend.create_master_repo(commits)
953 self.target_repository = self.backend.create_repo(
953 self.target_repository = self.backend.create_repo(
954 heads=[target_head], name_suffix=name_suffix)
954 heads=[target_head], name_suffix=name_suffix)
955 self.source_repository = self.backend.create_repo(
955 self.source_repository = self.backend.create_repo(
956 heads=[source_head], name_suffix=name_suffix)
956 heads=[source_head], name_suffix=name_suffix)
957 self.author = author or UserModel().get_by_username(
957 self.author = author or UserModel().get_by_username(
958 TEST_USER_ADMIN_LOGIN)
958 TEST_USER_ADMIN_LOGIN)
959
959
960 model = PullRequestModel()
960 model = PullRequestModel()
961 self.create_parameters = {
961 self.create_parameters = {
962 'created_by': self.author,
962 'created_by': self.author,
963 'source_repo': self.source_repository.repo_name,
963 'source_repo': self.source_repository.repo_name,
964 'source_ref': self._default_branch_reference(source_head),
964 'source_ref': self._default_branch_reference(source_head),
965 'target_repo': self.target_repository.repo_name,
965 'target_repo': self.target_repository.repo_name,
966 'target_ref': self._default_branch_reference(target_head),
966 'target_ref': self._default_branch_reference(target_head),
967 'revisions': [self.commit_ids[r] for r in revisions],
967 'revisions': [self.commit_ids[r] for r in revisions],
968 'reviewers': reviewers or self._get_reviewers(),
968 'reviewers': reviewers or self._get_reviewers(),
969 'title': title,
969 'title': title,
970 'description': description,
970 'description': description,
971 }
971 }
972 self.pull_request = model.create(**self.create_parameters)
972 self.pull_request = model.create(**self.create_parameters)
973 assert model.get_versions(self.pull_request) == []
973 assert model.get_versions(self.pull_request) == []
974
974
975 self.pull_request_id = self.pull_request.pull_request_id
975 self.pull_request_id = self.pull_request.pull_request_id
976
976
977 if approved:
977 if approved:
978 self.approve()
978 self.approve()
979
979
980 Session().add(self.pull_request)
980 Session().add(self.pull_request)
981 Session().commit()
981 Session().commit()
982
982
983 return self.pull_request
983 return self.pull_request
984
984
985 def approve(self):
985 def approve(self):
986 self.create_status_votes(
986 self.create_status_votes(
987 ChangesetStatus.STATUS_APPROVED,
987 ChangesetStatus.STATUS_APPROVED,
988 *self.pull_request.reviewers)
988 *self.pull_request.reviewers)
989
989
990 def close(self):
990 def close(self):
991 PullRequestModel().close_pull_request(self.pull_request, self.author)
991 PullRequestModel().close_pull_request(self.pull_request, self.author)
992
992
993 def _default_branch_reference(self, commit_message):
993 def _default_branch_reference(self, commit_message):
994 reference = '%s:%s:%s' % (
994 reference = '%s:%s:%s' % (
995 'branch',
995 'branch',
996 self.backend.default_branch_name,
996 self.backend.default_branch_name,
997 self.commit_ids[commit_message])
997 self.commit_ids[commit_message])
998 return reference
998 return reference
999
999
1000 def _get_reviewers(self):
1000 def _get_reviewers(self):
1001 model = UserModel()
1001 model = UserModel()
1002 return [
1002 return [
1003 model.get_by_username(TEST_USER_REGULAR_LOGIN),
1003 model.get_by_username(TEST_USER_REGULAR_LOGIN),
1004 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
1004 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
1005 ]
1005 ]
1006
1006
1007 def update_source_repository(self, head=None):
1007 def update_source_repository(self, head=None):
1008 heads = [head or 'c3']
1008 heads = [head or 'c3']
1009 self.backend.pull_heads(self.source_repository, heads=heads)
1009 self.backend.pull_heads(self.source_repository, heads=heads)
1010
1010
1011 def add_one_commit(self, head=None):
1011 def add_one_commit(self, head=None):
1012 self.update_source_repository(head=head)
1012 self.update_source_repository(head=head)
1013 old_commit_ids = set(self.pull_request.revisions)
1013 old_commit_ids = set(self.pull_request.revisions)
1014 PullRequestModel().update_commits(self.pull_request)
1014 PullRequestModel().update_commits(self.pull_request)
1015 commit_ids = set(self.pull_request.revisions)
1015 commit_ids = set(self.pull_request.revisions)
1016 new_commit_ids = commit_ids - old_commit_ids
1016 new_commit_ids = commit_ids - old_commit_ids
1017 assert len(new_commit_ids) == 1
1017 assert len(new_commit_ids) == 1
1018 return new_commit_ids.pop()
1018 return new_commit_ids.pop()
1019
1019
1020 def remove_one_commit(self):
1020 def remove_one_commit(self):
1021 assert len(self.pull_request.revisions) == 2
1021 assert len(self.pull_request.revisions) == 2
1022 source_vcs = self.source_repository.scm_instance()
1022 source_vcs = self.source_repository.scm_instance()
1023 removed_commit_id = source_vcs.commit_ids[-1]
1023 removed_commit_id = source_vcs.commit_ids[-1]
1024
1024
1025 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1025 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1026 # remove the if once that's sorted out.
1026 # remove the if once that's sorted out.
1027 if self.backend.alias == "git":
1027 if self.backend.alias == "git":
1028 kwargs = {'branch_name': self.backend.default_branch_name}
1028 kwargs = {'branch_name': self.backend.default_branch_name}
1029 else:
1029 else:
1030 kwargs = {}
1030 kwargs = {}
1031 source_vcs.strip(removed_commit_id, **kwargs)
1031 source_vcs.strip(removed_commit_id, **kwargs)
1032
1032
1033 PullRequestModel().update_commits(self.pull_request)
1033 PullRequestModel().update_commits(self.pull_request)
1034 assert len(self.pull_request.revisions) == 1
1034 assert len(self.pull_request.revisions) == 1
1035 return removed_commit_id
1035 return removed_commit_id
1036
1036
1037 def create_comment(self, linked_to=None):
1037 def create_comment(self, linked_to=None):
1038 comment = ChangesetCommentsModel().create(
1038 comment = ChangesetCommentsModel().create(
1039 text=u"Test comment",
1039 text=u"Test comment",
1040 repo=self.target_repository.repo_name,
1040 repo=self.target_repository.repo_name,
1041 user=self.author,
1041 user=self.author,
1042 pull_request=self.pull_request)
1042 pull_request=self.pull_request)
1043 assert comment.pull_request_version_id is None
1043 assert comment.pull_request_version_id is None
1044
1044
1045 if linked_to:
1045 if linked_to:
1046 PullRequestModel()._link_comments_to_version(linked_to)
1046 PullRequestModel()._link_comments_to_version(linked_to)
1047
1047
1048 return comment
1048 return comment
1049
1049
1050 def create_inline_comment(
1050 def create_inline_comment(
1051 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1051 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1052 comment = ChangesetCommentsModel().create(
1052 comment = ChangesetCommentsModel().create(
1053 text=u"Test comment",
1053 text=u"Test comment",
1054 repo=self.target_repository.repo_name,
1054 repo=self.target_repository.repo_name,
1055 user=self.author,
1055 user=self.author,
1056 line_no=line_no,
1056 line_no=line_no,
1057 f_path=file_path,
1057 f_path=file_path,
1058 pull_request=self.pull_request)
1058 pull_request=self.pull_request)
1059 assert comment.pull_request_version_id is None
1059 assert comment.pull_request_version_id is None
1060
1060
1061 if linked_to:
1061 if linked_to:
1062 PullRequestModel()._link_comments_to_version(linked_to)
1062 PullRequestModel()._link_comments_to_version(linked_to)
1063
1063
1064 return comment
1064 return comment
1065
1065
1066 def create_version_of_pull_request(self):
1066 def create_version_of_pull_request(self):
1067 pull_request = self.create_pull_request()
1067 pull_request = self.create_pull_request()
1068 version = PullRequestModel()._create_version_from_snapshot(
1068 version = PullRequestModel()._create_version_from_snapshot(
1069 pull_request)
1069 pull_request)
1070 return version
1070 return version
1071
1071
1072 def create_status_votes(self, status, *reviewers):
1072 def create_status_votes(self, status, *reviewers):
1073 for reviewer in reviewers:
1073 for reviewer in reviewers:
1074 ChangesetStatusModel().set_status(
1074 ChangesetStatusModel().set_status(
1075 repo=self.pull_request.target_repo,
1075 repo=self.pull_request.target_repo,
1076 status=status,
1076 status=status,
1077 user=reviewer.user_id,
1077 user=reviewer.user_id,
1078 pull_request=self.pull_request)
1078 pull_request=self.pull_request)
1079
1079
1080 def set_mergeable(self, value):
1080 def set_mergeable(self, value):
1081 if not self.mergeable_patcher:
1081 if not self.mergeable_patcher:
1082 self.mergeable_patcher = mock.patch.object(
1082 self.mergeable_patcher = mock.patch.object(
1083 VcsSettingsModel, 'get_general_settings')
1083 VcsSettingsModel, 'get_general_settings')
1084 self.mergeable_mock = self.mergeable_patcher.start()
1084 self.mergeable_mock = self.mergeable_patcher.start()
1085 self.mergeable_mock.return_value = {
1085 self.mergeable_mock.return_value = {
1086 'rhodecode_pr_merge_enabled': value}
1086 'rhodecode_pr_merge_enabled': value}
1087
1087
1088 def cleanup(self):
1088 def cleanup(self):
1089 # In case the source repository is already cleaned up, the pull
1089 # In case the source repository is already cleaned up, the pull
1090 # request will already be deleted.
1090 # request will already be deleted.
1091 pull_request = PullRequest().get(self.pull_request_id)
1091 pull_request = PullRequest().get(self.pull_request_id)
1092 if pull_request:
1092 if pull_request:
1093 PullRequestModel().delete(pull_request)
1093 PullRequestModel().delete(pull_request)
1094 Session().commit()
1094 Session().commit()
1095
1095
1096 if self.notification_patcher:
1096 if self.notification_patcher:
1097 self.notification_patcher.stop()
1097 self.notification_patcher.stop()
1098
1098
1099 if self.mergeable_patcher:
1099 if self.mergeable_patcher:
1100 self.mergeable_patcher.stop()
1100 self.mergeable_patcher.stop()
1101
1101
1102
1102
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1110
1110
1111
1111
@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1119
1119
1120
1120
@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1129
1129
1130
1130
1131 # TODO: johbo: Split this up into utilities per domain or something similar
1131 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """Creates users, user groups, repo groups and permission grants for
    tests and tracks them, so that ``cleanup()`` can revoke the grants
    and destroy the created entities again.
    """

    def __init__(self, test_name="test"):
        # ``test_name`` prefixes every generated name, so leftovers of a
        # failed run can be traced back to the test that created them.
        self._test_name = test_name
        self.fixture = Fixture()
        # Ids of created entities, consumed by the _cleanup_* helpers.
        self.repo_group_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, subject_id) pairs of granted permissions.
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        # (user_name, permission_name) pairs of global permissions.
        self.user_permissions = []

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group with a generated, run-unique name."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a generated, run-unique name.

        Extra keyword arguments are passed through to the fixture.
        """
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        """Create a user group and optionally add *members* to it."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # NOTE(review): this disables permission inheritance and records
        # the pair for later revocation, but — unlike
        # revoke_user_permission below — it does not visibly call
        # UserModel().grant_perm here; confirm granting happens elsewhere.
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant *user* a permission on *repo_group*; tracked for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant *user_group* a permission on *repo_group*; tracked."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant *user* a permission on *repo*; tracked for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant *user_group* a permission on *repo*; tracked."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant *user* a permission on *target_user_group*; tracked."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant *user_group* a permission on *target_user_group*; tracked."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and restore default inheritance."""
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        # Toggle whether the user inherits the default user's permissions.
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Remove everything this utility created.

        Order matters: permissions first, then groups, then users.
        """
        self._cleanup_permissions()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        # Revoke every tracked permission grant, per grant type.
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            # Nested groups must be destroyed before their parents, so
            # sort by path depth, deepest first. Uses the Python 2 only
            # `cmp` builtin / `sorted(cmp=...)` protocol.
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            # Same deepest-first ordering as _cleanup_repo_groups; also
            # Python 2 only (`cmp`).
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        # Users are destroyed last; groups referencing them are gone.
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1317
1317
1318
1318
1319 # TODO: Think about moving this into a pytest-pyro package and make it a
1319 # TODO: Think about moving this into a pytest-pyro package and make it a
1320 # pytest plugin
1320 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    Pyro4 attaches this information as the attribute `_pyroTraceback`
    to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    excinfo = call.excinfo
    if excinfo:
        _add_pyro_remote_traceback(report, excinfo.value)
1333
1333
1334
1334
1335 def _add_pyro_remote_traceback(report, exc):
1335 def _add_pyro_remote_traceback(report, exc):
1336 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1336 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1337
1337
1338 if pyro_traceback:
1338 if pyro_traceback:
1339 traceback = ''.join(pyro_traceback)
1339 traceback = ''.join(pyro_traceback)
1340 section = 'Pyro4 remote traceback ' + report.when
1340 section = 'Pyro4 remote traceback ' + report.when
1341 report.sections.append((section, traceback))
1341 report.sections.append((section, traceback))
1342
1342
1343
1343
@pytest.fixture(scope='session')
def testrun():
    """Session-wide metadata identifying this test run."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1351
1351
1352
1352
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Snapshot the vcsserver process' memory before the test runs;
    # run_gc() first so numbers are comparable to the "after" snapshot.
    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    # Memory footprint of the test runner process itself.
    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Runs after the test: take the "after" snapshots and ship the
        # whole batch to Appenlight in one request.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1425
1425
1426
1426
class AppenlightClient():
    """Minimal client that batches log entries and tag measurements and
    POSTs them to the Appenlight API (protocol version 0.5).

    NOTE: uses Python 2 ``print`` statements; this module is py2-only.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        # url: base API url; the protocol version query is appended.
        # api_key: sent as the X-appenlight-api-key request header.
        # add_server: stamp each entry with this host's FQDN.
        # add_timestamp: stamp each entry with the current UTC time.
        # namespace: typically the pytest node id of the test.
        # request: id correlating all entries of one test run.
        # testrun: metadata dict from the ``testrun`` fixture; must
        #   contain 'start' and 'timestamp' before send_stats() is called.
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Record a measurement taken before the test ran.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Record a measurement taken after the test ran.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in common default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Send all queued entries plus collected tags in one POST.

        Raises Exception when the API does not answer with HTTP 200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # Only values supporting subtraction get a delta; a
                # missing "after" value or non-numeric tag is skipped.
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            # Dump the payload and response to aid debugging the failure.
            pprint.pprint(self.stats)
            print response.headers
            print response.text
            raise Exception('Sending to appenlight failed')
1496
1496
1497
1497
1498 @pytest.fixture
1498 @pytest.fixture
1499 def gist_util(request, pylonsapp):
1499 def gist_util(request, pylonsapp):
1500 """
1500 """
1501 Provides a wired instance of `GistUtility` with integrated cleanup.
1501 Provides a wired instance of `GistUtility` with integrated cleanup.
1502 """
1502 """
1503 utility = GistUtility()
1503 utility = GistUtility()
1504 request.addfinalizer(utility.cleanup)
1504 request.addfinalizer(utility.cleanup)
1505 return utility
1505 return utility
1506
1506
1507
1507
1508 class GistUtility(object):
1508 class GistUtility(object):
1509 def __init__(self):
1509 def __init__(self):
1510 self.fixture = Fixture()
1510 self.fixture = Fixture()
1511 self.gist_ids = []
1511 self.gist_ids = []
1512
1512
1513 def create_gist(self, **kwargs):
1513 def create_gist(self, **kwargs):
1514 gist = self.fixture.create_gist(**kwargs)
1514 gist = self.fixture.create_gist(**kwargs)
1515 self.gist_ids.append(gist.gist_id)
1515 self.gist_ids.append(gist.gist_id)
1516 return gist
1516 return gist
1517
1517
1518 def cleanup(self):
1518 def cleanup(self):
1519 for id_ in self.gist_ids:
1519 for id_ in self.gist_ids:
1520 self.fixture.destroy_gists(str(id_))
1520 self.fixture.destroy_gists(str(id_))
1521
1521
1522
1522
1523 @pytest.fixture
1523 @pytest.fixture
1524 def enabled_backends(request):
1524 def enabled_backends(request):
1525 backends = request.config.option.backends
1525 backends = request.config.option.backends
1526 return backends[:]
1526 return backends[:]
1527
1527
1528
1528
1529 @pytest.fixture
1529 @pytest.fixture
1530 def settings_util(request):
1530 def settings_util(request):
1531 """
1531 """
1532 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1532 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1533 """
1533 """
1534 utility = SettingsUtility()
1534 utility = SettingsUtility()
1535 request.addfinalizer(utility.cleanup)
1535 request.addfinalizer(utility.cleanup)
1536 return utility
1536 return utility
1537
1537
1538
1538
1539 class SettingsUtility(object):
1539 class SettingsUtility(object):
1540 def __init__(self):
1540 def __init__(self):
1541 self.rhodecode_ui_ids = []
1541 self.rhodecode_ui_ids = []
1542 self.rhodecode_setting_ids = []
1542 self.rhodecode_setting_ids = []
1543 self.repo_rhodecode_ui_ids = []
1543 self.repo_rhodecode_ui_ids = []
1544 self.repo_rhodecode_setting_ids = []
1544 self.repo_rhodecode_setting_ids = []
1545
1545
1546 def create_repo_rhodecode_ui(
1546 def create_repo_rhodecode_ui(
1547 self, repo, section, value, key=None, active=True, cleanup=True):
1547 self, repo, section, value, key=None, active=True, cleanup=True):
1548 key = key or hashlib.sha1(
1548 key = key or hashlib.sha1(
1549 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1549 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1550
1550
1551 setting = RepoRhodeCodeUi()
1551 setting = RepoRhodeCodeUi()
1552 setting.repository_id = repo.repo_id
1552 setting.repository_id = repo.repo_id
1553 setting.ui_section = section
1553 setting.ui_section = section
1554 setting.ui_value = value
1554 setting.ui_value = value
1555 setting.ui_key = key
1555 setting.ui_key = key
1556 setting.ui_active = active
1556 setting.ui_active = active
1557 Session().add(setting)
1557 Session().add(setting)
1558 Session().commit()
1558 Session().commit()
1559
1559
1560 if cleanup:
1560 if cleanup:
1561 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1561 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1562 return setting
1562 return setting
1563
1563
1564 def create_rhodecode_ui(
1564 def create_rhodecode_ui(
1565 self, section, value, key=None, active=True, cleanup=True):
1565 self, section, value, key=None, active=True, cleanup=True):
1566 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1566 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1567
1567
1568 setting = RhodeCodeUi()
1568 setting = RhodeCodeUi()
1569 setting.ui_section = section
1569 setting.ui_section = section
1570 setting.ui_value = value
1570 setting.ui_value = value
1571 setting.ui_key = key
1571 setting.ui_key = key
1572 setting.ui_active = active
1572 setting.ui_active = active
1573 Session().add(setting)
1573 Session().add(setting)
1574 Session().commit()
1574 Session().commit()
1575
1575
1576 if cleanup:
1576 if cleanup:
1577 self.rhodecode_ui_ids.append(setting.ui_id)
1577 self.rhodecode_ui_ids.append(setting.ui_id)
1578 return setting
1578 return setting
1579
1579
1580 def create_repo_rhodecode_setting(
1580 def create_repo_rhodecode_setting(
1581 self, repo, name, value, type_, cleanup=True):
1581 self, repo, name, value, type_, cleanup=True):
1582 setting = RepoRhodeCodeSetting(
1582 setting = RepoRhodeCodeSetting(
1583 repo.repo_id, key=name, val=value, type=type_)
1583 repo.repo_id, key=name, val=value, type=type_)
1584 Session().add(setting)
1584 Session().add(setting)
1585 Session().commit()
1585 Session().commit()
1586
1586
1587 if cleanup:
1587 if cleanup:
1588 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1588 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1589 return setting
1589 return setting
1590
1590
1591 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1591 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1592 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1592 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1593 Session().add(setting)
1593 Session().add(setting)
1594 Session().commit()
1594 Session().commit()
1595
1595
1596 if cleanup:
1596 if cleanup:
1597 self.rhodecode_setting_ids.append(setting.app_settings_id)
1597 self.rhodecode_setting_ids.append(setting.app_settings_id)
1598
1598
1599 return setting
1599 return setting
1600
1600
1601 def cleanup(self):
1601 def cleanup(self):
1602 for id_ in self.rhodecode_ui_ids:
1602 for id_ in self.rhodecode_ui_ids:
1603 setting = RhodeCodeUi.get(id_)
1603 setting = RhodeCodeUi.get(id_)
1604 Session().delete(setting)
1604 Session().delete(setting)
1605
1605
1606 for id_ in self.rhodecode_setting_ids:
1606 for id_ in self.rhodecode_setting_ids:
1607 setting = RhodeCodeSetting.get(id_)
1607 setting = RhodeCodeSetting.get(id_)
1608 Session().delete(setting)
1608 Session().delete(setting)
1609
1609
1610 for id_ in self.repo_rhodecode_ui_ids:
1610 for id_ in self.repo_rhodecode_ui_ids:
1611 setting = RepoRhodeCodeUi.get(id_)
1611 setting = RepoRhodeCodeUi.get(id_)
1612 Session().delete(setting)
1612 Session().delete(setting)
1613
1613
1614 for id_ in self.repo_rhodecode_setting_ids:
1614 for id_ in self.repo_rhodecode_setting_ids:
1615 setting = RepoRhodeCodeSetting.get(id_)
1615 setting = RepoRhodeCodeSetting.get(id_)
1616 Session().delete(setting)
1616 Session().delete(setting)
1617
1617
1618 Session().commit()
1618 Session().commit()
1619
1619
1620
1620
1621 @pytest.fixture
1621 @pytest.fixture
1622 def no_notifications(request):
1622 def no_notifications(request):
1623 notification_patcher = mock.patch(
1623 notification_patcher = mock.patch(
1624 'rhodecode.model.notification.NotificationModel.create')
1624 'rhodecode.model.notification.NotificationModel.create')
1625 notification_patcher.start()
1625 notification_patcher.start()
1626 request.addfinalizer(notification_patcher.stop)
1626 request.addfinalizer(notification_patcher.stop)
1627
1627
1628
1628
1629 @pytest.fixture
1629 @pytest.fixture
1630 def silence_action_logger(request):
1630 def silence_action_logger(request):
1631 notification_patcher = mock.patch(
1631 notification_patcher = mock.patch(
1632 'rhodecode.lib.utils.action_logger')
1632 'rhodecode.lib.utils.action_logger')
1633 notification_patcher.start()
1633 notification_patcher.start()
1634 request.addfinalizer(notification_patcher.stop)
1634 request.addfinalizer(notification_patcher.stop)
1635
1635
1636
1636
1637 @pytest.fixture(scope='session')
1637 @pytest.fixture(scope='session')
1638 def repeat(request):
1638 def repeat(request):
1639 """
1639 """
1640 The number of repetitions is based on this fixture.
1640 The number of repetitions is based on this fixture.
1641
1641
1642 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1642 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1643 tests are not too slow in our default test suite.
1643 tests are not too slow in our default test suite.
1644 """
1644 """
1645 return request.config.getoption('--repeat')
1645 return request.config.getoption('--repeat')
1646
1646
1647
1647
1648 @pytest.fixture
1648 @pytest.fixture
1649 def rhodecode_fixtures():
1649 def rhodecode_fixtures():
1650 return Fixture()
1650 return Fixture()
1651
1651
1652
1652
1653 @pytest.fixture
1653 @pytest.fixture
1654 def request_stub():
1654 def request_stub():
1655 """
1655 """
1656 Stub request object.
1656 Stub request object.
1657 """
1657 """
1658 request = pyramid.testing.DummyRequest()
1658 request = pyramid.testing.DummyRequest()
1659 request.scheme = 'https'
1659 request.scheme = 'https'
1660 return request
1660 return request
1661
1661
1662
1662
1663 @pytest.fixture
1663 @pytest.fixture
1664 def config_stub(request, request_stub):
1664 def config_stub(request, request_stub):
1665 """
1665 """
1666 Set up pyramid.testing and return the Configurator.
1666 Set up pyramid.testing and return the Configurator.
1667 """
1667 """
1668 config = pyramid.testing.setUp(request=request_stub)
1668 config = pyramid.testing.setUp(request=request_stub)
1669
1669
1670 @request.addfinalizer
1670 @request.addfinalizer
1671 def cleanup():
1671 def cleanup():
1672 pyramid.testing.tearDown()
1672 pyramid.testing.tearDown()
1673
1673
1674 return config
1674 return config
1675
1675
1676
1676
1677 @pytest.fixture
1677 @pytest.fixture
1678 def StubIntegrationType():
1678 def StubIntegrationType():
1679 class _StubIntegrationType(IntegrationTypeBase):
1679 class _StubIntegrationType(IntegrationTypeBase):
1680 """ Test integration type class """
1680 """ Test integration type class """
1681
1681
1682 key = 'test'
1682 key = 'test'
1683 display_name = 'Test integration type'
1683 display_name = 'Test integration type'
1684 description = 'A test integration type for testing'
1684 description = 'A test integration type for testing'
1685 icon = 'test_icon_html_image'
1685 icon = 'test_icon_html_image'
1686
1686
1687 def __init__(self, settings):
1687 def __init__(self, settings):
1688 super(_StubIntegrationType, self).__init__(settings)
1688 super(_StubIntegrationType, self).__init__(settings)
1689 self.sent_events = [] # for testing
1689 self.sent_events = [] # for testing
1690
1690
1691 def send_event(self, event):
1691 def send_event(self, event):
1692 self.sent_events.append(event)
1692 self.sent_events.append(event)
1693
1693
1694 def settings_schema(self):
1694 def settings_schema(self):
1695 class SettingsSchema(colander.Schema):
1695 class SettingsSchema(colander.Schema):
1696 test_string_field = colander.SchemaNode(
1696 test_string_field = colander.SchemaNode(
1697 colander.String(),
1697 colander.String(),
1698 missing=colander.required,
1698 missing=colander.required,
1699 title='test string field',
1699 title='test string field',
1700 )
1700 )
1701 test_int_field = colander.SchemaNode(
1701 test_int_field = colander.SchemaNode(
1702 colander.Int(),
1702 colander.Int(),
1703 title='some integer setting',
1703 title='some integer setting',
1704 )
1704 )
1705 return SettingsSchema()
1705 return SettingsSchema()
1706
1706
1707
1707
1708 integration_type_registry.register_integration_type(_StubIntegrationType)
1708 integration_type_registry.register_integration_type(_StubIntegrationType)
1709 return _StubIntegrationType
1709 return _StubIntegrationType
1710
1710
1711 @pytest.fixture
1711 @pytest.fixture
1712 def stub_integration_settings():
1712 def stub_integration_settings():
1713 return {
1713 return {
1714 'test_string_field': 'some data',
1714 'test_string_field': 'some data',
1715 'test_int_field': 100,
1715 'test_int_field': 100,
1716 }
1716 }
1717
1717
1718
1718
1719 @pytest.fixture
1719 @pytest.fixture
1720 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1720 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1721 stub_integration_settings):
1721 stub_integration_settings):
1722 integration = IntegrationModel().create(
1722 integration = IntegrationModel().create(
1723 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1723 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1724 name='test repo integration',
1724 name='test repo integration',
1725 repo=repo_stub, repo_group=None, child_repos_only=None)
1725 repo=repo_stub, repo_group=None, child_repos_only=None)
1726
1726
1727 @request.addfinalizer
1727 @request.addfinalizer
1728 def cleanup():
1728 def cleanup():
1729 IntegrationModel().delete(integration)
1729 IntegrationModel().delete(integration)
1730
1730
1731 return integration
1731 return integration
1732
1732
1733
1733
1734 @pytest.fixture
1734 @pytest.fixture
1735 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1735 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1736 stub_integration_settings):
1736 stub_integration_settings):
1737 integration = IntegrationModel().create(
1737 integration = IntegrationModel().create(
1738 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1738 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1739 name='test repogroup integration',
1739 name='test repogroup integration',
1740 repo=None, repo_group=test_repo_group, child_repos_only=True)
1740 repo=None, repo_group=test_repo_group, child_repos_only=True)
1741
1741
1742 @request.addfinalizer
1742 @request.addfinalizer
1743 def cleanup():
1743 def cleanup():
1744 IntegrationModel().delete(integration)
1744 IntegrationModel().delete(integration)
1745
1745
1746 return integration
1746 return integration
1747
1747
1748
1748
1749 @pytest.fixture
1749 @pytest.fixture
1750 def repogroup_recursive_integration_stub(request, test_repo_group,
1750 def repogroup_recursive_integration_stub(request, test_repo_group,
1751 StubIntegrationType, stub_integration_settings):
1751 StubIntegrationType, stub_integration_settings):
1752 integration = IntegrationModel().create(
1752 integration = IntegrationModel().create(
1753 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1753 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1754 name='test recursive repogroup integration',
1754 name='test recursive repogroup integration',
1755 repo=None, repo_group=test_repo_group, child_repos_only=False)
1755 repo=None, repo_group=test_repo_group, child_repos_only=False)
1756
1756
1757 @request.addfinalizer
1757 @request.addfinalizer
1758 def cleanup():
1758 def cleanup():
1759 IntegrationModel().delete(integration)
1759 IntegrationModel().delete(integration)
1760
1760
1761 return integration
1761 return integration
1762
1762
1763
1763
1764 @pytest.fixture
1764 @pytest.fixture
1765 def global_integration_stub(request, StubIntegrationType,
1765 def global_integration_stub(request, StubIntegrationType,
1766 stub_integration_settings):
1766 stub_integration_settings):
1767 integration = IntegrationModel().create(
1767 integration = IntegrationModel().create(
1768 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1768 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1769 name='test global integration',
1769 name='test global integration',
1770 repo=None, repo_group=None, child_repos_only=None)
1770 repo=None, repo_group=None, child_repos_only=None)
1771
1771
1772 @request.addfinalizer
1772 @request.addfinalizer
1773 def cleanup():
1773 def cleanup():
1774 IntegrationModel().delete(integration)
1774 IntegrationModel().delete(integration)
1775
1775
1776 return integration
1776 return integration
1777
1777
1778
1778
1779 @pytest.fixture
1779 @pytest.fixture
1780 def root_repos_integration_stub(request, StubIntegrationType,
1780 def root_repos_integration_stub(request, StubIntegrationType,
1781 stub_integration_settings):
1781 stub_integration_settings):
1782 integration = IntegrationModel().create(
1782 integration = IntegrationModel().create(
1783 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1783 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1784 name='test global integration',
1784 name='test global integration',
1785 repo=None, repo_group=None, child_repos_only=True)
1785 repo=None, repo_group=None, child_repos_only=True)
1786
1786
1787 @request.addfinalizer
1787 @request.addfinalizer
1788 def cleanup():
1788 def cleanup():
1789 IntegrationModel().delete(integration)
1789 IntegrationModel().delete(integration)
1790
1790
1791 return integration
1791 return integration
@@ -1,455 +1,455 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging.config
22 import logging.config
23 import os
23 import os
24 import platform
24 import platform
25 import socket
25 import socket
26 import subprocess
26 import subprocess32
27 import time
27 import time
28 from urllib2 import urlopen, URLError
28 from urllib2 import urlopen, URLError
29
29
30 import configobj
30 import configobj
31 import pylons
31 import pylons
32 import pytest
32 import pytest
33 import webob
33 import webob
34 from beaker.session import SessionObject
34 from beaker.session import SessionObject
35 from paste.deploy import loadapp
35 from paste.deploy import loadapp
36 from pylons.i18n.translation import _get_translator
36 from pylons.i18n.translation import _get_translator
37 from pylons.util import ContextObj
37 from pylons.util import ContextObj
38 from Pyro4.errors import CommunicationError
38 from Pyro4.errors import CommunicationError
39 from routes.util import URLGenerator
39 from routes.util import URLGenerator
40
40
41 from rhodecode.lib import vcs
41 from rhodecode.lib import vcs
42 from rhodecode.tests.fixture import TestINI
42 from rhodecode.tests.fixture import TestINI
43 import rhodecode
43 import rhodecode
44
44
45
45
46 def _parse_json(value):
46 def _parse_json(value):
47 return json.loads(value) if value else None
47 return json.loads(value) if value else None
48
48
49
49
50 def pytest_addoption(parser):
50 def pytest_addoption(parser):
51 group = parser.getgroup('pylons')
51 group = parser.getgroup('pylons')
52 group.addoption(
52 group.addoption(
53 '--with-pylons', dest='pylons_config',
53 '--with-pylons', dest='pylons_config',
54 help="Set up a Pylons environment with the specified config file.")
54 help="Set up a Pylons environment with the specified config file.")
55 group.addoption(
55 group.addoption(
56 '--pylons-config-override', action='store', type=_parse_json,
56 '--pylons-config-override', action='store', type=_parse_json,
57 default=None, dest='pylons_config_override', help=(
57 default=None, dest='pylons_config_override', help=(
58 "Overrides the .ini file settings. Should be specified in JSON"
58 "Overrides the .ini file settings. Should be specified in JSON"
59 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
59 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
60 )
60 )
61 )
61 )
62 parser.addini(
62 parser.addini(
63 'pylons_config',
63 'pylons_config',
64 "Set up a Pylons environment with the specified config file.")
64 "Set up a Pylons environment with the specified config file.")
65
65
66 vcsgroup = parser.getgroup('vcs')
66 vcsgroup = parser.getgroup('vcs')
67 vcsgroup.addoption(
67 vcsgroup.addoption(
68 '--without-vcsserver', dest='with_vcsserver', action='store_false',
68 '--without-vcsserver', dest='with_vcsserver', action='store_false',
69 help="Do not start the VCSServer in a background process.")
69 help="Do not start the VCSServer in a background process.")
70 vcsgroup.addoption(
70 vcsgroup.addoption(
71 '--with-vcsserver', dest='vcsserver_config_pyro4',
71 '--with-vcsserver', dest='vcsserver_config_pyro4',
72 help="Start the VCSServer with the specified config file.")
72 help="Start the VCSServer with the specified config file.")
73 vcsgroup.addoption(
73 vcsgroup.addoption(
74 '--with-vcsserver-http', dest='vcsserver_config_http',
74 '--with-vcsserver-http', dest='vcsserver_config_http',
75 help="Start the HTTP VCSServer with the specified config file.")
75 help="Start the HTTP VCSServer with the specified config file.")
76 vcsgroup.addoption(
76 vcsgroup.addoption(
77 '--vcsserver-protocol', dest='vcsserver_protocol',
77 '--vcsserver-protocol', dest='vcsserver_protocol',
78 help="Start the VCSServer with HTTP / Pyro4 protocol support.")
78 help="Start the VCSServer with HTTP / Pyro4 protocol support.")
79 vcsgroup.addoption(
79 vcsgroup.addoption(
80 '--vcsserver-config-override', action='store', type=_parse_json,
80 '--vcsserver-config-override', action='store', type=_parse_json,
81 default=None, dest='vcsserver_config_override', help=(
81 default=None, dest='vcsserver_config_override', help=(
82 "Overrides the .ini file settings for the VCSServer. "
82 "Overrides the .ini file settings for the VCSServer. "
83 "Should be specified in JSON "
83 "Should be specified in JSON "
84 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
84 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
85 )
85 )
86 )
86 )
87 vcsgroup.addoption(
87 vcsgroup.addoption(
88 '--vcsserver-port', action='store', type=int,
88 '--vcsserver-port', action='store', type=int,
89 default=None, help=(
89 default=None, help=(
90 "Allows to set the port of the vcsserver. Useful when testing "
90 "Allows to set the port of the vcsserver. Useful when testing "
91 "against an already running server and random ports cause "
91 "against an already running server and random ports cause "
92 "trouble."))
92 "trouble."))
93 parser.addini(
93 parser.addini(
94 'vcsserver_config_pyro4',
94 'vcsserver_config_pyro4',
95 "Start the VCSServer with the specified config file.")
95 "Start the VCSServer with the specified config file.")
96 parser.addini(
96 parser.addini(
97 'vcsserver_config_http',
97 'vcsserver_config_http',
98 "Start the HTTP VCSServer with the specified config file.")
98 "Start the HTTP VCSServer with the specified config file.")
99 parser.addini(
99 parser.addini(
100 'vcsserver_protocol',
100 'vcsserver_protocol',
101 "Start the VCSServer with HTTP / Pyro4 protocol support.")
101 "Start the VCSServer with HTTP / Pyro4 protocol support.")
102
102
103
103
104 @pytest.fixture(scope='session')
104 @pytest.fixture(scope='session')
105 def vcsserver(request, vcsserver_port, vcsserver_factory):
105 def vcsserver(request, vcsserver_port, vcsserver_factory):
106 """
106 """
107 Session scope VCSServer.
107 Session scope VCSServer.
108
108
109 Tests wich need the VCSServer have to rely on this fixture in order
109 Tests wich need the VCSServer have to rely on this fixture in order
110 to ensure it will be running.
110 to ensure it will be running.
111
111
112 For specific needs, the fixture vcsserver_factory can be used. It allows to
112 For specific needs, the fixture vcsserver_factory can be used. It allows to
113 adjust the configuration file for the test run.
113 adjust the configuration file for the test run.
114
114
115 Command line args:
115 Command line args:
116
116
117 --without-vcsserver: Allows to switch this fixture off. You have to
117 --without-vcsserver: Allows to switch this fixture off. You have to
118 manually start the server.
118 manually start the server.
119
119
120 --vcsserver-port: Will expect the VCSServer to listen on this port.
120 --vcsserver-port: Will expect the VCSServer to listen on this port.
121 """
121 """
122
122
123 if not request.config.getoption('with_vcsserver'):
123 if not request.config.getoption('with_vcsserver'):
124 return None
124 return None
125
125
126 use_http = _use_vcs_http_server(request.config)
126 use_http = _use_vcs_http_server(request.config)
127 return vcsserver_factory(
127 return vcsserver_factory(
128 request, use_http=use_http, vcsserver_port=vcsserver_port)
128 request, use_http=use_http, vcsserver_port=vcsserver_port)
129
129
130
130
131 @pytest.fixture(scope='session')
131 @pytest.fixture(scope='session')
132 def vcsserver_factory(tmpdir_factory):
132 def vcsserver_factory(tmpdir_factory):
133 """
133 """
134 Use this if you need a running vcsserver with a special configuration.
134 Use this if you need a running vcsserver with a special configuration.
135 """
135 """
136
136
137 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
137 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
138
138
139 if vcsserver_port is None:
139 if vcsserver_port is None:
140 vcsserver_port = get_available_port()
140 vcsserver_port = get_available_port()
141
141
142 overrides = list(overrides)
142 overrides = list(overrides)
143 if use_http:
143 if use_http:
144 overrides.append({'server:main': {'port': vcsserver_port}})
144 overrides.append({'server:main': {'port': vcsserver_port}})
145 else:
145 else:
146 overrides.append({'DEFAULT': {'port': vcsserver_port}})
146 overrides.append({'DEFAULT': {'port': vcsserver_port}})
147
147
148 if is_cygwin():
148 if is_cygwin():
149 platform_override = {'DEFAULT': {
149 platform_override = {'DEFAULT': {
150 'beaker.cache.repo_object.type': 'nocache'}}
150 'beaker.cache.repo_object.type': 'nocache'}}
151 overrides.append(platform_override)
151 overrides.append(platform_override)
152
152
153 option_name = (
153 option_name = (
154 'vcsserver_config_http' if use_http else 'vcsserver_config_pyro4')
154 'vcsserver_config_http' if use_http else 'vcsserver_config_pyro4')
155 override_option_name = 'vcsserver_config_override'
155 override_option_name = 'vcsserver_config_override'
156 config_file = get_config(
156 config_file = get_config(
157 request.config, option_name=option_name,
157 request.config, option_name=option_name,
158 override_option_name=override_option_name, overrides=overrides,
158 override_option_name=override_option_name, overrides=overrides,
159 basetemp=tmpdir_factory.getbasetemp().strpath,
159 basetemp=tmpdir_factory.getbasetemp().strpath,
160 prefix='test_vcs_')
160 prefix='test_vcs_')
161
161
162 print "Using the VCSServer configuration", config_file
162 print "Using the VCSServer configuration", config_file
163 ServerClass = HttpVCSServer if use_http else Pyro4VCSServer
163 ServerClass = HttpVCSServer if use_http else Pyro4VCSServer
164 server = ServerClass(config_file)
164 server = ServerClass(config_file)
165 server.start()
165 server.start()
166
166
167 @request.addfinalizer
167 @request.addfinalizer
168 def cleanup():
168 def cleanup():
169 server.shutdown()
169 server.shutdown()
170
170
171 server.wait_until_ready()
171 server.wait_until_ready()
172 return server
172 return server
173
173
174 return factory
174 return factory
175
175
176
176
177 def is_cygwin():
177 def is_cygwin():
178 return 'cygwin' in platform.system().lower()
178 return 'cygwin' in platform.system().lower()
179
179
180
180
181 def _use_vcs_http_server(config):
181 def _use_vcs_http_server(config):
182 protocol_option = 'vcsserver_protocol'
182 protocol_option = 'vcsserver_protocol'
183 protocol = (
183 protocol = (
184 config.getoption(protocol_option) or
184 config.getoption(protocol_option) or
185 config.getini(protocol_option) or
185 config.getini(protocol_option) or
186 'http')
186 'http')
187 return protocol == 'http'
187 return protocol == 'http'
188
188
189
189
190 class VCSServer(object):
190 class VCSServer(object):
191 """
191 """
192 Represents a running VCSServer instance.
192 Represents a running VCSServer instance.
193 """
193 """
194
194
195 _args = []
195 _args = []
196
196
197 def start(self):
197 def start(self):
198 print("Starting the VCSServer: {}".format(self._args))
198 print("Starting the VCSServer: {}".format(self._args))
199 self.process = subprocess.Popen(self._args)
199 self.process = subprocess32.Popen(self._args)
200
200
201 def wait_until_ready(self, timeout=30):
201 def wait_until_ready(self, timeout=30):
202 raise NotImplementedError()
202 raise NotImplementedError()
203
203
204 def shutdown(self):
204 def shutdown(self):
205 self.process.kill()
205 self.process.kill()
206
206
207
207
208 class Pyro4VCSServer(VCSServer):
208 class Pyro4VCSServer(VCSServer):
209 def __init__(self, config_file):
209 def __init__(self, config_file):
210 """
210 """
211 :param config_file: The config file to start the server with
211 :param config_file: The config file to start the server with
212 """
212 """
213
213
214 config_data = configobj.ConfigObj(config_file)
214 config_data = configobj.ConfigObj(config_file)
215 self._config = config_data['DEFAULT']
215 self._config = config_data['DEFAULT']
216
216
217 args = ['vcsserver', '--config', config_file]
217 args = ['vcsserver', '--config', config_file]
218 self._args = args
218 self._args = args
219
219
220 def wait_until_ready(self, timeout=30):
220 def wait_until_ready(self, timeout=30):
221 remote_server = vcs.create_vcsserver_proxy(
221 remote_server = vcs.create_vcsserver_proxy(
222 self.server_and_port, 'pyro4')
222 self.server_and_port, 'pyro4')
223 start = time.time()
223 start = time.time()
224 with remote_server:
224 with remote_server:
225 while time.time() - start < timeout:
225 while time.time() - start < timeout:
226 try:
226 try:
227 remote_server.ping()
227 remote_server.ping()
228 break
228 break
229 except CommunicationError:
229 except CommunicationError:
230 time.sleep(0.2)
230 time.sleep(0.2)
231 else:
231 else:
232 pytest.exit(
232 pytest.exit(
233 "Starting the VCSServer failed or took more than {} "
233 "Starting the VCSServer failed or took more than {} "
234 "seconds.".format(timeout))
234 "seconds.".format(timeout))
235
235
236 @property
236 @property
237 def server_and_port(self):
237 def server_and_port(self):
238 return '{host}:{port}'.format(**self._config)
238 return '{host}:{port}'.format(**self._config)
239
239
240
240
241 class HttpVCSServer(VCSServer):
241 class HttpVCSServer(VCSServer):
242 """
242 """
243 Represents a running VCSServer instance.
243 Represents a running VCSServer instance.
244 """
244 """
245 def __init__(self, config_file):
245 def __init__(self, config_file):
246 config_data = configobj.ConfigObj(config_file)
246 config_data = configobj.ConfigObj(config_file)
247 self._config = config_data['server:main']
247 self._config = config_data['server:main']
248
248
249 args = ['pserve', config_file]
249 args = ['pserve', config_file]
250 self._args = args
250 self._args = args
251
251
252 @property
252 @property
253 def http_url(self):
253 def http_url(self):
254 template = 'http://{host}:{port}/'
254 template = 'http://{host}:{port}/'
255 return template.format(**self._config)
255 return template.format(**self._config)
256
256
257 def start(self):
257 def start(self):
258 self.process = subprocess.Popen(self._args)
258 self.process = subprocess32.Popen(self._args)
259
259
260 def wait_until_ready(self, timeout=30):
260 def wait_until_ready(self, timeout=30):
261 host = self._config['host']
261 host = self._config['host']
262 port = self._config['port']
262 port = self._config['port']
263 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
263 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
264 start = time.time()
264 start = time.time()
265
265
266 while time.time() - start < timeout:
266 while time.time() - start < timeout:
267 try:
267 try:
268 urlopen(status_url)
268 urlopen(status_url)
269 break
269 break
270 except URLError:
270 except URLError:
271 time.sleep(0.2)
271 time.sleep(0.2)
272 else:
272 else:
273 pytest.exit(
273 pytest.exit(
274 "Starting the VCSServer failed or took more than {} "
274 "Starting the VCSServer failed or took more than {} "
275 "seconds.".format(timeout))
275 "seconds.".format(timeout))
276
276
277 def shutdown(self):
277 def shutdown(self):
278 self.process.kill()
278 self.process.kill()
279
279
280
280
281 @pytest.fixture(scope='session')
281 @pytest.fixture(scope='session')
282 def pylons_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
282 def pylons_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
283 option_name = 'pylons_config'
283 option_name = 'pylons_config'
284
284
285 overrides = [
285 overrides = [
286 {'server:main': {'port': rcserver_port}},
286 {'server:main': {'port': rcserver_port}},
287 {'app:main': {
287 {'app:main': {
288 'vcs.server': 'localhost:%s' % vcsserver_port,
288 'vcs.server': 'localhost:%s' % vcsserver_port,
289 # johbo: We will always start the VCSServer on our own based on the
289 # johbo: We will always start the VCSServer on our own based on the
290 # fixtures of the test cases. For the test run it must always be
290 # fixtures of the test cases. For the test run it must always be
291 # off in the INI file.
291 # off in the INI file.
292 'vcs.start_server': 'false',
292 'vcs.start_server': 'false',
293 }},
293 }},
294 ]
294 ]
295 if _use_vcs_http_server(request.config):
295 if _use_vcs_http_server(request.config):
296 overrides.append({
296 overrides.append({
297 'app:main': {
297 'app:main': {
298 'vcs.server.protocol': 'http',
298 'vcs.server.protocol': 'http',
299 'vcs.scm_app_implementation': 'http',
299 'vcs.scm_app_implementation': 'http',
300 'vcs.hooks.protocol': 'http',
300 'vcs.hooks.protocol': 'http',
301 }
301 }
302 })
302 })
303 else:
303 else:
304 overrides.append({
304 overrides.append({
305 'app:main': {
305 'app:main': {
306 'vcs.server.protocol': 'pyro4',
306 'vcs.server.protocol': 'pyro4',
307 'vcs.scm_app_implementation': 'pyro4',
307 'vcs.scm_app_implementation': 'pyro4',
308 'vcs.hooks.protocol': 'pyro4',
308 'vcs.hooks.protocol': 'pyro4',
309 }
309 }
310 })
310 })
311
311
312 filename = get_config(
312 filename = get_config(
313 request.config, option_name=option_name,
313 request.config, option_name=option_name,
314 override_option_name='{}_override'.format(option_name),
314 override_option_name='{}_override'.format(option_name),
315 overrides=overrides,
315 overrides=overrides,
316 basetemp=tmpdir_factory.getbasetemp().strpath,
316 basetemp=tmpdir_factory.getbasetemp().strpath,
317 prefix='test_rce_')
317 prefix='test_rce_')
318 return filename
318 return filename
319
319
320
320
321 @pytest.fixture(scope='session')
321 @pytest.fixture(scope='session')
322 def rcserver_port(request):
322 def rcserver_port(request):
323 port = get_available_port()
323 port = get_available_port()
324 print 'Using rcserver port %s' % (port, )
324 print 'Using rcserver port %s' % (port, )
325 return port
325 return port
326
326
327
327
328 @pytest.fixture(scope='session')
328 @pytest.fixture(scope='session')
329 def vcsserver_port(request):
329 def vcsserver_port(request):
330 port = request.config.getoption('--vcsserver-port')
330 port = request.config.getoption('--vcsserver-port')
331 if port is None:
331 if port is None:
332 port = get_available_port()
332 port = get_available_port()
333 print 'Using vcsserver port %s' % (port, )
333 print 'Using vcsserver port %s' % (port, )
334 return port
334 return port
335
335
336
336
337 def get_available_port():
337 def get_available_port():
338 family = socket.AF_INET
338 family = socket.AF_INET
339 socktype = socket.SOCK_STREAM
339 socktype = socket.SOCK_STREAM
340 host = '127.0.0.1'
340 host = '127.0.0.1'
341
341
342 mysocket = socket.socket(family, socktype)
342 mysocket = socket.socket(family, socktype)
343 mysocket.bind((host, 0))
343 mysocket.bind((host, 0))
344 port = mysocket.getsockname()[1]
344 port = mysocket.getsockname()[1]
345 mysocket.close()
345 mysocket.close()
346 del mysocket
346 del mysocket
347 return port
347 return port
348
348
349
349
350 @pytest.fixture(scope='session')
350 @pytest.fixture(scope='session')
351 def available_port_factory():
351 def available_port_factory():
352 """
352 """
353 Returns a callable which returns free port numbers.
353 Returns a callable which returns free port numbers.
354 """
354 """
355 return get_available_port
355 return get_available_port
356
356
357
357
358 @pytest.fixture
358 @pytest.fixture
359 def available_port(available_port_factory):
359 def available_port(available_port_factory):
360 """
360 """
361 Gives you one free port for the current test.
361 Gives you one free port for the current test.
362
362
363 Uses "available_port_factory" to retrieve the port.
363 Uses "available_port_factory" to retrieve the port.
364 """
364 """
365 return available_port_factory()
365 return available_port_factory()
366
366
367
367
368 @pytest.fixture(scope='session')
368 @pytest.fixture(scope='session')
369 def pylonsapp(pylons_config, vcsserver, http_environ_session):
369 def pylonsapp(pylons_config, vcsserver, http_environ_session):
370 print "Using the RhodeCode configuration", pylons_config
370 print "Using the RhodeCode configuration", pylons_config
371 logging.config.fileConfig(
371 logging.config.fileConfig(
372 pylons_config, disable_existing_loggers=False)
372 pylons_config, disable_existing_loggers=False)
373 app = _setup_pylons_environment(pylons_config, http_environ_session)
373 app = _setup_pylons_environment(pylons_config, http_environ_session)
374 return app
374 return app
375
375
376
376
377 @pytest.fixture(scope='session')
377 @pytest.fixture(scope='session')
378 def testini_factory(tmpdir_factory, pylons_config):
378 def testini_factory(tmpdir_factory, pylons_config):
379 """
379 """
380 Factory to create an INI file based on TestINI.
380 Factory to create an INI file based on TestINI.
381
381
382 It will make sure to place the INI file in the correct directory.
382 It will make sure to place the INI file in the correct directory.
383 """
383 """
384 basetemp = tmpdir_factory.getbasetemp().strpath
384 basetemp = tmpdir_factory.getbasetemp().strpath
385 return TestIniFactory(basetemp, pylons_config)
385 return TestIniFactory(basetemp, pylons_config)
386
386
387
387
388 class TestIniFactory(object):
388 class TestIniFactory(object):
389
389
390 def __init__(self, basetemp, template_ini):
390 def __init__(self, basetemp, template_ini):
391 self._basetemp = basetemp
391 self._basetemp = basetemp
392 self._template_ini = template_ini
392 self._template_ini = template_ini
393
393
394 def __call__(self, ini_params, new_file_prefix='test'):
394 def __call__(self, ini_params, new_file_prefix='test'):
395 ini_file = TestINI(
395 ini_file = TestINI(
396 self._template_ini, ini_params=ini_params,
396 self._template_ini, ini_params=ini_params,
397 new_file_prefix=new_file_prefix, dir=self._basetemp)
397 new_file_prefix=new_file_prefix, dir=self._basetemp)
398 result = ini_file.create()
398 result = ini_file.create()
399 return result
399 return result
400
400
401
401
402 def get_config(
402 def get_config(
403 config, option_name, override_option_name, overrides=None,
403 config, option_name, override_option_name, overrides=None,
404 basetemp=None, prefix='test'):
404 basetemp=None, prefix='test'):
405 """
405 """
406 Find a configuration file and apply overrides for the given `prefix`.
406 Find a configuration file and apply overrides for the given `prefix`.
407 """
407 """
408 config_file = (
408 config_file = (
409 config.getoption(option_name) or config.getini(option_name))
409 config.getoption(option_name) or config.getini(option_name))
410 if not config_file:
410 if not config_file:
411 pytest.exit(
411 pytest.exit(
412 "Configuration error, could not extract {}.".format(option_name))
412 "Configuration error, could not extract {}.".format(option_name))
413
413
414 overrides = overrides or []
414 overrides = overrides or []
415 config_override = config.getoption(override_option_name)
415 config_override = config.getoption(override_option_name)
416 if config_override:
416 if config_override:
417 overrides.append(config_override)
417 overrides.append(config_override)
418 temp_ini_file = TestINI(
418 temp_ini_file = TestINI(
419 config_file, ini_params=overrides, new_file_prefix=prefix,
419 config_file, ini_params=overrides, new_file_prefix=prefix,
420 dir=basetemp)
420 dir=basetemp)
421
421
422 return temp_ini_file.create()
422 return temp_ini_file.create()
423
423
424
424
425 def _setup_pylons_environment(pylons_config, http_environ):
425 def _setup_pylons_environment(pylons_config, http_environ):
426 current_path = os.getcwd()
426 current_path = os.getcwd()
427 pylonsapp = loadapp(
427 pylonsapp = loadapp(
428 'config:' + pylons_config, relative_to=current_path)
428 'config:' + pylons_config, relative_to=current_path)
429
429
430 # Using rhodecode.CONFIG which is assigned during "load_environment".
430 # Using rhodecode.CONFIG which is assigned during "load_environment".
431 # The indirect approach is used, because "pylonsapp" may actually be
431 # The indirect approach is used, because "pylonsapp" may actually be
432 # the Pyramid application.
432 # the Pyramid application.
433 pylonsapp_config = rhodecode.CONFIG
433 pylonsapp_config = rhodecode.CONFIG
434 _init_stack(pylonsapp_config, environ=http_environ)
434 _init_stack(pylonsapp_config, environ=http_environ)
435
435
436 # For compatibility add the attribute "config" which would be
436 # For compatibility add the attribute "config" which would be
437 # present on the Pylons application.
437 # present on the Pylons application.
438 pylonsapp.config = pylonsapp_config
438 pylonsapp.config = pylonsapp_config
439 return pylonsapp
439 return pylonsapp
440
440
441
441
442 def _init_stack(config=None, environ=None):
442 def _init_stack(config=None, environ=None):
443 if not config:
443 if not config:
444 config = pylons.test.pylonsapp.config
444 config = pylons.test.pylonsapp.config
445 if not environ:
445 if not environ:
446 environ = {}
446 environ = {}
447 pylons.url._push_object(URLGenerator(config['routes.map'], environ or {}))
447 pylons.url._push_object(URLGenerator(config['routes.map'], environ or {}))
448 pylons.app_globals._push_object(config['pylons.app_globals'])
448 pylons.app_globals._push_object(config['pylons.app_globals'])
449 pylons.config._push_object(config)
449 pylons.config._push_object(config)
450 pylons.tmpl_context._push_object(ContextObj())
450 pylons.tmpl_context._push_object(ContextObj())
451 # Initialize a translator for tests that utilize i18n
451 # Initialize a translator for tests that utilize i18n
452 translator = _get_translator(pylons.config.get('lang'))
452 translator = _get_translator(pylons.config.get('lang'))
453 pylons.translator._push_object(translator)
453 pylons.translator._push_object(translator)
454 pylons.session._push_object(SessionObject(environ or {}))
454 pylons.session._push_object(SessionObject(environ or {}))
455 pylons.request._push_object(webob.Request.blank('', environ=environ))
455 pylons.request._push_object(webob.Request.blank('', environ=environ))
@@ -1,217 +1,215 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Test suite for making push/pull operations
22 Test suite for making push/pull operations
23 """
23 """
24
24
25 import os
25 import os
26 import sys
26 import sys
27 import shutil
27 import shutil
28 import logging
28 import logging
29 from os.path import join as jn
29 from os.path import join as jn
30 from os.path import dirname as dn
30 from os.path import dirname as dn
31
31
32 from tempfile import _RandomNameSequence
32 from tempfile import _RandomNameSequence
33 from subprocess import Popen, PIPE
33 from subprocess32 import Popen, PIPE
34
35 from paste.deploy import appconfig
34 from paste.deploy import appconfig
36 from pylons import config
37
35
38 from rhodecode.lib.utils import add_cache
36 from rhodecode.lib.utils import add_cache
39 from rhodecode.lib.utils2 import engine_from_config
37 from rhodecode.lib.utils2 import engine_from_config
40 from rhodecode.lib.auth import get_crypt_password
38 from rhodecode.lib.auth import get_crypt_password
41 from rhodecode.model import init_model
39 from rhodecode.model import init_model
42 from rhodecode.model import meta
40 from rhodecode.model import meta
43 from rhodecode.model.db import User, Repository
41 from rhodecode.model.db import User, Repository
44
42
45 from rhodecode.tests import TESTS_TMP_PATH, NEW_HG_REPO, HG_REPO
43 from rhodecode.tests import TESTS_TMP_PATH, HG_REPO
46 from rhodecode.config.environment import load_environment
44 from rhodecode.config.environment import load_environment
47
45
48 rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
46 rel_path = dn(dn(dn(dn(os.path.abspath(__file__)))))
49 conf = appconfig('config:rc.ini', relative_to=rel_path)
47 conf = appconfig('config:rc.ini', relative_to=rel_path)
50 load_environment(conf.global_conf, conf.local_conf)
48 load_environment(conf.global_conf, conf.local_conf)
51
49
52 add_cache(conf)
50 add_cache(conf)
53
51
54 USER = 'test_admin'
52 USER = 'test_admin'
55 PASS = 'test12'
53 PASS = 'test12'
56 HOST = 'rc.local'
54 HOST = 'rc.local'
57 METHOD = 'pull'
55 METHOD = 'pull'
58 DEBUG = True
56 DEBUG = True
59 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
60
58
61
59
62 class Command(object):
60 class Command(object):
63
61
64 def __init__(self, cwd):
62 def __init__(self, cwd):
65 self.cwd = cwd
63 self.cwd = cwd
66
64
67 def execute(self, cmd, *args):
65 def execute(self, cmd, *args):
68 """Runs command on the system with given ``args``.
66 """Runs command on the system with given ``args``.
69 """
67 """
70
68
71 command = cmd + ' ' + ' '.join(args)
69 command = cmd + ' ' + ' '.join(args)
72 log.debug('Executing %s' % command)
70 log.debug('Executing %s' % command)
73 if DEBUG:
71 if DEBUG:
74 print command
72 print command
75 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
73 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
76 stdout, stderr = p.communicate()
74 stdout, stderr = p.communicate()
77 if DEBUG:
75 if DEBUG:
78 print stdout, stderr
76 print stdout, stderr
79 return stdout, stderr
77 return stdout, stderr
80
78
81
79
82 def get_session():
80 def get_session():
83 engine = engine_from_config(conf, 'sqlalchemy.db1.')
81 engine = engine_from_config(conf, 'sqlalchemy.db1.')
84 init_model(engine)
82 init_model(engine)
85 sa = meta.Session
83 sa = meta.Session
86 return sa
84 return sa
87
85
88
86
89 def create_test_user(force=True):
87 def create_test_user(force=True):
90 print 'creating test user'
88 print 'creating test user'
91 sa = get_session()
89 sa = get_session()
92
90
93 user = sa.query(User).filter(User.username == USER).scalar()
91 user = sa.query(User).filter(User.username == USER).scalar()
94
92
95 if force and user is not None:
93 if force and user is not None:
96 print 'removing current user'
94 print 'removing current user'
97 for repo in sa.query(Repository).filter(Repository.user == user).all():
95 for repo in sa.query(Repository).filter(Repository.user == user).all():
98 sa.delete(repo)
96 sa.delete(repo)
99 sa.delete(user)
97 sa.delete(user)
100 sa.commit()
98 sa.commit()
101
99
102 if user is None or force:
100 if user is None or force:
103 print 'creating new one'
101 print 'creating new one'
104 new_usr = User()
102 new_usr = User()
105 new_usr.username = USER
103 new_usr.username = USER
106 new_usr.password = get_crypt_password(PASS)
104 new_usr.password = get_crypt_password(PASS)
107 new_usr.email = 'mail@mail.com'
105 new_usr.email = 'mail@mail.com'
108 new_usr.name = 'test'
106 new_usr.name = 'test'
109 new_usr.lastname = 'lasttestname'
107 new_usr.lastname = 'lasttestname'
110 new_usr.active = True
108 new_usr.active = True
111 new_usr.admin = True
109 new_usr.admin = True
112 sa.add(new_usr)
110 sa.add(new_usr)
113 sa.commit()
111 sa.commit()
114
112
115 print 'done'
113 print 'done'
116
114
117
115
118 def create_test_repo(force=True):
116 def create_test_repo(force=True):
119 print 'creating test repo'
117 print 'creating test repo'
120 from rhodecode.model.repo import RepoModel
118 from rhodecode.model.repo import RepoModel
121 sa = get_session()
119 sa = get_session()
122
120
123 user = sa.query(User).filter(User.username == USER).scalar()
121 user = sa.query(User).filter(User.username == USER).scalar()
124 if user is None:
122 if user is None:
125 raise Exception('user not found')
123 raise Exception('user not found')
126
124
127 repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
125 repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
128
126
129 if repo is None:
127 if repo is None:
130 print 'repo not found creating'
128 print 'repo not found creating'
131
129
132 form_data = {'repo_name': HG_REPO,
130 form_data = {'repo_name': HG_REPO,
133 'repo_type': 'hg',
131 'repo_type': 'hg',
134 'private':False,
132 'private':False,
135 'clone_uri': '' }
133 'clone_uri': '' }
136 rm = RepoModel(sa)
134 rm = RepoModel(sa)
137 rm.base_path = '/home/hg'
135 rm.base_path = '/home/hg'
138 rm.create(form_data, user)
136 rm.create(form_data, user)
139
137
140 print 'done'
138 print 'done'
141
139
142
140
143 def set_anonymous_access(enable=True):
141 def set_anonymous_access(enable=True):
144 sa = get_session()
142 sa = get_session()
145 user = sa.query(User).filter(User.username == 'default').one()
143 user = sa.query(User).filter(User.username == 'default').one()
146 user.active = enable
144 user.active = enable
147 sa.add(user)
145 sa.add(user)
148 sa.commit()
146 sa.commit()
149
147
150
148
151 def get_anonymous_access():
149 def get_anonymous_access():
152 sa = get_session()
150 sa = get_session()
153 return sa.query(User).filter(User.username == 'default').one().active
151 return sa.query(User).filter(User.username == 'default').one().active
154
152
155
153
156 #==============================================================================
154 #==============================================================================
157 # TESTS
155 # TESTS
158 #==============================================================================
156 #==============================================================================
159 def test_clone_with_credentials(repo=HG_REPO, method=METHOD,
157 def test_clone_with_credentials(repo=HG_REPO, method=METHOD,
160 seq=None, backend='hg', check_output=True):
158 seq=None, backend='hg', check_output=True):
161 cwd = path = jn(TESTS_TMP_PATH, repo)
159 cwd = path = jn(TESTS_TMP_PATH, repo)
162
160
163 if seq is None:
161 if seq is None:
164 seq = _RandomNameSequence().next()
162 seq = _RandomNameSequence().next()
165
163
166 try:
164 try:
167 shutil.rmtree(path, ignore_errors=True)
165 shutil.rmtree(path, ignore_errors=True)
168 os.makedirs(path)
166 os.makedirs(path)
169 except OSError:
167 except OSError:
170 raise
168 raise
171
169
172 clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
170 clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
173 {'user': USER,
171 {'user': USER,
174 'pass': PASS,
172 'pass': PASS,
175 'host': HOST,
173 'host': HOST,
176 'cloned_repo': repo, }
174 'cloned_repo': repo, }
177
175
178 dest = path + seq
176 dest = path + seq
179 if method == 'pull':
177 if method == 'pull':
180 stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
178 stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
181 else:
179 else:
182 stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
180 stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
183 if check_output:
181 if check_output:
184 if backend == 'hg':
182 if backend == 'hg':
185 assert """adding file changes""" in stdout, 'no messages about cloning'
183 assert """adding file changes""" in stdout, 'no messages about cloning'
186 assert """abort""" not in stderr, 'got error from clone'
184 assert """abort""" not in stderr, 'got error from clone'
187 elif backend == 'git':
185 elif backend == 'git':
188 assert """Cloning into""" in stdout, 'no messages about cloning'
186 assert """Cloning into""" in stdout, 'no messages about cloning'
189
187
190 if __name__ == '__main__':
188 if __name__ == '__main__':
191 try:
189 try:
192 create_test_user(force=False)
190 create_test_user(force=False)
193 seq = None
191 seq = None
194 import time
192 import time
195
193
196 try:
194 try:
197 METHOD = sys.argv[3]
195 METHOD = sys.argv[3]
198 except Exception:
196 except Exception:
199 pass
197 pass
200
198
201 try:
199 try:
202 backend = sys.argv[4]
200 backend = sys.argv[4]
203 except Exception:
201 except Exception:
204 backend = 'hg'
202 backend = 'hg'
205
203
206 if METHOD == 'pull':
204 if METHOD == 'pull':
207 seq = _RandomNameSequence().next()
205 seq = _RandomNameSequence().next()
208 test_clone_with_credentials(repo=sys.argv[1], method='clone',
206 test_clone_with_credentials(repo=sys.argv[1], method='clone',
209 seq=seq, backend=backend)
207 seq=seq, backend=backend)
210 s = time.time()
208 s = time.time()
211 for i in range(1, int(sys.argv[2]) + 1):
209 for i in range(1, int(sys.argv[2]) + 1):
212 print 'take', i
210 print 'take', i
213 test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
211 test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
214 seq=seq, backend=backend)
212 seq=seq, backend=backend)
215 print 'time taken %.3f' % (time.time() - s)
213 print 'time taken %.3f' % (time.time() - s)
216 except Exception as e:
214 except Exception as e:
217 sys.exit('stop on %s' % e)
215 sys.exit('stop on %s' % e)
@@ -1,293 +1,293 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess
25 import subprocess32
26 import urllib2
26 import urllib2
27 from urlparse import urlparse, parse_qsl
27 from urlparse import urlparse, parse_qsl
28 from urllib import unquote_plus
28 from urllib import unquote_plus
29
29
30 import pytest
30 import pytest
31 import rc_testdata
31 import rc_testdata
32 from lxml.html import fromstring, tostring
32 from lxml.html import fromstring, tostring
33 from lxml.cssselect import CSSSelector
33 from lxml.cssselect import CSSSelector
34
34
35 from rhodecode.model.db import User
35 from rhodecode.model.db import User
36 from rhodecode.model.meta import Session
36 from rhodecode.model.meta import Session
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
38 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
39
39
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 def set_anonymous_access(enabled):
44 def set_anonymous_access(enabled):
45 """(Dis)allows anonymous access depending on parameter `enabled`"""
45 """(Dis)allows anonymous access depending on parameter `enabled`"""
46 user = User.get_default_user()
46 user = User.get_default_user()
47 user.active = enabled
47 user.active = enabled
48 Session().add(user)
48 Session().add(user)
49 Session().commit()
49 Session().commit()
50 log.info('anonymous access is now: %s', enabled)
50 log.info('anonymous access is now: %s', enabled)
51 assert enabled == User.get_default_user().active, (
51 assert enabled == User.get_default_user().active, (
52 'Cannot set anonymous access')
52 'Cannot set anonymous access')
53
53
54
54
55 def check_xfail_backends(node, backend_alias):
55 def check_xfail_backends(node, backend_alias):
56 # Using "xfail_backends" here intentionally, since this marks work
56 # Using "xfail_backends" here intentionally, since this marks work
57 # which is "to be done" soon.
57 # which is "to be done" soon.
58 skip_marker = node.get_marker('xfail_backends')
58 skip_marker = node.get_marker('xfail_backends')
59 if skip_marker and backend_alias in skip_marker.args:
59 if skip_marker and backend_alias in skip_marker.args:
60 msg = "Support for backend %s to be developed." % (backend_alias, )
60 msg = "Support for backend %s to be developed." % (backend_alias, )
61 msg = skip_marker.kwargs.get('reason', msg)
61 msg = skip_marker.kwargs.get('reason', msg)
62 pytest.xfail(msg)
62 pytest.xfail(msg)
63
63
64
64
65 def check_skip_backends(node, backend_alias):
65 def check_skip_backends(node, backend_alias):
66 # Using "skip_backends" here intentionally, since this marks work which is
66 # Using "skip_backends" here intentionally, since this marks work which is
67 # not supported.
67 # not supported.
68 skip_marker = node.get_marker('skip_backends')
68 skip_marker = node.get_marker('skip_backends')
69 if skip_marker and backend_alias in skip_marker.args:
69 if skip_marker and backend_alias in skip_marker.args:
70 msg = "Feature not supported for backend %s." % (backend_alias, )
70 msg = "Feature not supported for backend %s." % (backend_alias, )
71 msg = skip_marker.kwargs.get('reason', msg)
71 msg = skip_marker.kwargs.get('reason', msg)
72 pytest.skip(msg)
72 pytest.skip(msg)
73
73
74
74
75 def extract_git_repo_from_dump(dump_name, repo_name):
75 def extract_git_repo_from_dump(dump_name, repo_name):
76 """Create git repo `repo_name` from dump `dump_name`."""
76 """Create git repo `repo_name` from dump `dump_name`."""
77 repos_path = ScmModel().repos_path
77 repos_path = ScmModel().repos_path
78 target_path = os.path.join(repos_path, repo_name)
78 target_path = os.path.join(repos_path, repo_name)
79 rc_testdata.extract_git_dump(dump_name, target_path)
79 rc_testdata.extract_git_dump(dump_name, target_path)
80 return target_path
80 return target_path
81
81
82
82
83 def extract_hg_repo_from_dump(dump_name, repo_name):
83 def extract_hg_repo_from_dump(dump_name, repo_name):
84 """Create hg repo `repo_name` from dump `dump_name`."""
84 """Create hg repo `repo_name` from dump `dump_name`."""
85 repos_path = ScmModel().repos_path
85 repos_path = ScmModel().repos_path
86 target_path = os.path.join(repos_path, repo_name)
86 target_path = os.path.join(repos_path, repo_name)
87 rc_testdata.extract_hg_dump(dump_name, target_path)
87 rc_testdata.extract_hg_dump(dump_name, target_path)
88 return target_path
88 return target_path
89
89
90
90
91 def extract_svn_repo_from_dump(dump_name, repo_name):
91 def extract_svn_repo_from_dump(dump_name, repo_name):
92 """Create a svn repo `repo_name` from dump `dump_name`."""
92 """Create a svn repo `repo_name` from dump `dump_name`."""
93 repos_path = ScmModel().repos_path
93 repos_path = ScmModel().repos_path
94 target_path = os.path.join(repos_path, repo_name)
94 target_path = os.path.join(repos_path, repo_name)
95 SubversionRepository(target_path, create=True)
95 SubversionRepository(target_path, create=True)
96 _load_svn_dump_into_repo(dump_name, target_path)
96 _load_svn_dump_into_repo(dump_name, target_path)
97 return target_path
97 return target_path
98
98
99
99
100 def assert_message_in_log(log_records, message, levelno, module):
100 def assert_message_in_log(log_records, message, levelno, module):
101 messages = [
101 messages = [
102 r.message for r in log_records
102 r.message for r in log_records
103 if r.module == module and r.levelno == levelno
103 if r.module == module and r.levelno == levelno
104 ]
104 ]
105 assert message in messages
105 assert message in messages
106
106
107
107
108 def _load_svn_dump_into_repo(dump_name, repo_path):
108 def _load_svn_dump_into_repo(dump_name, repo_path):
109 """
109 """
110 Utility to populate a svn repository with a named dump
110 Utility to populate a svn repository with a named dump
111
111
112 Currently the dumps are in rc_testdata. They might later on be
112 Currently the dumps are in rc_testdata. They might later on be
113 integrated with the main repository once they stabilize more.
113 integrated with the main repository once they stabilize more.
114 """
114 """
115 dump = rc_testdata.load_svn_dump(dump_name)
115 dump = rc_testdata.load_svn_dump(dump_name)
116 load_dump = subprocess.Popen(
116 load_dump = subprocess32.Popen(
117 ['svnadmin', 'load', repo_path],
117 ['svnadmin', 'load', repo_path],
118 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
118 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
119 stderr=subprocess.PIPE)
119 stderr=subprocess32.PIPE)
120 out, err = load_dump.communicate(dump)
120 out, err = load_dump.communicate(dump)
121 if load_dump.returncode != 0:
121 if load_dump.returncode != 0:
122 log.error("Output of load_dump command: %s", out)
122 log.error("Output of load_dump command: %s", out)
123 log.error("Error output of load_dump command: %s", err)
123 log.error("Error output of load_dump command: %s", err)
124 raise Exception(
124 raise Exception(
125 'Failed to load dump "%s" into repository at path "%s".'
125 'Failed to load dump "%s" into repository at path "%s".'
126 % (dump_name, repo_path))
126 % (dump_name, repo_path))
127
127
128
128
129 class AssertResponse(object):
129 class AssertResponse(object):
130 """
130 """
131 Utility that helps to assert things about a given HTML response.
131 Utility that helps to assert things about a given HTML response.
132 """
132 """
133
133
134 def __init__(self, response):
134 def __init__(self, response):
135 self.response = response
135 self.response = response
136
136
137 def one_element_exists(self, css_selector):
137 def one_element_exists(self, css_selector):
138 self.get_element(css_selector)
138 self.get_element(css_selector)
139
139
140 def no_element_exists(self, css_selector):
140 def no_element_exists(self, css_selector):
141 assert not self._get_elements(css_selector)
141 assert not self._get_elements(css_selector)
142
142
143 def element_equals_to(self, css_selector, expected_content):
143 def element_equals_to(self, css_selector, expected_content):
144 element = self.get_element(css_selector)
144 element = self.get_element(css_selector)
145 element_text = self._element_to_string(element)
145 element_text = self._element_to_string(element)
146 assert expected_content in element_text
146 assert expected_content in element_text
147
147
148 def element_contains(self, css_selector, expected_content):
148 def element_contains(self, css_selector, expected_content):
149 element = self.get_element(css_selector)
149 element = self.get_element(css_selector)
150 assert expected_content in element.text_content()
150 assert expected_content in element.text_content()
151
151
152 def contains_one_link(self, link_text, href):
152 def contains_one_link(self, link_text, href):
153 doc = fromstring(self.response.body)
153 doc = fromstring(self.response.body)
154 sel = CSSSelector('a[href]')
154 sel = CSSSelector('a[href]')
155 elements = [
155 elements = [
156 e for e in sel(doc) if e.text_content().strip() == link_text]
156 e for e in sel(doc) if e.text_content().strip() == link_text]
157 assert len(elements) == 1, "Did not find link or found multiple links"
157 assert len(elements) == 1, "Did not find link or found multiple links"
158 self._ensure_url_equal(elements[0].attrib.get('href'), href)
158 self._ensure_url_equal(elements[0].attrib.get('href'), href)
159
159
160 def contains_one_anchor(self, anchor_id):
160 def contains_one_anchor(self, anchor_id):
161 doc = fromstring(self.response.body)
161 doc = fromstring(self.response.body)
162 sel = CSSSelector('#' + anchor_id)
162 sel = CSSSelector('#' + anchor_id)
163 elements = sel(doc)
163 elements = sel(doc)
164 assert len(elements) == 1
164 assert len(elements) == 1
165
165
166 def _ensure_url_equal(self, found, expected):
166 def _ensure_url_equal(self, found, expected):
167 assert _Url(found) == _Url(expected)
167 assert _Url(found) == _Url(expected)
168
168
169 def get_element(self, css_selector):
169 def get_element(self, css_selector):
170 elements = self._get_elements(css_selector)
170 elements = self._get_elements(css_selector)
171 assert len(elements) == 1
171 assert len(elements) == 1
172 return elements[0]
172 return elements[0]
173
173
174 def get_elements(self, css_selector):
174 def get_elements(self, css_selector):
175 return self._get_elements(css_selector)
175 return self._get_elements(css_selector)
176
176
177 def _get_elements(self, css_selector):
177 def _get_elements(self, css_selector):
178 doc = fromstring(self.response.body)
178 doc = fromstring(self.response.body)
179 sel = CSSSelector(css_selector)
179 sel = CSSSelector(css_selector)
180 elements = sel(doc)
180 elements = sel(doc)
181 return elements
181 return elements
182
182
183 def _element_to_string(self, element):
183 def _element_to_string(self, element):
184 return tostring(element)
184 return tostring(element)
185
185
186
186
187 class _Url(object):
187 class _Url(object):
188 """
188 """
189 A url object that can be compared with other url orbjects
189 A url object that can be compared with other url orbjects
190 without regard to the vagaries of encoding, escaping, and ordering
190 without regard to the vagaries of encoding, escaping, and ordering
191 of parameters in query strings.
191 of parameters in query strings.
192
192
193 Inspired by
193 Inspired by
194 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
194 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
195 """
195 """
196
196
197 def __init__(self, url):
197 def __init__(self, url):
198 parts = urlparse(url)
198 parts = urlparse(url)
199 _query = frozenset(parse_qsl(parts.query))
199 _query = frozenset(parse_qsl(parts.query))
200 _path = unquote_plus(parts.path)
200 _path = unquote_plus(parts.path)
201 parts = parts._replace(query=_query, path=_path)
201 parts = parts._replace(query=_query, path=_path)
202 self.parts = parts
202 self.parts = parts
203
203
204 def __eq__(self, other):
204 def __eq__(self, other):
205 return self.parts == other.parts
205 return self.parts == other.parts
206
206
207 def __hash__(self):
207 def __hash__(self):
208 return hash(self.parts)
208 return hash(self.parts)
209
209
210
210
211 def run_test_concurrently(times, raise_catched_exc=True):
211 def run_test_concurrently(times, raise_catched_exc=True):
212 """
212 """
213 Add this decorator to small pieces of code that you want to test
213 Add this decorator to small pieces of code that you want to test
214 concurrently
214 concurrently
215
215
216 ex:
216 ex:
217
217
218 @test_concurrently(25)
218 @test_concurrently(25)
219 def my_test_function():
219 def my_test_function():
220 ...
220 ...
221 """
221 """
222 def test_concurrently_decorator(test_func):
222 def test_concurrently_decorator(test_func):
223 def wrapper(*args, **kwargs):
223 def wrapper(*args, **kwargs):
224 exceptions = []
224 exceptions = []
225
225
226 def call_test_func():
226 def call_test_func():
227 try:
227 try:
228 test_func(*args, **kwargs)
228 test_func(*args, **kwargs)
229 except Exception, e:
229 except Exception, e:
230 exceptions.append(e)
230 exceptions.append(e)
231 if raise_catched_exc:
231 if raise_catched_exc:
232 raise
232 raise
233 threads = []
233 threads = []
234 for i in range(times):
234 for i in range(times):
235 threads.append(threading.Thread(target=call_test_func))
235 threads.append(threading.Thread(target=call_test_func))
236 for t in threads:
236 for t in threads:
237 t.start()
237 t.start()
238 for t in threads:
238 for t in threads:
239 t.join()
239 t.join()
240 if exceptions:
240 if exceptions:
241 raise Exception(
241 raise Exception(
242 'test_concurrently intercepted %s exceptions: %s' % (
242 'test_concurrently intercepted %s exceptions: %s' % (
243 len(exceptions), exceptions))
243 len(exceptions), exceptions))
244 return wrapper
244 return wrapper
245 return test_concurrently_decorator
245 return test_concurrently_decorator
246
246
247
247
248 def wait_for_url(url, timeout=10):
248 def wait_for_url(url, timeout=10):
249 """
249 """
250 Wait until URL becomes reachable.
250 Wait until URL becomes reachable.
251
251
252 It polls the URL until the timeout is reached or it became reachable.
252 It polls the URL until the timeout is reached or it became reachable.
253 If will call to `py.test.fail` in case the URL is not reachable.
253 If will call to `py.test.fail` in case the URL is not reachable.
254 """
254 """
255 timeout = time.time() + timeout
255 timeout = time.time() + timeout
256 last = 0
256 last = 0
257 wait = 0.1
257 wait = 0.1
258
258
259 while (timeout > last):
259 while (timeout > last):
260 last = time.time()
260 last = time.time()
261 if is_url_reachable(url):
261 if is_url_reachable(url):
262 break
262 break
263 elif ((last + wait) > time.time()):
263 elif ((last + wait) > time.time()):
264 # Go to sleep because not enough time has passed since last check.
264 # Go to sleep because not enough time has passed since last check.
265 time.sleep(wait)
265 time.sleep(wait)
266 else:
266 else:
267 pytest.fail("Timeout while waiting for URL {}".format(url))
267 pytest.fail("Timeout while waiting for URL {}".format(url))
268
268
269
269
270 def is_url_reachable(url):
270 def is_url_reachable(url):
271 try:
271 try:
272 urllib2.urlopen(url)
272 urllib2.urlopen(url)
273 except urllib2.URLError:
273 except urllib2.URLError:
274 return False
274 return False
275 return True
275 return True
276
276
277
277
278 def get_session_from_response(response):
278 def get_session_from_response(response):
279 """
279 """
280 This returns the session from a response object. Pylons has some magic
280 This returns the session from a response object. Pylons has some magic
281 to make the session available as `response.session`. But pyramid
281 to make the session available as `response.session`. But pyramid
282 doesn't expose it.
282 doesn't expose it.
283 """
283 """
284 # TODO: Try to look up the session key also.
284 # TODO: Try to look up the session key also.
285 return response.request.environ['beaker.session']
285 return response.request.environ['beaker.session']
286
286
287
287
288 def repo_on_filesystem(repo_name):
288 def repo_on_filesystem(repo_name):
289 from rhodecode.lib import vcs
289 from rhodecode.lib import vcs
290 from rhodecode.tests import TESTS_TMP_PATH
290 from rhodecode.tests import TESTS_TMP_PATH
291 repo = vcs.get_vcs_instance(
291 repo = vcs.get_vcs_instance(
292 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
292 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
293 return repo is not None
293 return repo is not None
@@ -1,195 +1,195 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import os
22 import os
23 import subprocess
23 import subprocess32
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs.exceptions import VCSError
27 from rhodecode.lib.vcs.exceptions import VCSError
28 from rhodecode.lib.vcs.utils import author_email, author_name
28 from rhodecode.lib.vcs.utils import author_email, author_name
29 from rhodecode.lib.vcs.utils.helpers import get_scm
29 from rhodecode.lib.vcs.utils.helpers import get_scm
30 from rhodecode.lib.vcs.utils.helpers import get_scms_for_path
30 from rhodecode.lib.vcs.utils.helpers import get_scms_for_path
31 from rhodecode.lib.vcs.utils.helpers import parse_datetime
31 from rhodecode.lib.vcs.utils.helpers import parse_datetime
32 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
32 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
33
33
34
34
35 @pytest.mark.usefixtures("pylonsapp")
35 @pytest.mark.usefixtures("pylonsapp")
36 class TestPaths:
36 class TestPaths:
37
37
38 def _test_get_dirs_for_path(self, path, expected):
38 def _test_get_dirs_for_path(self, path, expected):
39 """
39 """
40 Tests if get_dirs_for_path returns same as expected.
40 Tests if get_dirs_for_path returns same as expected.
41 """
41 """
42 expected = sorted(expected)
42 expected = sorted(expected)
43 result = sorted(get_dirs_for_path(path))
43 result = sorted(get_dirs_for_path(path))
44 assert result == expected, (
44 assert result == expected, (
45 "%s != %s which was expected result for path %s"
45 "%s != %s which was expected result for path %s"
46 % (result, expected, path))
46 % (result, expected, path))
47
47
48 def test_get_dirs_for_path(self):
48 def test_get_dirs_for_path(self):
49 path = 'foo/bar/baz/file'
49 path = 'foo/bar/baz/file'
50 paths_and_results = (
50 paths_and_results = (
51 ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
51 ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
52 ('foo/bar/', ['foo', 'foo/bar']),
52 ('foo/bar/', ['foo', 'foo/bar']),
53 ('foo/bar', ['foo']),
53 ('foo/bar', ['foo']),
54 )
54 )
55 for path, expected in paths_and_results:
55 for path, expected in paths_and_results:
56 self._test_get_dirs_for_path(path, expected)
56 self._test_get_dirs_for_path(path, expected)
57
57
58 def test_get_scms_for_path(self, tmpdir):
58 def test_get_scms_for_path(self, tmpdir):
59 new = tmpdir.strpath
59 new = tmpdir.strpath
60 assert get_scms_for_path(new) == []
60 assert get_scms_for_path(new) == []
61
61
62 os.mkdir(os.path.join(new, '.tux'))
62 os.mkdir(os.path.join(new, '.tux'))
63 assert get_scms_for_path(new) == []
63 assert get_scms_for_path(new) == []
64
64
65 os.mkdir(os.path.join(new, '.git'))
65 os.mkdir(os.path.join(new, '.git'))
66 assert set(get_scms_for_path(new)) == set(['git'])
66 assert set(get_scms_for_path(new)) == set(['git'])
67
67
68 os.mkdir(os.path.join(new, '.hg'))
68 os.mkdir(os.path.join(new, '.hg'))
69 assert set(get_scms_for_path(new)) == set(['git', 'hg'])
69 assert set(get_scms_for_path(new)) == set(['git', 'hg'])
70
70
71
71
72 class TestGetScm:
72 class TestGetScm:
73
73
74 def test_existing_repository(self, vcs_repository_support):
74 def test_existing_repository(self, vcs_repository_support):
75 alias, repo = vcs_repository_support
75 alias, repo = vcs_repository_support
76 assert (alias, repo.path) == get_scm(repo.path)
76 assert (alias, repo.path) == get_scm(repo.path)
77
77
78 def test_raises_if_path_is_empty(self, tmpdir):
78 def test_raises_if_path_is_empty(self, tmpdir):
79 with pytest.raises(VCSError):
79 with pytest.raises(VCSError):
80 get_scm(str(tmpdir))
80 get_scm(str(tmpdir))
81
81
82 def test_get_scm_error_path(self):
82 def test_get_scm_error_path(self):
83 with pytest.raises(VCSError):
83 with pytest.raises(VCSError):
84 get_scm('err')
84 get_scm('err')
85
85
86 def test_get_two_scms_for_path(self, tmpdir):
86 def test_get_two_scms_for_path(self, tmpdir):
87 multialias_repo_path = str(tmpdir)
87 multialias_repo_path = str(tmpdir)
88
88
89 subprocess.check_call(['hg', 'init', multialias_repo_path])
89 subprocess32.check_call(['hg', 'init', multialias_repo_path])
90 subprocess.check_call(['git', 'init', multialias_repo_path])
90 subprocess32.check_call(['git', 'init', multialias_repo_path])
91
91
92 with pytest.raises(VCSError):
92 with pytest.raises(VCSError):
93 get_scm(multialias_repo_path)
93 get_scm(multialias_repo_path)
94
94
95 def test_ignores_svn_working_copy(self, tmpdir):
95 def test_ignores_svn_working_copy(self, tmpdir):
96 tmpdir.mkdir('.svn')
96 tmpdir.mkdir('.svn')
97 with pytest.raises(VCSError):
97 with pytest.raises(VCSError):
98 get_scm(tmpdir.strpath)
98 get_scm(tmpdir.strpath)
99
99
100
100
101 class TestParseDatetime:
101 class TestParseDatetime:
102
102
103 def test_datetime_text(self):
103 def test_datetime_text(self):
104 assert parse_datetime('2010-04-07 21:29:41') == \
104 assert parse_datetime('2010-04-07 21:29:41') == \
105 datetime.datetime(2010, 4, 7, 21, 29, 41)
105 datetime.datetime(2010, 4, 7, 21, 29, 41)
106
106
107 def test_no_seconds(self):
107 def test_no_seconds(self):
108 assert parse_datetime('2010-04-07 21:29') == \
108 assert parse_datetime('2010-04-07 21:29') == \
109 datetime.datetime(2010, 4, 7, 21, 29)
109 datetime.datetime(2010, 4, 7, 21, 29)
110
110
111 def test_date_only(self):
111 def test_date_only(self):
112 assert parse_datetime('2010-04-07') == \
112 assert parse_datetime('2010-04-07') == \
113 datetime.datetime(2010, 4, 7)
113 datetime.datetime(2010, 4, 7)
114
114
115 def test_another_format(self):
115 def test_another_format(self):
116 assert parse_datetime('04/07/10 21:29:41') == \
116 assert parse_datetime('04/07/10 21:29:41') == \
117 datetime.datetime(2010, 4, 7, 21, 29, 41)
117 datetime.datetime(2010, 4, 7, 21, 29, 41)
118
118
119 def test_now(self):
119 def test_now(self):
120 assert parse_datetime('now') - datetime.datetime.now() < \
120 assert parse_datetime('now') - datetime.datetime.now() < \
121 datetime.timedelta(seconds=1)
121 datetime.timedelta(seconds=1)
122
122
123 def test_today(self):
123 def test_today(self):
124 today = datetime.date.today()
124 today = datetime.date.today()
125 assert parse_datetime('today') == \
125 assert parse_datetime('today') == \
126 datetime.datetime(*today.timetuple()[:3])
126 datetime.datetime(*today.timetuple()[:3])
127
127
128 def test_yesterday(self):
128 def test_yesterday(self):
129 yesterday = datetime.date.today() - datetime.timedelta(days=1)
129 yesterday = datetime.date.today() - datetime.timedelta(days=1)
130 assert parse_datetime('yesterday') == \
130 assert parse_datetime('yesterday') == \
131 datetime.datetime(*yesterday.timetuple()[:3])
131 datetime.datetime(*yesterday.timetuple()[:3])
132
132
133 def test_tomorrow(self):
133 def test_tomorrow(self):
134 tomorrow = datetime.date.today() + datetime.timedelta(days=1)
134 tomorrow = datetime.date.today() + datetime.timedelta(days=1)
135 args = tomorrow.timetuple()[:3] + (23, 59, 59)
135 args = tomorrow.timetuple()[:3] + (23, 59, 59)
136 assert parse_datetime('tomorrow') == datetime.datetime(*args)
136 assert parse_datetime('tomorrow') == datetime.datetime(*args)
137
137
138 def test_days(self):
138 def test_days(self):
139 timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
139 timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
140 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
140 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
141 expected = datetime.datetime(*args)
141 expected = datetime.datetime(*args)
142 assert parse_datetime('3d') == expected
142 assert parse_datetime('3d') == expected
143 assert parse_datetime('3 d') == expected
143 assert parse_datetime('3 d') == expected
144 assert parse_datetime('3 day') == expected
144 assert parse_datetime('3 day') == expected
145 assert parse_datetime('3 days') == expected
145 assert parse_datetime('3 days') == expected
146
146
147 def test_weeks(self):
147 def test_weeks(self):
148 timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
148 timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
149 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
149 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
150 expected = datetime.datetime(*args)
150 expected = datetime.datetime(*args)
151 assert parse_datetime('3w') == expected
151 assert parse_datetime('3w') == expected
152 assert parse_datetime('3 w') == expected
152 assert parse_datetime('3 w') == expected
153 assert parse_datetime('3 week') == expected
153 assert parse_datetime('3 week') == expected
154 assert parse_datetime('3 weeks') == expected
154 assert parse_datetime('3 weeks') == expected
155
155
156 def test_mixed(self):
156 def test_mixed(self):
157 timestamp = (
157 timestamp = (
158 datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3))
158 datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3))
159 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
159 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
160 expected = datetime.datetime(*args)
160 expected = datetime.datetime(*args)
161 assert parse_datetime('2w3d') == expected
161 assert parse_datetime('2w3d') == expected
162 assert parse_datetime('2w 3d') == expected
162 assert parse_datetime('2w 3d') == expected
163 assert parse_datetime('2w 3 days') == expected
163 assert parse_datetime('2w 3 days') == expected
164 assert parse_datetime('2 weeks 3 days') == expected
164 assert parse_datetime('2 weeks 3 days') == expected
165
165
166
166
167 @pytest.mark.parametrize("test_str, name, email", [
167 @pytest.mark.parametrize("test_str, name, email", [
168 ('Marcin Kuzminski <marcin@python-works.com>',
168 ('Marcin Kuzminski <marcin@python-works.com>',
169 'Marcin Kuzminski', 'marcin@python-works.com'),
169 'Marcin Kuzminski', 'marcin@python-works.com'),
170 ('Marcin Kuzminski Spaces < marcin@python-works.com >',
170 ('Marcin Kuzminski Spaces < marcin@python-works.com >',
171 'Marcin Kuzminski Spaces', 'marcin@python-works.com'),
171 'Marcin Kuzminski Spaces', 'marcin@python-works.com'),
172 ('Marcin Kuzminski <marcin.kuzminski@python-works.com>',
172 ('Marcin Kuzminski <marcin.kuzminski@python-works.com>',
173 'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'),
173 'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'),
174 ('mrf RFC_SPEC <marcin+kuzminski@python-works.com>',
174 ('mrf RFC_SPEC <marcin+kuzminski@python-works.com>',
175 'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'),
175 'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'),
176 ('username <user@email.com>',
176 ('username <user@email.com>',
177 'username', 'user@email.com'),
177 'username', 'user@email.com'),
178 ('username <user@email.com',
178 ('username <user@email.com',
179 'username', 'user@email.com'),
179 'username', 'user@email.com'),
180 ('broken missing@email.com',
180 ('broken missing@email.com',
181 'broken', 'missing@email.com'),
181 'broken', 'missing@email.com'),
182 ('<justemail@mail.com>',
182 ('<justemail@mail.com>',
183 '', 'justemail@mail.com'),
183 '', 'justemail@mail.com'),
184 ('justname',
184 ('justname',
185 'justname', ''),
185 'justname', ''),
186 ('Mr Double Name withemail@email.com ',
186 ('Mr Double Name withemail@email.com ',
187 'Mr Double Name', 'withemail@email.com'),
187 'Mr Double Name', 'withemail@email.com'),
188 ])
188 ])
189 class TestAuthorExtractors:
189 class TestAuthorExtractors:
190
190
191 def test_author_email(self, test_str, name, email):
191 def test_author_email(self, test_str, name, email):
192 assert email == author_email(test_str)
192 assert email == author_email(test_str)
193
193
194 def test_author_name(self, test_str, name, email):
194 def test_author_name(self, test_str, name, email):
195 assert name == author_name(test_str)
195 assert name == author_name(test_str)
@@ -1,118 +1,118 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities for tests only. These are not or should not be used normally -
22 Utilities for tests only. These are not or should not be used normally -
23 functions here are crafted as we don't want to use ``vcs`` to verify tests.
23 functions here are crafted as we don't want to use ``vcs`` to verify tests.
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29
29
30 from subprocess import Popen
30 from subprocess32 import Popen
31
31
32
32
class VCSTestError(Exception):
    """Raised by the VCS test utilities when a helper cannot complete."""
35
35
36
36
def run_command(cmd, args):
    """
    Runs ``cmd`` with ``args`` appended, through the system shell.

    Returns the raw ``os.waitpid``-encoded exit status (exit code shifted
    into the high byte), not the plain exit code.
    """
    full_command = '%s %s' % (cmd, args)
    process = Popen(full_command, shell=True)
    _, encoded_status = os.waitpid(process.pid, 0)
    return encoded_status
45
45
46
46
def eprint(msg):
    """
    Write ``msg`` followed by a line break to sys.stderr.

    The nose test runner hides sys.stdout by default, and keeping these
    verbose messages on stderr also keeps them out of any stream that is
    being piped somewhere else.
    """
    sys.stderr.write('%s\n' % msg)
56
56
57
57
# TODO: Revisit once we have CI running, if this is not helping us, remove it
class SCMFetcher(object):
    """
    Ensures a test repository exists locally, cloning it on demand.

    NOTE(review): ``remote_repo`` and ``clone_cmd`` are read in
    ``fetch_repo`` but never set here — presumably provided by a
    subclass or assigned externally; confirm against callers.
    """

    def __init__(self, alias, test_repo_path):
        """
        :param alias: identifier of the scm backend this fetcher serves
        :param test_repo_path: local path the repository should end up at
        """
        self.alias = alias
        self.test_repo_path = test_repo_path

    def setup(self):
        # Only fetch when the target directory is not there yet.
        if not os.path.isdir(self.test_repo_path):
            self.fetch_repo()

    def fetch_repo(self):
        """
        Clones the repository from ``self.remote_repo`` into
        ``self.test_repo_path`` using ``self.clone_cmd``.
        """
        remote = self.remote_repo
        destination = self.test_repo_path
        eprint(
            "Fetching repository %s into %s" % (remote, destination))
        run_command(self.clone_cmd, '%s %s' % (remote, destination))
82
82
83
83
def get_normalized_path(path):
    """
    Return ``path`` unchanged if nothing exists there; otherwise derive a
    fresh, non-existing sibling path by appending or incrementing a
    five-digit counter before the extension (``name-00042.ext``).

    Assumes there would be no more than 10000 same named files.

    :raises VCSTestError: if all 10000 candidate names are taken.
    """
    if not os.path.exists(path):
        return path

    directory, basename = os.path.split(path)
    parts = basename.split('.')
    if len(parts) > 1:
        ext = parts[-1]
        name = '.'.join(parts[:-1])
    else:
        # No extension: keep the whole basename. (Previously ``name``
        # became '' here, yielding broken candidates like "-00000".)
        ext = None
        name = basename

    matcher = re.compile(r'^.*-(\d{5})$')
    m = matcher.match(name)
    if not m:
        # No counter appended yet, so start with "-00000"; recurse in
        # case that first candidate happens to exist as well.
        newname = '%s-00000' % name
        newpath = os.path.join(directory, newname)
        if ext:
            newpath = '.'.join((newpath, ext))
        return get_normalized_path(newpath)

    start = int(m.group(1)[-5:]) + 1
    # ``range`` instead of ``xrange``: identical here and Py3-compatible.
    for counter in range(start, 10000):
        # Replace the trailing five digits with the zero-padded counter.
        newname = name[:-5] + str(counter).rjust(5, '0')
        newpath = os.path.join(directory, newname)
        if ext:
            newpath = '.'.join((newpath, ext))
        if not os.path.exists(newpath):
            return newpath
    raise VCSTestError("Couldn't compute new path for %s" % path)
General Comments 0
You need to be logged in to leave comments. Login now