vcs-server: expose remote tracebacks from http backend using the Pyro4AwareFormatter.
Author: marcink
Revision: r1257:edb7f6bf (branch: default)
@@ -1,116 +1,136 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
+ 21 import sys
21 import logging
22 import logging
22
23
23
24
24 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
25 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
25
26
26 # Sequences
27 # Sequences
27 RESET_SEQ = "\033[0m"
28 RESET_SEQ = "\033[0m"
28 COLOR_SEQ = "\033[0;%dm"
29 COLOR_SEQ = "\033[0;%dm"
29 BOLD_SEQ = "\033[1m"
30 BOLD_SEQ = "\033[1m"
30
31
31 COLORS = {
32 COLORS = {
32 'CRITICAL': MAGENTA,
33 'CRITICAL': MAGENTA,
33 'ERROR': RED,
34 'ERROR': RED,
34 'WARNING': CYAN,
35 'WARNING': CYAN,
35 'INFO': GREEN,
36 'INFO': GREEN,
36 'DEBUG': BLUE,
37 'DEBUG': BLUE,
37 'SQL': YELLOW
38 'SQL': YELLOW
38 }
39 }
39
40
40
41
41 def one_space_trim(s):
42 def one_space_trim(s):
42 if s.find(" ") == -1:
43 if s.find(" ") == -1:
43 return s
44 return s
44 else:
45 else:
45 s = s.replace(' ', ' ')
46 s = s.replace(' ', ' ')
46 return one_space_trim(s)
47 return one_space_trim(s)
47
48
48
49
49 def format_sql(sql):
50 def format_sql(sql):
50 sql = sql.replace('\n', '')
51 sql = sql.replace('\n', '')
51 sql = one_space_trim(sql)
52 sql = one_space_trim(sql)
52 sql = sql\
53 sql = sql\
53 .replace(',', ',\n\t')\
54 .replace(',', ',\n\t')\
54 .replace('SELECT', '\n\tSELECT \n\t')\
55 .replace('SELECT', '\n\tSELECT \n\t')\
55 .replace('UPDATE', '\n\tUPDATE \n\t')\
56 .replace('UPDATE', '\n\tUPDATE \n\t')\
56 .replace('DELETE', '\n\tDELETE \n\t')\
57 .replace('DELETE', '\n\tDELETE \n\t')\
57 .replace('FROM', '\n\tFROM')\
58 .replace('FROM', '\n\tFROM')\
58 .replace('ORDER BY', '\n\tORDER BY')\
59 .replace('ORDER BY', '\n\tORDER BY')\
59 .replace('LIMIT', '\n\tLIMIT')\
60 .replace('LIMIT', '\n\tLIMIT')\
60 .replace('WHERE', '\n\tWHERE')\
61 .replace('WHERE', '\n\tWHERE')\
61 .replace('AND', '\n\tAND')\
62 .replace('AND', '\n\tAND')\
62 .replace('LEFT', '\n\tLEFT')\
63 .replace('LEFT', '\n\tLEFT')\
63 .replace('INNER', '\n\tINNER')\
64 .replace('INNER', '\n\tINNER')\
64 .replace('INSERT', '\n\tINSERT')\
65 .replace('INSERT', '\n\tINSERT')\
65 .replace('DELETE', '\n\tDELETE')
66 .replace('DELETE', '\n\tDELETE')
66 return sql
67 return sql
67
68
68
69
69 class Pyro4AwareFormatter(logging.Formatter):
70 class Pyro4AwareFormatter(logging.Formatter):
70 """
71 """
71 Extended logging formatter which prints out Pyro4 remote tracebacks.
72 Extended logging formatter which prints out Pyro4 remote tracebacks.
72 """
73 """
73
74
74 def formatException(self, ei):
75 def formatException(self, ei):
75 ex_type, ex_value, ex_tb = ei
76 ex_type, ex_value, ex_tb = ei
- 76 if hasattr(ex_value, '_pyroTraceback'):
- 77 # johbo: Avoiding to import pyro4 until we get an exception
- 78 # which actually has a remote traceback. This avoids issues
- 79 # when gunicorn is used with gevent, since the logging would
- 80 # trigger an import of Pyro4 before the patches of gevent
- 81 # are applied.
- 82 import Pyro4.util
- 83 return ''.join(
- 84 Pyro4.util.getPyroTraceback(ex_type, ex_value, ex_tb))
- 85 return logging.Formatter.formatException(self, ei)
+ 77
+ 78 local_tb = logging.Formatter.formatException(self, ei)
+ 79 if hasattr(ex_value, '_vcs_server_traceback'):
+ 80
+ 81 def formatRemoteTraceback(remote_tb_lines):
+ 82 result = ["\n +--- This exception occurred remotely on VCSServer - Remote traceback:\n\n"]
+ 83 result.append(remote_tb_lines)
+ 84 result.append("\n +--- End of remote traceback\n")
+ 85 return result
+ 86
+ 87 try:
+ 88 if ex_type is not None and ex_value is None and ex_tb is None:
+ 89 # possible old (3.x) call syntax where caller is only providing exception object
+ 90 if type(ex_type) is not type:
+ 91 raise TypeError(
+ 92 "invalid argument: ex_type should be an exception type, or just supply no arguments at all")
+ 93 if ex_type is None and ex_tb is None:
+ 94 ex_type, ex_value, ex_tb = sys.exc_info()
+ 95
+ 96 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
+ 97
+ 98 if remote_tb:
+ 99 remote_tb = formatRemoteTraceback(remote_tb)
+ 100 return local_tb + ''.join(remote_tb)
+ 101 finally:
+ 102 # clean up cycle to traceback, to allow proper GC
+ 103 del ex_type, ex_value, ex_tb
+ 104
+ 105 return local_tb
86
106
87
107
88 class ColorFormatter(Pyro4AwareFormatter):
108 class ColorFormatter(Pyro4AwareFormatter):
89
109
90 def format(self, record):
110 def format(self, record):
91 """
111 """
92 Changes record's levelname to use with COLORS enum
112 Changes record's levelname to use with COLORS enum
93 """
113 """
94
114
95 levelname = record.levelname
115 levelname = record.levelname
96 start = COLOR_SEQ % (COLORS[levelname])
116 start = COLOR_SEQ % (COLORS[levelname])
97 def_record = logging.Formatter.format(self, record)
117 def_record = logging.Formatter.format(self, record)
98 end = RESET_SEQ
118 end = RESET_SEQ
99
119
100 colored_record = ''.join([start, def_record, end])
120 colored_record = ''.join([start, def_record, end])
101 return colored_record
121 return colored_record
102
122
103
123
104 class ColorFormatterSql(logging.Formatter):
124 class ColorFormatterSql(logging.Formatter):
105
125
106 def format(self, record):
126 def format(self, record):
107 """
127 """
108 Changes record's levelname to use with COLORS enum
128 Changes record's levelname to use with COLORS enum
109 """
129 """
110
130
111 start = COLOR_SEQ % (COLORS['SQL'])
131 start = COLOR_SEQ % (COLORS['SQL'])
112 def_record = format_sql(logging.Formatter.format(self, record))
132 def_record = format_sql(logging.Formatter.format(self, record))
113 end = RESET_SEQ
133 end = RESET_SEQ
114
134
115 colored_record = ''.join([start, def_record, end])
135 colored_record = ''.join([start, def_record, end])
116 return colored_record
136 return colored_record
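The reworked formatException() above appends the remote VCSServer traceback to the locally formatted one whenever the handled exception carries a _vcs_server_traceback attribute. A minimal sketch of how that plays out; the import path and the faked traceback string are assumptions, not part of this commit:

import logging

from rhodecode.lib.logging_formatter import Pyro4AwareFormatter  # assumed path

handler = logging.StreamHandler()
handler.setFormatter(Pyro4AwareFormatter('%(levelname)s %(message)s'))
log = logging.getLogger('example')
log.addHandler(handler)

try:
    raise ValueError('remote call failed')
except ValueError as exc:
    # In real use this attribute is attached by the vcs HTTP client when the
    # VCSServer response contains a traceback; it is faked here to show that
    # the formatter prints it right after the local traceback.
    exc._vcs_server_traceback = 'Traceback (most recent call last):\n  ...remote frames...\n'
    log.exception('call failed')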
@@ -1,91 +1,91 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 middleware to handle appenlight publishing of errors
22 middleware to handle appenlight publishing of errors
23 """
23 """
24
24
25 from appenlight_client import make_appenlight_middleware
25 from appenlight_client import make_appenlight_middleware
26 from appenlight_client.exceptions import get_current_traceback
26 from appenlight_client.exceptions import get_current_traceback
27 from appenlight_client.wsgi import AppenlightWSGIWrapper
27 from appenlight_client.wsgi import AppenlightWSGIWrapper
28
28
29
29
30 def track_exception(environ):
30 def track_exception(environ):
31 if 'appenlight.client' not in environ:
31 if 'appenlight.client' not in environ:
32 return
32 return
33
33
34 # pass the traceback object to middleware
34 # pass the traceback object to middleware
35 environ['appenlight.__traceback'] = get_current_traceback(
35 environ['appenlight.__traceback'] = get_current_traceback(
36 skip=1,
36 skip=1,
37 show_hidden_frames=True,
37 show_hidden_frames=True,
38 ignore_system_exceptions=True)
38 ignore_system_exceptions=True)
39
39
40
40
41 def track_extra_information(environ, section, value):
41 def track_extra_information(environ, section, value):
42 """
42 """
43 Utility function to attach extra information in case of an error condition.
43 Utility function to attach extra information in case of an error condition.
44
44
45 It will take care of attaching this information to the right place inside
45 It will take care of attaching this information to the right place inside
46 of `environ`, so that the appenlight client can pick it up.
46 of `environ`, so that the appenlight client can pick it up.
47 """
47 """
48 environ.setdefault('appenlight.extra', {})
48 environ.setdefault('appenlight.extra', {})
49 environ['appenlight.extra'][section] = value
49 environ['appenlight.extra'][section] = value
50
50
51
51
52 def wrap_in_appenlight_if_enabled(app, settings, appenlight_client=None):
52 def wrap_in_appenlight_if_enabled(app, settings, appenlight_client=None):
53 """
53 """
54 Wraps the given `app` for appenlight support.
54 Wraps the given `app` for appenlight support.
55
55
56 .. important::
56 .. important::
57
57
58 Appenlight expects that the wrapper is executed only once; that is why
58 Appenlight expects that the wrapper is executed only once; that is why
59 the parameter `appenlight_client` can be used to pass in an already
59 the parameter `appenlight_client` can be used to pass in an already
60 existing client instance, so that the decorators are not applied more than
60 existing client instance, so that the decorators are not applied more than
61 once.
61 once.
62
62
63 This is in use to support our setup of the vcs related middlewares.
63 This is in use to support our setup of the vcs related middlewares.
64
64
65 """
65 """
66 if settings['appenlight']:
66 if settings['appenlight']:
67 app = RemoteTracebackTracker(app)
67 app = RemoteTracebackTracker(app)
68 if not appenlight_client:
68 if not appenlight_client:
69 app = make_appenlight_middleware(app, settings)
69 app = make_appenlight_middleware(app, settings)
70 appenlight_client = app.appenlight_client
70 appenlight_client = app.appenlight_client
71 else:
71 else:
72 app = AppenlightWSGIWrapper(app, appenlight_client)
72 app = AppenlightWSGIWrapper(app, appenlight_client)
73 return app, appenlight_client
73 return app, appenlight_client
74
74
75
75
76 class RemoteTracebackTracker(object):
76 class RemoteTracebackTracker(object):
77 """
77 """
- 78 Utility middleware which forwards Pyro4 remote traceback information.
+ 78 Utility middleware which forwards VCSServer remote traceback information.
79 """
79 """
80
80
81 def __init__(self, app):
81 def __init__(self, app):
82 self.application = app
82 self.application = app
83
83
84 def __call__(self, environ, start_response):
84 def __call__(self, environ, start_response):
85 try:
85 try:
86 return self.application(environ, start_response)
86 return self.application(environ, start_response)
87 except Exception as e:
87 except Exception as e:
- 88 if hasattr(e, '_pyroTraceback'):
+ 88 if hasattr(e, '_vcs_server_traceback'):
89 track_extra_information(
89 track_extra_information(
- 90 environ, 'remote_traceback', ''.join(e._pyroTraceback))
+ 90 environ, 'remote_traceback', e._vcs_server_traceback)
91 raise
91 raise
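Since the middleware above only forwards whatever traceback string the HTTP client attached, its behaviour can be shown with a small self-contained sketch; the failing app, the traceback value and the import path are made up for illustration:

from rhodecode.lib.middleware.appenlight import RemoteTracebackTracker  # assumed path


def failing_app(environ, start_response):
    error = RuntimeError('remote call failed')
    # attached by the vcs HTTP client in real usage, faked here
    error._vcs_server_traceback = 'Traceback (most recent call last): ...'
    raise error


app = RemoteTracebackTracker(failing_app)
environ = {}
try:
    app(environ, lambda status, headers: None)
except RuntimeError:
    # the remote traceback is now available for the appenlight client
    print(environ['appenlight.extra']['remote_traceback'])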
@@ -1,285 +1,291 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2016 RhodeCode GmbH
3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Client for the VCSServer implemented based on HTTP.
22 Client for the VCSServer implemented based on HTTP.
23
23
24
24
25 Status
25 Status
26 ------
26 ------
27
27
28 This client implementation shall eventually replace the Pyro4 based
28 This client implementation shall eventually replace the Pyro4 based
29 implementation.
29 implementation.
30 """
30 """
31
31
32 import copy
32 import copy
33 import logging
33 import logging
34 import threading
34 import threading
35 import urllib2
35 import urllib2
36 import urlparse
36 import urlparse
37 import uuid
37 import uuid
38
38
39 import pycurl
39 import pycurl
40 import msgpack
40 import msgpack
41 import requests
41 import requests
42
42
43 from . import exceptions, CurlSession
43 from . import exceptions, CurlSession
44
44
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 # TODO: mikhail: Keep it in sync with vcsserver's
49 # TODO: mikhail: Keep it in sync with vcsserver's
50 # HTTPApplication.ALLOWED_EXCEPTIONS
50 # HTTPApplication.ALLOWED_EXCEPTIONS
51 EXCEPTIONS_MAP = {
51 EXCEPTIONS_MAP = {
52 'KeyError': KeyError,
52 'KeyError': KeyError,
53 'URLError': urllib2.URLError,
53 'URLError': urllib2.URLError,
54 }
54 }
55
55
56
56
57 class RepoMaker(object):
57 class RepoMaker(object):
58
58
59 def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
59 def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
60 self.url = urlparse.urljoin(
60 self.url = urlparse.urljoin(
61 'http://%s' % server_and_port, backend_endpoint)
61 'http://%s' % server_and_port, backend_endpoint)
62 self._session_factory = session_factory
62 self._session_factory = session_factory
63 self.backend_type = backend_type
63 self.backend_type = backend_type
64
64
65 def __call__(self, path, config, with_wire=None):
65 def __call__(self, path, config, with_wire=None):
66 log.debug('RepoMaker call on %s', path)
66 log.debug('RepoMaker call on %s', path)
67 return RemoteRepo(
67 return RemoteRepo(
68 path, config, self.url, self._session_factory(),
68 path, config, self.url, self._session_factory(),
69 with_wire=with_wire)
69 with_wire=with_wire)
70
70
71 def __getattr__(self, name):
71 def __getattr__(self, name):
72 def f(*args, **kwargs):
72 def f(*args, **kwargs):
73 return self._call(name, *args, **kwargs)
73 return self._call(name, *args, **kwargs)
74 return f
74 return f
75
75
76 @exceptions.map_vcs_exceptions
76 @exceptions.map_vcs_exceptions
77 def _call(self, name, *args, **kwargs):
77 def _call(self, name, *args, **kwargs):
78 payload = {
78 payload = {
79 'id': str(uuid.uuid4()),
79 'id': str(uuid.uuid4()),
80 'method': name,
80 'method': name,
81 'backend': self.backend_type,
81 'backend': self.backend_type,
82 'params': {'args': args, 'kwargs': kwargs}
82 'params': {'args': args, 'kwargs': kwargs}
83 }
83 }
84 return _remote_call(
84 return _remote_call(
85 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
85 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
86
86
87
87
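# Illustration only, not part of this diff: typical wiring of the HTTP-based
# client. The server address, endpoint and repository path are made-up values,
# the import path is an assumption, and echo() is a placeholder method name.
from rhodecode.lib.vcs.client_http import RepoMaker, ThreadlocalSessionFactory


class _StubConfig(object):
    # minimal stand-in: RemoteRepo._call only needs a serialize() method
    def serialize(self):
        return []


hg_repo_maker = RepoMaker(
    'localhost:9900', '/hg', 'hg', ThreadlocalSessionFactory())
repo = hg_repo_maker('/srv/repos/example', _StubConfig())
# Attribute access is proxied through __getattr__/_call: this POSTs a msgpack
# payload {'id': ..., 'method': 'echo', 'params': {'wire': ..., 'args': ...,
# 'kwargs': ...}} to the endpoint and returns the unpacked 'result' field.
repo.echo('hello')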
88 class ServiceConnection(object):
88 class ServiceConnection(object):
89 def __init__(self, server_and_port, backend_endpoint, session_factory):
89 def __init__(self, server_and_port, backend_endpoint, session_factory):
90 self.url = urlparse.urljoin(
90 self.url = urlparse.urljoin(
91 'http://%s' % server_and_port, backend_endpoint)
91 'http://%s' % server_and_port, backend_endpoint)
92 self._session_factory = session_factory
92 self._session_factory = session_factory
93
93
94 def __getattr__(self, name):
94 def __getattr__(self, name):
95 def f(*args, **kwargs):
95 def f(*args, **kwargs):
96 return self._call(name, *args, **kwargs)
96 return self._call(name, *args, **kwargs)
97
97
98 return f
98 return f
99
99
100 @exceptions.map_vcs_exceptions
100 @exceptions.map_vcs_exceptions
101 def _call(self, name, *args, **kwargs):
101 def _call(self, name, *args, **kwargs):
102 payload = {
102 payload = {
103 'id': str(uuid.uuid4()),
103 'id': str(uuid.uuid4()),
104 'method': name,
104 'method': name,
105 'params': {'args': args, 'kwargs': kwargs}
105 'params': {'args': args, 'kwargs': kwargs}
106 }
106 }
107 return _remote_call(
107 return _remote_call(
108 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
108 self.url, payload, EXCEPTIONS_MAP, self._session_factory())
109
109
110
110
111 class RemoteRepo(object):
111 class RemoteRepo(object):
112
112
113 def __init__(self, path, config, url, session, with_wire=None):
113 def __init__(self, path, config, url, session, with_wire=None):
114 self.url = url
114 self.url = url
115 self._session = session
115 self._session = session
116 self._wire = {
116 self._wire = {
117 "path": path,
117 "path": path,
118 "config": config,
118 "config": config,
119 "context": self._create_vcs_cache_context(),
119 "context": self._create_vcs_cache_context(),
120 }
120 }
121 if with_wire:
121 if with_wire:
122 self._wire.update(with_wire)
122 self._wire.update(with_wire)
123
123
124 # johbo: Trading complexity for performance. Avoiding the call to
124 # johbo: Trading complexity for performance. Avoiding the call to
125 # log.debug brings a few percent gain even if it is not active.
125 # log.debug brings a few percent gain even if it is not active.
126 if log.isEnabledFor(logging.DEBUG):
126 if log.isEnabledFor(logging.DEBUG):
127 self._call = self._call_with_logging
127 self._call = self._call_with_logging
128
128
129 def __getattr__(self, name):
129 def __getattr__(self, name):
130 def f(*args, **kwargs):
130 def f(*args, **kwargs):
131 return self._call(name, *args, **kwargs)
131 return self._call(name, *args, **kwargs)
132 return f
132 return f
133
133
134 @exceptions.map_vcs_exceptions
134 @exceptions.map_vcs_exceptions
135 def _call(self, name, *args, **kwargs):
135 def _call(self, name, *args, **kwargs):
136 # TODO: oliver: This is currently necessary pre-call since the
136 # TODO: oliver: This is currently necessary pre-call since the
137 # config object is being changed for hooking scenarios
137 # config object is being changed for hooking scenarios
138 wire = copy.deepcopy(self._wire)
138 wire = copy.deepcopy(self._wire)
139 wire["config"] = wire["config"].serialize()
139 wire["config"] = wire["config"].serialize()
140 payload = {
140 payload = {
141 'id': str(uuid.uuid4()),
141 'id': str(uuid.uuid4()),
142 'method': name,
142 'method': name,
143 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
143 'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
144 }
144 }
145 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
145 return _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
146
146
147 def _call_with_logging(self, name, *args, **kwargs):
147 def _call_with_logging(self, name, *args, **kwargs):
148 log.debug('Calling %s@%s', self.url, name)
148 log.debug('Calling %s@%s', self.url, name)
149 return RemoteRepo._call(self, name, *args, **kwargs)
149 return RemoteRepo._call(self, name, *args, **kwargs)
150
150
151 def __getitem__(self, key):
151 def __getitem__(self, key):
152 return self.revision(key)
152 return self.revision(key)
153
153
154 def _create_vcs_cache_context(self):
154 def _create_vcs_cache_context(self):
155 """
155 """
156 Creates a unique string which is passed to the VCSServer on every
156 Creates a unique string which is passed to the VCSServer on every
157 remote call. It is used as cache key in the VCSServer.
157 remote call. It is used as cache key in the VCSServer.
158 """
158 """
159 return str(uuid.uuid4())
159 return str(uuid.uuid4())
160
160
161 def invalidate_vcs_cache(self):
161 def invalidate_vcs_cache(self):
162 """
162 """
163 This invalidates the context which is sent to the VCSServer on every
163 This invalidates the context which is sent to the VCSServer on every
164 call to a remote method. It forces the VCSServer to create a fresh
164 call to a remote method. It forces the VCSServer to create a fresh
165 repository instance on the next call to a remote method.
165 repository instance on the next call to a remote method.
166 """
166 """
167 self._wire['context'] = self._create_vcs_cache_context()
167 self._wire['context'] = self._create_vcs_cache_context()
168
168
169
169
170 class RemoteObject(object):
170 class RemoteObject(object):
171
171
172 def __init__(self, url, session):
172 def __init__(self, url, session):
173 self._url = url
173 self._url = url
174 self._session = session
174 self._session = session
175
175
176 # johbo: Trading complexity for performance. Avoiding the call to
176 # johbo: Trading complexity for performance. Avoiding the call to
177 # log.debug brings a few percent gain even if it is not active.
177 # log.debug brings a few percent gain even if it is not active.
178 if log.isEnabledFor(logging.DEBUG):
178 if log.isEnabledFor(logging.DEBUG):
179 self._call = self._call_with_logging
179 self._call = self._call_with_logging
180
180
181 def __getattr__(self, name):
181 def __getattr__(self, name):
182 def f(*args, **kwargs):
182 def f(*args, **kwargs):
183 return self._call(name, *args, **kwargs)
183 return self._call(name, *args, **kwargs)
184 return f
184 return f
185
185
186 @exceptions.map_vcs_exceptions
186 @exceptions.map_vcs_exceptions
187 def _call(self, name, *args, **kwargs):
187 def _call(self, name, *args, **kwargs):
188 payload = {
188 payload = {
189 'id': str(uuid.uuid4()),
189 'id': str(uuid.uuid4()),
190 'method': name,
190 'method': name,
191 'params': {'args': args, 'kwargs': kwargs}
191 'params': {'args': args, 'kwargs': kwargs}
192 }
192 }
193 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
193 return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)
194
194
195 def _call_with_logging(self, name, *args, **kwargs):
195 def _call_with_logging(self, name, *args, **kwargs):
196 log.debug('Calling %s@%s', self._url, name)
196 log.debug('Calling %s@%s', self._url, name)
197 return RemoteObject._call(self, name, *args, **kwargs)
197 return RemoteObject._call(self, name, *args, **kwargs)
198
198
199
199
200 def _remote_call(url, payload, exceptions_map, session):
200 def _remote_call(url, payload, exceptions_map, session):
201 try:
201 try:
202 response = session.post(url, data=msgpack.packb(payload))
202 response = session.post(url, data=msgpack.packb(payload))
203 except pycurl.error as e:
203 except pycurl.error as e:
204 raise exceptions.HttpVCSCommunicationError(e)
204 raise exceptions.HttpVCSCommunicationError(e)
205
205
206 try:
206 try:
207 response = msgpack.unpackb(response.content)
207 response = msgpack.unpackb(response.content)
208 except Exception:
208 except Exception:
209 log.exception('Failed to decode response %r', response.content)
209 log.exception('Failed to decode response %r', response.content)
210 raise
210 raise
211
211
212 error = response.get('error')
212 error = response.get('error')
213 if error:
213 if error:
214 type_ = error.get('type', 'Exception')
214 type_ = error.get('type', 'Exception')
215 exc = exceptions_map.get(type_, Exception)
215 exc = exceptions_map.get(type_, Exception)
216 exc = exc(error.get('message'))
216 exc = exc(error.get('message'))
217 try:
217 try:
218 exc._vcs_kind = error['_vcs_kind']
218 exc._vcs_kind = error['_vcs_kind']
219 except KeyError:
219 except KeyError:
220 pass
220 pass
+ 221
+ 222 try:
+ 223 exc._vcs_server_traceback = error['traceback']
+ 224 except KeyError:
+ 225 pass
+ 226
221 raise exc
227 raise exc
222 return response.get('result')
228 return response.get('result')
223
229
224
230
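# Sketch, not part of this diff, of the error path the change above adds: when
# the VCSServer error dict carries a 'traceback' entry, the exception raised by
# _remote_call() now exposes it as _vcs_server_traceback, which the logging
# formatter and the appenlight middleware shown earlier pick up. The error
# payload below is fabricated; EXCEPTIONS_MAP is the module-level map above.
fabricated_error = {
    'type': 'KeyError',
    'message': 'unknown revision',
    '_vcs_kind': 'lookup',
    'traceback': 'Traceback (most recent call last): ...remote frames...',
}
exc = EXCEPTIONS_MAP.get(fabricated_error['type'], Exception)(
    fabricated_error['message'])
exc._vcs_kind = fabricated_error['_vcs_kind']
exc._vcs_server_traceback = fabricated_error['traceback']
# raising exc would reach map_vcs_exceptions, which maps 'lookup' to
# CommitDoesNotExistError and exposes the traceback as __traceback_info__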
225 class VcsHttpProxy(object):
231 class VcsHttpProxy(object):
226
232
227 CHUNK_SIZE = 16384
233 CHUNK_SIZE = 16384
228
234
229 def __init__(self, server_and_port, backend_endpoint):
235 def __init__(self, server_and_port, backend_endpoint):
230 adapter = requests.adapters.HTTPAdapter(max_retries=5)
236 adapter = requests.adapters.HTTPAdapter(max_retries=5)
231 self.base_url = urlparse.urljoin(
237 self.base_url = urlparse.urljoin(
232 'http://%s' % server_and_port, backend_endpoint)
238 'http://%s' % server_and_port, backend_endpoint)
233 self.session = requests.Session()
239 self.session = requests.Session()
234 self.session.mount('http://', adapter)
240 self.session.mount('http://', adapter)
235
241
236 def handle(self, environment, input_data, *args, **kwargs):
242 def handle(self, environment, input_data, *args, **kwargs):
237 data = {
243 data = {
238 'environment': environment,
244 'environment': environment,
239 'input_data': input_data,
245 'input_data': input_data,
240 'args': args,
246 'args': args,
241 'kwargs': kwargs
247 'kwargs': kwargs
242 }
248 }
243 result = self.session.post(
249 result = self.session.post(
244 self.base_url, msgpack.packb(data), stream=True)
250 self.base_url, msgpack.packb(data), stream=True)
245 return self._get_result(result)
251 return self._get_result(result)
246
252
247 def _deserialize_and_raise(self, error):
253 def _deserialize_and_raise(self, error):
248 exception = Exception(error['message'])
254 exception = Exception(error['message'])
249 try:
255 try:
250 exception._vcs_kind = error['_vcs_kind']
256 exception._vcs_kind = error['_vcs_kind']
251 except KeyError:
257 except KeyError:
252 pass
258 pass
253 raise exception
259 raise exception
254
260
255 def _iterate(self, result):
261 def _iterate(self, result):
256 unpacker = msgpack.Unpacker()
262 unpacker = msgpack.Unpacker()
257 for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
263 for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
258 unpacker.feed(line)
264 unpacker.feed(line)
259 for chunk in unpacker:
265 for chunk in unpacker:
260 yield chunk
266 yield chunk
261
267
262 def _get_result(self, result):
268 def _get_result(self, result):
263 iterator = self._iterate(result)
269 iterator = self._iterate(result)
264 error = iterator.next()
270 error = iterator.next()
265 if error:
271 if error:
266 self._deserialize_and_raise(error)
272 self._deserialize_and_raise(error)
267
273
268 status = iterator.next()
274 status = iterator.next()
269 headers = iterator.next()
275 headers = iterator.next()
270
276
271 return iterator, status, headers
277 return iterator, status, headers
272
278
273
279
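# Sketch, not part of this diff, of the stream layout _get_result() expects
# from the VCSServer: a msgpack stream whose first three objects are an error
# marker, the response status and the headers, followed by body chunks. The
# concrete status/header values below are assumptions for illustration only.
import msgpack

packed = b''.join(msgpack.packb(part) for part in [
    None,                              # no error
    '200 OK',                          # status
    [['Content-Type', 'text/plain']],  # headers
    'first body chunk',
    'second body chunk',
])
unpacker = msgpack.Unpacker()
unpacker.feed(packed)
print(list(unpacker))  # [None, '200 OK', [...], 'first body chunk', ...]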
274 class ThreadlocalSessionFactory(object):
280 class ThreadlocalSessionFactory(object):
275 """
281 """
276 Creates one CurlSession per thread on demand.
282 Creates one CurlSession per thread on demand.
277 """
283 """
278
284
279 def __init__(self):
285 def __init__(self):
280 self._thread_local = threading.local()
286 self._thread_local = threading.local()
281
287
282 def __call__(self):
288 def __call__(self):
283 if not hasattr(self._thread_local, 'curl_session'):
289 if not hasattr(self._thread_local, 'curl_session'):
284 self._thread_local.curl_session = CurlSession()
290 self._thread_local.curl_session = CurlSession()
285 return self._thread_local.curl_session
291 return self._thread_local.curl_session
@@ -1,205 +1,205 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Custom vcs exceptions module.
22 Custom vcs exceptions module.
23 """
23 """
24
24
25 import functools
25 import functools
26 import urllib2
26 import urllib2
27
27
28
28
29 class VCSCommunicationError(Exception):
29 class VCSCommunicationError(Exception):
30 pass
30 pass
31
31
32
32
33 class PyroVCSCommunicationError(VCSCommunicationError):
33 class PyroVCSCommunicationError(VCSCommunicationError):
34 pass
34 pass
35
35
36
36
37 class HttpVCSCommunicationError(VCSCommunicationError):
37 class HttpVCSCommunicationError(VCSCommunicationError):
38 pass
38 pass
39
39
40
40
41 class VCSError(Exception):
41 class VCSError(Exception):
42 pass
42 pass
43
43
44
44
45 class RepositoryError(VCSError):
45 class RepositoryError(VCSError):
46 pass
46 pass
47
47
48
48
49 class RepositoryRequirementError(RepositoryError):
49 class RepositoryRequirementError(RepositoryError):
50 pass
50 pass
51
51
52
52
53 class VCSBackendNotSupportedError(VCSError):
53 class VCSBackendNotSupportedError(VCSError):
54 """
54 """
55 Exception raised when VCSServer does not support requested backend
55 Exception raised when VCSServer does not support requested backend
56 """
56 """
57
57
58
58
59 class EmptyRepositoryError(RepositoryError):
59 class EmptyRepositoryError(RepositoryError):
60 pass
60 pass
61
61
62
62
63 class TagAlreadyExistError(RepositoryError):
63 class TagAlreadyExistError(RepositoryError):
64 pass
64 pass
65
65
66
66
67 class TagDoesNotExistError(RepositoryError):
67 class TagDoesNotExistError(RepositoryError):
68 pass
68 pass
69
69
70
70
71 class BranchAlreadyExistError(RepositoryError):
71 class BranchAlreadyExistError(RepositoryError):
72 pass
72 pass
73
73
74
74
75 class BranchDoesNotExistError(RepositoryError):
75 class BranchDoesNotExistError(RepositoryError):
76 pass
76 pass
77
77
78
78
79 class CommitError(RepositoryError):
79 class CommitError(RepositoryError):
80 """
80 """
81 Exceptions related to an existing commit
81 Exceptions related to an existing commit
82 """
82 """
83
83
84
84
85 class CommitDoesNotExistError(CommitError):
85 class CommitDoesNotExistError(CommitError):
86 pass
86 pass
87
87
88
88
89 class CommittingError(RepositoryError):
89 class CommittingError(RepositoryError):
90 """
90 """
91 Exceptions happening while creating a new commit
91 Exceptions happening while creating a new commit
92 """
92 """
93
93
94
94
95 class NothingChangedError(CommittingError):
95 class NothingChangedError(CommittingError):
96 pass
96 pass
97
97
98
98
99 class NodeError(VCSError):
99 class NodeError(VCSError):
100 pass
100 pass
101
101
102
102
103 class RemovedFileNodeError(NodeError):
103 class RemovedFileNodeError(NodeError):
104 pass
104 pass
105
105
106
106
107 class NodeAlreadyExistsError(CommittingError):
107 class NodeAlreadyExistsError(CommittingError):
108 pass
108 pass
109
109
110
110
111 class NodeAlreadyChangedError(CommittingError):
111 class NodeAlreadyChangedError(CommittingError):
112 pass
112 pass
113
113
114
114
115 class NodeDoesNotExistError(CommittingError):
115 class NodeDoesNotExistError(CommittingError):
116 pass
116 pass
117
117
118
118
119 class NodeNotChangedError(CommittingError):
119 class NodeNotChangedError(CommittingError):
120 pass
120 pass
121
121
122
122
123 class NodeAlreadyAddedError(CommittingError):
123 class NodeAlreadyAddedError(CommittingError):
124 pass
124 pass
125
125
126
126
127 class NodeAlreadyRemovedError(CommittingError):
127 class NodeAlreadyRemovedError(CommittingError):
128 pass
128 pass
129
129
130
130
131 class SubrepoMergeError(RepositoryError):
131 class SubrepoMergeError(RepositoryError):
132 """
132 """
133 This happens if we try to merge a repository which contains subrepos and
133 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos themselves are not merged;
134 the subrepos cannot be merged. The subrepos themselves are not merged;
135 only their references in the root repo are merged.
135 only their references in the root repo are merged.
136 """
136 """
137
137
138
138
139 class ImproperArchiveTypeError(VCSError):
139 class ImproperArchiveTypeError(VCSError):
140 pass
140 pass
141
141
142
142
143 class CommandError(VCSError):
143 class CommandError(VCSError):
144 pass
144 pass
145
145
146
146
147 class UnhandledException(VCSError):
147 class UnhandledException(VCSError):
148 """
148 """
149 Signals that something unexpected went wrong.
149 Signals that something unexpected went wrong.
150
150
151 This usually means we have a programming error on the side of the VCSServer
151 This usually means we have a programming error on the side of the VCSServer
152 and should inspect the logfile of the VCSServer to find more details.
152 and should inspect the logfile of the VCSServer to find more details.
153 """
153 """
154
154
155
155
156 _EXCEPTION_MAP = {
156 _EXCEPTION_MAP = {
157 'abort': RepositoryError,
157 'abort': RepositoryError,
158 'archive': ImproperArchiveTypeError,
158 'archive': ImproperArchiveTypeError,
159 'error': RepositoryError,
159 'error': RepositoryError,
160 'lookup': CommitDoesNotExistError,
160 'lookup': CommitDoesNotExistError,
161 'repo_locked': RepositoryError,
161 'repo_locked': RepositoryError,
162 'requirement': RepositoryRequirementError,
162 'requirement': RepositoryRequirementError,
163 'unhandled': UnhandledException,
163 'unhandled': UnhandledException,
164 # TODO: johbo: Define our own exception for this and stop abusing
164 # TODO: johbo: Define our own exception for this and stop abusing
165 # urllib's exception class.
165 # urllib's exception class.
166 'url_error': urllib2.URLError,
166 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
167 'subrepo_merge_error': SubrepoMergeError,
168 }
168 }
169
169
170
170
171 def map_vcs_exceptions(func):
171 def map_vcs_exceptions(func):
172 """
172 """
173 Utility to decorate functions so that plain exceptions are translated.
173 Utility to decorate functions so that plain exceptions are translated.
174
174
175 The translation is based on `exc_map` which maps a `str` indicating
175 The translation is based on `exc_map` which maps a `str` indicating
176 the error type into an exception class representing this error inside
176 the error type into an exception class representing this error inside
177 of the vcs layer.
177 of the vcs layer.
178 """
178 """
179
179
180 @functools.wraps(func)
180 @functools.wraps(func)
181 def wrapper(*args, **kwargs):
181 def wrapper(*args, **kwargs):
182 try:
182 try:
183 return func(*args, **kwargs)
183 return func(*args, **kwargs)
184 except Exception as e:
184 except Exception as e:
185 # The error middleware adds information if it finds
185 # The error middleware adds information if it finds
186 # __traceback_info__ in a frame object. This way the remote
186 # __traceback_info__ in a frame object. This way the remote
187 # traceback information is made available in error reports.
187 # traceback information is made available in error reports.
- 188 remote_tb = getattr(e, '_pyroTraceback', None)
+ 188 remote_tb = getattr(e, '_vcs_server_traceback', None)
189 if remote_tb:
189 if remote_tb:
190 __traceback_info__ = (
190 __traceback_info__ = (
- 191 'Found Pyro4 remote traceback information:\n\n' +
+ 191 'Found VCSServer remote traceback information:\n\n' +
192 '\n'.join(remote_tb))
192 '\n'.join(remote_tb))
193
193
194 # Avoid that remote_tb also appears in the frame
194 # Avoid that remote_tb also appears in the frame
195 del remote_tb
195 del remote_tb
196
196
197 # Special vcs errors had an attribute "_vcs_kind" which is used
197 # Special vcs errors had an attribute "_vcs_kind" which is used
198 # to translate them to the proper exception class in the vcs
198 # to translate them to the proper exception class in the vcs
199 # client layer.
199 # client layer.
200 kind = getattr(e, '_vcs_kind', None)
200 kind = getattr(e, '_vcs_kind', None)
201 if kind:
201 if kind:
202 raise _EXCEPTION_MAP[kind](*e.args)
202 raise _EXCEPTION_MAP[kind](*e.args)
203 else:
203 else:
204 raise
204 raise
205 return wrapper
205 return wrapper
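A short illustration of what the decorator does after this change; the decorated function, the attributes set on the exception and the import path are fabricated or assumed for the example:

from rhodecode.lib.vcs import exceptions


@exceptions.map_vcs_exceptions
def fetch_commit():
    error = Exception('unknown revision abc123')
    error._vcs_kind = 'lookup'
    # attached by _remote_call() in the HTTP client when the VCSServer
    # response includes a traceback
    error._vcs_server_traceback = 'Traceback (most recent call last): ...'
    raise error


try:
    fetch_commit()
except exceptions.CommitDoesNotExistError as exc:
    # 'lookup' was translated via _EXCEPTION_MAP; the remote traceback was
    # exposed to error reporting middleware as __traceback_info__ inside the
    # wrapper frame before the translated exception was raised.
    print(exc)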
@@ -1,54 +1,55 @@
1
1
2 /******************************************************************************
2 /******************************************************************************
3 * *
3 * *
4 * DO NOT CHANGE THIS FILE MANUALLY *
4 * DO NOT CHANGE THIS FILE MANUALLY *
5 * *
5 * *
6 * *
6 * *
7 * This file is automatically generated when the app starts up with *
7 * This file is automatically generated when the app starts up with *
8 * generate_js_files = true *
8 * generate_js_files = true *
9 * *
9 * *
10 * To add a route here pass jsroute=True to the route definition in the app *
10 * To add a route here pass jsroute=True to the route definition in the app *
11 * *
11 * *
12 ******************************************************************************/
12 ******************************************************************************/
13 function registerRCRoutes() {
13 function registerRCRoutes() {
14 // routes registration
14 // routes registration
15 pyroutes.register('home', '/', []);
15 pyroutes.register('home', '/', []);
16 pyroutes.register('user_autocomplete_data', '/_users', []);
16 pyroutes.register('user_autocomplete_data', '/_users', []);
17 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
17 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
18 pyroutes.register('new_repo', '/_admin/create_repository', []);
18 pyroutes.register('new_repo', '/_admin/create_repository', []);
19 pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']);
19 pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']);
20 pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']);
20 pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']);
21 pyroutes.register('gists', '/_admin/gists', []);
21 pyroutes.register('gists', '/_admin/gists', []);
22 pyroutes.register('new_gist', '/_admin/gists/new', []);
22 pyroutes.register('new_gist', '/_admin/gists/new', []);
23 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
23 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
24 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
24 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
25 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
25 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
26 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
26 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
27 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/default-reviewers', ['repo_name']);
27 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/default-reviewers', ['repo_name']);
28 pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']);
28 pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']);
29 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
29 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
30 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
30 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
31 pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']);
31 pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']);
32 pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']);
32 pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']);
33 pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
33 pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
34 pyroutes.register('changeset_info', '/%(repo_name)s/changeset_info/%(revision)s', ['repo_name', 'revision']);
34 pyroutes.register('changeset_info', '/%(repo_name)s/changeset_info/%(revision)s', ['repo_name', 'revision']);
35 pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
35 pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
36 pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']);
36 pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']);
37 pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']);
37 pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']);
38 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
38 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
39 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
39 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
40 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
40 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
41 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
41 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
42 pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
42 pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
43 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
43 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
44 pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']);
44 pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']);
45 pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
45 pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
46 pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
46 pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
47 pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
47 pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
48 pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
48 pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
+ 49 pyroutes.register('files_annotate_home', '/%(repo_name)s/annotate/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
49 pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
50 pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
50 pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
51 pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']);
51 pyroutes.register('files_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
52 pyroutes.register('files_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
52 pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']);
53 pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']);
53 pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']);
54 pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']);
54 }
55 }
@@ -1,1799 +1,1798 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32
32
33 import mock
33 import mock
34 import pyramid.testing
34 import pyramid.testing
35 import pytest
35 import pytest
36 import colander
36 import colander
37 import requests
37 import requests
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.utils2 import AttributeDict
40 from rhodecode.lib.utils2 import AttributeDict
41 from rhodecode.model.changeset_status import ChangesetStatusModel
41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 from rhodecode.model.comment import ChangesetCommentsModel
42 from rhodecode.model.comment import ChangesetCommentsModel
43 from rhodecode.model.db import (
43 from rhodecode.model.db import (
44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.pull_request import PullRequestModel
47 from rhodecode.model.pull_request import PullRequestModel
48 from rhodecode.model.repo import RepoModel
48 from rhodecode.model.repo import RepoModel
49 from rhodecode.model.repo_group import RepoGroupModel
49 from rhodecode.model.repo_group import RepoGroupModel
50 from rhodecode.model.user import UserModel
50 from rhodecode.model.user import UserModel
51 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.user_group import UserGroupModel
52 from rhodecode.model.user_group import UserGroupModel
53 from rhodecode.model.integration import IntegrationModel
53 from rhodecode.model.integration import IntegrationModel
54 from rhodecode.integrations import integration_type_registry
54 from rhodecode.integrations import integration_type_registry
55 from rhodecode.integrations.types.base import IntegrationTypeBase
55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 from rhodecode.lib.utils import repo2db_mapper
56 from rhodecode.lib.utils import repo2db_mapper
57 from rhodecode.lib.vcs import create_vcsserver_proxy
57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 from rhodecode.lib.vcs.backends import get_backend
58 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.nodes import FileNode
59 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.tests import (
60 from rhodecode.tests import (
61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_REGULAR_PASS)
63 TEST_USER_REGULAR_PASS)
64 from rhodecode.tests.utils import CustomTestApp
64 from rhodecode.tests.utils import CustomTestApp
65 from rhodecode.tests.fixture import Fixture
65 from rhodecode.tests.fixture import Fixture
66
66
67
67
def _split_comma(value):
    return value.split(',')


def pytest_addoption(parser):
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend-specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database-specific tests. "
             "Possible options are sqlite, postgres, mysql.")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")


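# Illustrative invocation of the options registered above (a sketch only; the
# runner entry point and the connection string shown are assumptions, not part
# of this file):
#
#   py.test --backends=git,hg --dbs=sqlite,postgres \
#       --postgres-connection-string=postgresql://user:pw@localhost/rc_test \
#       rhodecode/tests
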
def pytest_configure(config):
    # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
    from rhodecode.config import patches
    patches.kombu_1_5_1_python_2_7_11()


def pytest_collection_modifyitems(session, config, items):
    # Drop items marked as "not a test" (``__test__ = False``), mirroring
    # nose's behaviour during the transition from nose to pytest.
    remaining = [
        i for i in items if getattr(i.obj, '__test__', True)]
    items[:] = remaining


def pytest_generate_tests(metafunc):
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")


def get_backends_from_metafunc(metafunc):
    requested_backends = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Backends supported by this test function, as declared via
        # pytest.mark.backends
        backends = metafunc.function.backends.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support the class attribute "backend_alias"; this is mainly kept
        # for legacy tests which do not yet use pytest.mark.backends
        backends = [metafunc.cls.backend_alias]
    else:
        backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(backends)


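# Illustrative example of how a test restricts itself to particular backends
# via the marker consumed above (a sketch, not part of this module):
#
#   @pytest.mark.backends("git", "hg")
#   def test_something(backend):
#       assert backend.alias in ('git', 'hg')
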
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.tests.other import example_rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = example_rcextensions

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions


@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: at this point the dict is still empty; it will be filled during
    # the test run, and since we return a reference this is enough to make
    # it work.
    return calls


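# Illustrative use of the fixture above (a sketch; the hook name shown is an
# assumption for demonstration only):
#
#   def test_create_repo_triggers_hook(backend, capture_rcextensions):
#       backend.create_repo()
#       assert 'CREATE_REPO_HOOK' in capture_rcextensions
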
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allows using "http_environ" in session scope.
    """
    return http_environ(
        http_host_stub=http_host_stub())


@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'test.example.com:80'


@pytest.fixture
def http_environ(http_host_stub):
    """
    HTTP extra environ keys.

    Used by the test application as well as for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': http_host_stub.split(':')[0],
        'SERVER_PORT': http_host_stub.split(':')[1],
        'HTTP_HOST': http_host_stub,
    }


@pytest.fixture(scope='function')
def app(request, pylonsapp, http_environ):
    app = CustomTestApp(
        pylonsapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app


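# Illustrative functional test built on the "app" fixture above (a sketch; the
# URL used here is an assumption for demonstration only):
#
#   def test_home_page_renders(app, autologin_user):
#       response = app.get('/')
#       assert response.status_int == 200
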
@pytest.fixture(scope='session')
def app_settings(pylonsapp, pylons_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    from paste.deploy.loadwsgi import loadcontext, APP
    from rhodecode.config.middleware import (
        sanitize_settings_and_apply_defaults)
    context = loadcontext(APP, 'config:' + pylons_config)
    settings = sanitize_settings_and_apply_defaults(context.config())
    return settings


@pytest.fixture(scope='session')
def db(app_settings):
    """
    Initializes the database connection.

    It uses the same settings which are used to create the ``pylonsapp`` or
    ``app`` fixtures.
    """
    from rhodecode.config.utils import initialize_database
    initialize_database(app_settings)


LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])


@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in.
    """
    return _autologin_user(app)


@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in.
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}


@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


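# Illustrative POST request using the csrf_token fixture above (a sketch; the
# route and form fields are assumptions for demonstration only):
#
#   def test_update_description(app, csrf_token):
#       app.post(
#           '/some/route', params={
#               'description': 'new description',
#               'csrf_token': csrf_token,
#           })
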
@pytest.fixture(scope='class')
def index_location(request, pylonsapp):
    index_location = pylonsapp.config['app_conf']['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH


@pytest.fixture(scope='session', autouse=True)
def patch_pyro_request_scope_proxy_factory(request):
    """
    Patch the pyro proxy factory to always use the same dummy request object
    when under test. This will return the same pyro proxy on every call.
    """
    dummy_request = pyramid.testing.DummyRequest()

    def mocked_call(self, request=None):
        return self.getProxy(request=dummy_request)

    patcher = mock.patch(
        'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
        new=mocked_call)
    patcher.start()

    @request.addfinalizer
    def undo_patching():
        patcher.stop()


@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it automatically
    after usage.
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % int(time.time())
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it automatically
    after usage.
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % int(time.time())
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group


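# Illustrative use of the group fixtures above (a sketch; the group_name
# attribute is an assumption for demonstration only):
#
#   def test_group_is_created(test_repo_group):
#       assert test_repo_group.group_name.startswith('test_repo_group_')
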
@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read-only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)
        vcs_repo = backend_class(repo_path)
        repo2db_mapper({repo_name: vcs_repo})
        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)


@pytest.fixture
def backend(request, backend_alias, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture
def backend_git(request, pylonsapp, test_repo):
    return backend(request, 'git', pylonsapp, test_repo)


@pytest.fixture
def backend_hg(request, pylonsapp, test_repo):
    return backend(request, 'hg', pylonsapp, test_repo)


@pytest.fixture
def backend_svn(request, pylonsapp, test_repo):
    return backend(request, 'svn', pylonsapp, test_repo)


@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend, which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub.
    """
    return backend_stub.create_repo()


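# Illustrative combination of the backend fixture with the backend marker it
# honours (a sketch only; the marker arguments are examples):
#
#   @pytest.mark.backends("git", "hg")
#   def test_create_two_commits(backend):
#       backend.create_repo(number_of_commits=2)
#       assert len(backend.commit_ids) == 2
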
class Backend(object):
    """
    Represents the test configuration for one supported backend.

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the commit ids of the last created repository, keyed by
        commit message.
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`.
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])


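# Illustrative flow of Backend.create_master_repo() together with derived
# repositories, as used for pull request scenarios further below (a sketch
# only):
#
#   def test_derived_repos(backend):
#       commit_ids = backend.create_master_repo([
#           {'message': 'c1'},
#           {'message': 'c2', 'parents': ['c1']},
#       ])
#       target = backend.create_repo(heads=['c1'])
#       source = backend.create_repo(heads=['c2'])
#       assert set(commit_ids) == {'c1', 'c2'}
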
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git


@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git


class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)


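# Illustrative low-level usage of the vcsbackend fixture above (a sketch
# only):
#
#   def test_add_file(vcsbackend):
#       repo = vcsbackend.create_repo(number_of_commits=1)
#       vcsbackend.add_file(repo, 'README', content='hello\n')
#       assert len(repo.commit_ids) == 2
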
def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids


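# Schema of the commit dicts consumed by _add_commits_to_repo() above, spelled
# out for reference (a sketch; every key is optional):
#
#   {'message': 'c1',
#    'author': 'Automatic',
#    'date': None,                  # date passed through to imc.commit()
#    'branch': None,
#    'parents': ['c0'],             # messages of previously created commits
#    'added': [FileNode('a.txt', content='...')],
#    'changed': [FileNode('a.txt', content='...')],
#    'removed': [FileNode('a.txt')]}
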
@pytest.fixture
def reposerver(request):
    """
    Allows serving a backend repository.
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server


class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()


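# Illustrative use of the reposerver fixture above with a Subversion
# repository (a sketch only):
#
#   def test_serve_svn(vcsbackend_svn, reposerver):
#       vcsrepo = vcsbackend_svn.create_repo(number_of_commits=1)
#       reposerver.serve(vcsrepo)
#       assert reposerver.url == 'svn://localhost'
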
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)

    @request.addfinalizer
    def cleanup():
        util.cleanup()

    return util


class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version
1073 return version
1074
1074
1075 def create_status_votes(self, status, *reviewers):
1075 def create_status_votes(self, status, *reviewers):
1076 for reviewer in reviewers:
1076 for reviewer in reviewers:
1077 ChangesetStatusModel().set_status(
1077 ChangesetStatusModel().set_status(
1078 repo=self.pull_request.target_repo,
1078 repo=self.pull_request.target_repo,
1079 status=status,
1079 status=status,
1080 user=reviewer.user_id,
1080 user=reviewer.user_id,
1081 pull_request=self.pull_request)
1081 pull_request=self.pull_request)
1082
1082
1083 def set_mergeable(self, value):
1083 def set_mergeable(self, value):
1084 if not self.mergeable_patcher:
1084 if not self.mergeable_patcher:
1085 self.mergeable_patcher = mock.patch.object(
1085 self.mergeable_patcher = mock.patch.object(
1086 VcsSettingsModel, 'get_general_settings')
1086 VcsSettingsModel, 'get_general_settings')
1087 self.mergeable_mock = self.mergeable_patcher.start()
1087 self.mergeable_mock = self.mergeable_patcher.start()
1088 self.mergeable_mock.return_value = {
1088 self.mergeable_mock.return_value = {
1089 'rhodecode_pr_merge_enabled': value}
1089 'rhodecode_pr_merge_enabled': value}
1090
1090
1091 def cleanup(self):
1091 def cleanup(self):
1092 # In case the source repository is already cleaned up, the pull
1092 # In case the source repository is already cleaned up, the pull
1093 # request will already be deleted.
1093 # request will already be deleted.
1094 pull_request = PullRequest().get(self.pull_request_id)
1094 pull_request = PullRequest().get(self.pull_request_id)
1095 if pull_request:
1095 if pull_request:
1096 PullRequestModel().delete(pull_request)
1096 PullRequestModel().delete(pull_request)
1097 Session().commit()
1097 Session().commit()
1098
1098
1099 if self.notification_patcher:
1099 if self.notification_patcher:
1100 self.notification_patcher.stop()
1100 self.notification_patcher.stop()
1101
1101
1102 if self.mergeable_patcher:
1102 if self.mergeable_patcher:
1103 self.mergeable_patcher.stop()
1103 self.mergeable_patcher.stop()
1104
1104
1105
1105
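
# Illustrative usage sketch, not part of the module above: it assumes the
# pull-request utility is exposed to tests through a fixture, called
# `pr_util` here purely for illustration, that yields a wired instance.
def test_pull_request_utility_sketch(pr_util):
    pull_request = pr_util.create_pull_request()
    # Pulling a new head into the source repo and updating the commits
    # yields exactly one new revision on the pull request.
    new_commit_id = pr_util.add_one_commit()
    assert new_commit_id in pull_request.revisions
    # Comments start out unlinked to any pull request version.
    comment = pr_util.create_comment()
    assert comment.pull_request_version_id is None
    pr_util.close()
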
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility

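
# Illustrative usage sketch, not part of the module above: a test consuming
# the `user_util` fixture. Everything created through the utility is
# registered for automatic cleanup, so the test deletes nothing by hand.
def test_user_util_sketch(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    assert user.user_id in user_util.user_ids
    assert repo_group.group_id in user_util.repo_group_ids
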

# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)

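
# Illustrative usage sketch, not part of the module above: granting
# permissions through `UserUtility` so that `_cleanup_permissions` revokes
# them again after the test. `backend` stands in for any fixture providing
# an object with a `.repo` attribute; it is only an assumption here.
def test_permission_grant_sketch(user_util, backend):
    user, user_group = user_util.create_user_with_group()
    user_util.grant_user_permission_to_repo(
        backend.repo, user, 'repository.write')
    user_util.grant_user_group_permission_to_repo(
        backend.repo, user_group, 'repository.read')
    # Both grants were recorded for the automatic revoke in cleanup().
    assert (backend.repo.repo_id, user.user_id) in \
        user_util.user_repo_permission_ids
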

# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adds the remote traceback to the report if the exception carries it.

    The VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))

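
# Illustrative sketch, not part of the module above: any exception carrying
# a `_vcs_server_traceback` attribute gets an extra "VCSServer remote
# traceback ..." section in the report of the failing test. The traceback
# string below is made up for the example.
def _simulate_remote_error_sketch():
    error = RuntimeError('vcsserver call failed')
    error._vcs_server_traceback = 'Traceback (most recent call last): ...'
    raise error
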

@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends it to Appenlight. The option
    ``--ae`` has to be used to enable this fixture and the API key for your
    application has to be provided in ``--ae-key``.
    """
    try:
        # cygwin does not have psutil support yet.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
    vcs_process = psutil.Process(vcs_pid)
    mem = vcs_process.memory_info()
    client.tag_before('vcsserver.rss', mem.rss)
    client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
        for tag, value in gc_stats.items():
            client.tag_after(tag, value)
        mem = vcs_process.memory_info()
        client.tag_after('vcsserver.rss', mem.rss)
        client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client

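
# Illustrative sketch, not part of the module above: the measurement pattern
# used by the fixture, namely reading RSS via `psutil` before and after the
# work and letting the client compute the `.delta` tags in `send_stats`.
def _measure_memory_sketch(client):
    import psutil
    process = psutil.Process()  # the current test process
    client.tag_before('test.rss', process.memory_info().rss)
    # ... the test body runs here ...
    client.tag_after('test.rss', process.memory_info().rss)
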

class AppenlightClient():

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            print response.headers
            print response.text
            raise Exception('Sending to appenlight failed')

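
# Illustrative sketch, not part of the module above: using `AppenlightClient`
# directly. The URL and API key are placeholders and `send_stats` performs a
# real HTTP POST, so this only demonstrates the call order.
def _appenlight_client_sketch(testrun):
    client = AppenlightClient(
        url='http://appenlight.example.com/api/logs',
        api_key='dummy-key',
        namespace='tests/example.py::test_something',
        request=str(testrun['uuid']),
        testrun=testrun)
    client.tag_before('time', time.time())
    client.collect({'message': "Starting"})
    client.tag_after('time', time.time())
    client.send_stats()  # raises unless Appenlight answers with HTTP 200
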
@pytest.fixture
def gist_util(request, pylonsapp):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))


@pytest.fixture
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture
def settings_util(request):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()

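
# Illustrative usage sketch, not part of the module above: the
# `settings_util` fixture registers every created entry for deletion, so a
# test can add ui and settings rows freely. The hook value is a placeholder.
def test_settings_util_sketch(settings_util):
    ui_setting = settings_util.create_rhodecode_ui(
        section='hooks', value='python:example.hooks.dummy')
    assert ui_setting.ui_section == 'hooks'
    app_setting = settings_util.create_rhodecode_setting(
        'example_flag', 'enabled', 'unicode')
    assert app_setting.app_settings_id in settings_util.rhodecode_setting_ids
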
@pytest.fixture
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture
def silence_action_logger(request):
    notification_patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')


@pytest.fixture
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    request = pyramid.testing.DummyRequest()
    request.scheme = 'https'
    return request


@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    config = pyramid.testing.setUp(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config

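
# Illustrative usage sketch, not part of the module above: `config_stub`
# wires up `pyramid.testing` around the stubbed https request and tears it
# down again through the finalizer. The setting key is a placeholder.
def test_config_stub_sketch(config_stub, request_stub):
    assert request_stub.scheme == 'https'
    config_stub.add_settings({'example.setting': 'value'})
    assert config_stub.registry.settings['example.setting'] == 'value'
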
@pytest.fixture
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType


@pytest.fixture
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }

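
# Illustrative usage sketch, not part of the module above: the stub
# integration type only records the events it receives, which lets tests
# assert on what would have been delivered.
def test_stub_integration_sketch(
        StubIntegrationType, stub_integration_settings):
    integration = StubIntegrationType(stub_integration_settings)
    integration.send_event('fake-event')  # any object stands in for an event
    assert integration.sent_events == ['fake-event']
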
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
        stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
        stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
        stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
        stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration