@@ -1,4 +1,5 @@
-"""Routes configuration
+"""
+Routes configuration
 
 The more specific and detailed routes should be defined first so they
 may take precedent over the more generic routes. For more information
@@ -15,24 +16,28 @@ def make_map(config):
     map.minimization = False
     map.explicit = False
 
+    def check_repo(environ, match_dict):
+        """
+        check for valid repository for proper 404 handling
+        :param environ:
+        :param match_dict:
+        """
+        repo_name = match_dict.get('repo_name')
+        return not cr(repo_name, config['base_path'])
+
     # The ErrorController route (handles 404/500 error pages); it should
     # likely stay at the top, ensuring it can always be resolved
     map.connect('/error/{action}', controller='error')
     map.connect('/error/{action}/{id}', controller='error')
 
+    #==========================================================================
     # CUSTOM ROUTES HERE
+    #==========================================================================
+
+    #MAIN PAGE
     map.connect('hg_home', '/', controller='hg', action='index')
 
-    def check_repo(environ, match_dict):
-        """
-        check for valid repository for proper 404 handling
-        @param environ:
-        @param match_dict:
-        """
-        repo_name = match_dict.get('repo_name')
-        return not cr(repo_name, config['base_path'])
-
-    #REST REPO MAP
+    #ADMIN REPOSITORY REST ROUTES
    with map.submapper(path_prefix='/_admin', controller='admin/repos') as m:
         m.connect("repos", "/repos",
             action="create", conditions=dict(method=["POST"]))
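Note: the nested check_repo above is a Routes "condition" callback. Routes calls it with the WSGI environ and the matched route variables, and a False return rejects the match, which is what lets unknown repositories fall through to the error routes for a proper 404. A minimal, self-contained sketch of the mechanism (the route name and the whitelist check below are illustrative only, not RhodeCode's actual code):

from routes import Mapper

map = Mapper()

def check_repo(environ, match_dict):
    # Routes passes the WSGI environ plus the dict of matched route
    # variables; returning False makes this route not match at all.
    repo_name = match_dict.get('repo_name')
    return repo_name in ('vcs_test', 'rhodecode')   # hypothetical lookup

map.connect('summary_home', '/{repo_name:.*}/summary',
            controller='summary', action='index',
            conditions=dict(function=check_repo))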
@@ -67,11 +72,14 @@ def make_map(config):
         m.connect('delete_repo_user', "/repos_delete_user/{repo_name:.*}",
             action="delete_perm_user", conditions=dict(method=["DELETE"],
                 function=check_repo))
 
+    #ADMIN USER REST ROUTES
     map.resource('user', 'users', controller='admin/users', path_prefix='/_admin')
+
+    #ADMIN PERMISSIONS REST ROUTES
     map.resource('permission', 'permissions', controller='admin/permissions', path_prefix='/_admin')
 
-    #REST SETTINGS MAP
+    #ADMIN SETTINGS REST ROUTES
     with map.submapper(path_prefix='/_admin', controller='admin/settings') as m:
         m.connect("admin_settings", "/settings",
             action="create", conditions=dict(method=["POST"]))
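For reference, each map.resource() call in this hunk expands into the usual set of RESTful routes. A rough sketch of what Routes generates for the users resource is shown below; the exact route names follow the Routes convention and may vary slightly between Routes versions:

from routes import Mapper

map = Mapper()
map.resource('user', 'users', controller='admin/users', path_prefix='/_admin')

# Roughly equivalent to wiring the standard REST actions by hand:
#   GET    /_admin/users            -> action='index'
#   POST   /_admin/users            -> action='create'
#   GET    /_admin/users/new        -> action='new'
#   GET    /_admin/users/{id}       -> action='show'
#   PUT    /_admin/users/{id}       -> action='update'
#   DELETE /_admin/users/{id}       -> action='delete'
#   GET    /_admin/users/{id}/edit  -> action='edit'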
@@ -101,8 +109,8 @@ def make_map(config):
             action="my_account_update", conditions=dict(method=["PUT"]))
         m.connect("admin_settings_create_repository", "/create_repository",
             action="create_repository", conditions=dict(method=["GET"]))
 
-    #ADMIN
+    #ADMIN MAIN PAGES
     with map.submapper(path_prefix='/_admin', controller='admin/admin') as m:
         m.connect('admin_home', '', action='index')#main page
         m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
@@ -110,13 +118,13 @@ def make_map(config):
     #SEARCH
     map.connect('search', '/_admin/search', controller='search',)
     map.connect('search_repo', '/_admin/search/{search_repo:.*}', controller='search')
 
     #LOGIN/LOGOUT/REGISTER/SIGN IN
     map.connect('login_home', '/_admin/login', controller='login')
     map.connect('logout_home', '/_admin/logout', controller='login', action='logout')
     map.connect('register', '/_admin/register', controller='login', action='register')
     map.connect('reset_password', '/_admin/password_reset', controller='login', action='password_reset')
 
     #FEEDS
     map.connect('rss_feed_home', '/{repo_name:.*}/feed/rss',
         controller='feed', action='rss',
@@ -124,9 +132,9 @@ def make_map(config):
     map.connect('atom_feed_home', '/{repo_name:.*}/feed/atom',
         controller='feed', action='atom',
         conditions=dict(function=check_repo))
 
 
-    #OTHERS
+    #REPOSITORY ROUTES
     map.connect('changeset_home', '/{repo_name:.*}/changeset/{revision}',
         controller='changeset', revision='tip',
         conditions=dict(function=check_repo))
@@ -142,7 +150,7 @@ def make_map(config):
     map.connect('tags_home', '/{repo_name:.*}/tags',
         controller='tags', conditions=dict(function=check_repo))
     map.connect('changelog_home', '/{repo_name:.*}/changelog',
         controller='changelog', conditions=dict(function=check_repo))
     map.connect('files_home', '/{repo_name:.*}/files/{revision}/{f_path:.*}',
         controller='files', revision='tip', f_path='',
         conditions=dict(function=check_repo))
@@ -157,10 +165,10 @@ def make_map(config):
         conditions=dict(function=check_repo))
     map.connect('files_annotate_home', '/{repo_name:.*}/annotate/{revision}/{f_path:.*}',
         controller='files', action='annotate', revision='tip', f_path='',
         conditions=dict(function=check_repo))
     map.connect('files_archive_home', '/{repo_name:.*}/archive/{revision}/{fileformat}',
         controller='files', action='archivefile', revision='tip',
         conditions=dict(function=check_repo))
     map.connect('repo_settings_delete', '/{repo_name:.*}/settings',
         controller='settings', action="delete",
         conditions=dict(method=["DELETE"], function=check_repo))
@@ -177,5 +185,5 @@ def make_map(config):
     map.connect('repo_fork_home', '/{repo_name:.*}/fork',
         controller='settings', action='fork',
         conditions=dict(function=check_repo))
 
     return map
@@ -193,7 +193,7 @@ class ReposController(BaseController):
     def delete_perm_user(self, repo_name):
         """
         DELETE an existing repository permission user
-
+        :param repo_name:
         """
 
         try:
@@ -66,7 +66,7 @@ class PasswordGenerator(object):
 
 def get_crypt_password(password):
     """Cryptographic function used for password hashing based on sha1
-
+    :param password: password to hash
     """
     return bcrypt.hashpw(password, bcrypt.gensalt(10))
 
@@ -120,7 +120,7 @@ def set_available_permissions(config):
     permission given in db. We don't wannt to check each time from db for new
     permissions since adding a new permission also requires application restart
     ie. to decorate new views with the newly created permission
-
+    :param config:
     """
     log.info('getting information about all available permissions')
     try:
@@ -138,7 +138,7 @@ def fill_data(user):
     """
     Fills user data with those from database and log out user if not present
     in database
-
+    :param user:
     """
     sa = meta.Session
     dbuser = sa.query(User).get(user.user_id)
@@ -156,7 +156,7 @@ def fill_data(user):
 def fill_perms(user):
     """
     Fills user permission attribute with permissions taken from database
-
+    :param user:
     """
 
     sa = meta.Session
@@ -228,7 +228,7 @@ def fill_perms(user):
 def get_user(session):
     """
     Gets user from session, and wraps permissions into user
-
+    :param session:
     """
     user = session.get('rhodecode_user', AuthUser())
     if user.is_authenticated:
@@ -28,8 +28,8 @@ from webhelpers.text import chop_at, col
 class _Link(object):
     '''
     Make a url based on label and url with help of url_for
-
-
+    :param label:name of link if not defined url is used
+    :param url: the url for link
     '''
 
     def __call__(self, label='', *url_, **urlargs):
@@ -52,8 +52,8 @@ get_error = _GetError()
 def recursive_replace(str, replace=' '):
     """
     Recursive replace of given sign to just one instance
-
-
+    :param str: given string
+    :param replace:char to find and replace multiple instances
 
     Examples::
     >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
@@ -72,7 +72,7 @@ class _ToolTip(object):
         """
         Special function just to wrap our text into nice formatted autowrapped
         text
-
+        :param tooltip_title:
         """
 
         return wrap_paragraphs(escape(tooltip_title), trim_at)\
@@ -226,7 +226,7 @@ class CodeHtmlFormatter(HtmlFormatter):
 def pygmentize(filenode, **kwargs):
     """
     pygmentize function using pygments
-
+    :param filenode:
     """
     return literal(code_highlight(filenode.content,
                 filenode.lexer, CodeHtmlFormatter(**kwargs)))
@@ -234,7 +234,7 @@ def pygmentize(filenode, **kwargs):
 def pygmentize_annotation(filenode, **kwargs):
     """
     pygmentize function for annotation
-
+    :param filenode:
     """
 
     color_dict = {}
@@ -53,9 +53,9 @@ def repo_size(ui, repo, hooktype=None, *
 def user_action_mapper(ui, repo, hooktype=None, **kwargs):
     """
     Maps user last push action to new changeset id, from mercurial
-
-
-
+    :param ui:
+    :param repo:
+    :param hooktype:
     """
 
     try:
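user_action_mapper above follows the standard Mercurial in-process hook signature (ui, repo, hooktype, **kwargs). A hedged sketch of how such a function can be registered on a ui object; the hook key and the dotted module path are illustrative, not necessarily the ones RhodeCode configures:

from mercurial import ui

baseui = ui.ui()
# 'python:<module>.<function>' tells Mercurial to import and call the
# function in-process when a changegroup is added (push/pull); the key
# name and module path below are assumptions for illustration.
baseui.setconfig('hooks', 'changegroup.action_logger',
                 'python:rhodecode.lib.hooks.user_action_mapper')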
@@ -115,8 +115,8 @@ class ResultWrapper(object):
         Smart function that implements chunking the content
         but not overlap chunks so it doesn't highlight the same
         close occurrences twice.
-
-
+        :param matcher:
+        :param size:
         """
         memory = [(0, 0)]
         for span in self.matcher.spans():
@@ -109,8 +109,8 @@ class DaemonLock(object):
     def makelock(self, lockname, pidfile):
         """
         this function will make an actual lock
-
-
+        :param lockname: acctual pid of file
+        :param pidfile: the file to write the pid in
         """
         if self.debug:
             print 'creating a file %s and pid: %s' % (pidfile, lockname)
@@ -108,7 +108,7 @@ class SmtpMailer(object):
         '''
         Get content based on type, if content is a string do open first
         else just read because it's a probably open file object
-
+        :param msg_file:
         '''
         if isinstance(msg_file, str):
             return open(msg_file, "rb").read()
@@ -36,7 +36,7 @@ import os
 log = logging.getLogger(__name__)
 
 
 def get_repo_slug(request):
     return request.environ['pylons.routes_dict'].get('repo_name')
 
 def is_mercurial(environ):
@@ -49,14 +49,26 @@ def is_mercurial(environ):
         return True
     return False
 
+def is_git(environ):
+    """
+    Returns True if request's target is git server. ``HTTP_USER_AGENT`` would
+    then have git client version given.
+
+    :param environ:
+    """
+    http_user_agent = environ.get('HTTP_USER_AGENT')
+    if http_user_agent.startswith('git'):
+        return True
+    return False
+
 def action_logger(user, action, repo, ipaddr, sa=None):
     """
     Action logger for various action made by users
     """
 
     if not sa:
         sa = meta.Session
 
     try:
         if hasattr(user, 'user_id'):
             user_id = user.user_id
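The new is_git() mirrors is_mercurial(): both look only at the WSGI environ, so a wrapping WSGI layer can decide which backend should handle the request before routing runs. A minimal sketch of that idea, assuming is_mercurial/is_git are importable from the module above; the class and the environ key are illustrative, not RhodeCode's actual middleware:

# Illustrative only -- not the actual RhodeCode middleware wiring.
class VCSDispatcher(object):
    def __init__(self, app):
        self.app = app                      # the wrapped WSGI application

    def __call__(self, environ, start_response):
        if is_mercurial(environ):
            environ['rhodecode.vcs'] = 'hg'   # hypothetical marker key
        elif is_git(environ):
            environ['rhodecode.vcs'] = 'git'
        return self.app(environ, start_response)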
@@ -64,7 +76,7 @@ def action_logger(user, action, repo, ip
             user_id = sa.query(User).filter(User.username == user).one()
         else:
             raise Exception('You have to provide user object or username')
 
         repo_name = repo.lstrip('/')
         user_log = UserLog()
         user_log.user_id = user_id
@@ -82,7 +94,7 @@ def action_logger(user, action, repo, ip
         raise
         sa.rollback()
         log.error('could not log user action:%s', str(e))
 
 def check_repo_dir(paths):
     repos_path = paths[0][1].split('/')
     if repos_path[-1] in ['*', '**']:
@@ -122,7 +134,7 @@ def ask_ok(prompt, retries=4, complaint=
         retries = retries - 1
         if retries < 0: raise IOError
         print complaint
 
 @cache_region('super_short_term', 'cached_hg_ui')
 def get_hg_ui_cached():
     try:
@@ -139,13 +151,13 @@ def get_hg_settings():
         ret = sa.query(RhodeCodeSettings).all()
     finally:
         meta.Session.remove()
 
     if not ret:
         raise Exception('Could not get application settings !')
     settings = {}
     for each in ret:
         settings['rhodecode_' + each.app_settings_name] = each.app_settings_value
 
     return settings
 
 def get_hg_ui_settings():
@@ -154,7 +166,7 @@ def get_hg_ui_settings():
         ret = sa.query(RhodeCodeUi).all()
     finally:
         meta.Session.remove()
 
     if not ret:
         raise Exception('Could not get application ui settings !')
     settings = {}
@@ -163,15 +175,15 @@ def get_hg_ui_settings():
         v = each.ui_value
         if k == '/':
             k = 'root_path'
 
         if k.find('.') != -1:
             k = k.replace('.', '_')
 
         if each.ui_section == 'hooks':
             v = each.ui_active
 
         settings[each.ui_section + '_' + k] = v
 
     return settings
 
 #propagated from mercurial documentation
@@ -185,15 +197,15 @@ ui_sections = ['alias', 'auth',
                'paths', 'profiling',
                'server', 'trusted',
                'ui', 'web', ]
 
 def make_ui(read_from='file', path=None, checkpaths=True):
     """
     A function that will read python rc files or database
     and make an mercurial ui object from read options
 
-
-
-
+    :param path: path to mercurial config file
+    :param checkpaths: check the path
+    :param read_from: read from 'file' or 'db'
     """
 
     baseui = ui.ui()
@@ -209,52 +221,52 @@ def make_ui(read_from='file', path=None,
             for k, v in cfg.items(section):
                 baseui.setconfig(section, k, v)
                 log.debug('settings ui from file[%s]%s:%s', section, k, v)
         if checkpaths:check_repo_dir(cfg.items('paths'))
 
 
     elif read_from == 'db':
         hg_ui = get_hg_ui_cached()
         for ui_ in hg_ui:
             if ui_.ui_active:
                 log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, ui_.ui_key, ui_.ui_value)
                 baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
 
 
     return baseui
 
 
 def set_rhodecode_config(config):
     hgsettings = get_hg_settings()
 
     for k, v in hgsettings.items():
         config[k] = v
 
 def invalidate_cache(name, *args):
     """Invalidates given name cache"""
 
     from beaker.cache import region_invalidate
     log.info('INVALIDATING CACHE FOR %s', name)
 
     """propagate our arguments to make sure invalidation works. First
     argument has to be the name of cached func name give to cache decorator
     without that the invalidation would not work"""
     tmp = [name]
     tmp.extend(args)
     args = tuple(tmp)
 
     if name == 'cached_repo_list':
         from rhodecode.model.hg_model import _get_repos_cached
         region_invalidate(_get_repos_cached, None, *args)
 
     if name == 'full_changelog':
         from rhodecode.model.hg_model import _full_changelog_cached
         region_invalidate(_full_changelog_cached, None, *args)
 
 class EmptyChangeset(BaseChangeset):
     """
     An dummy empty changeset.
     """
 
     revision = -1
     message = ''
     author = ''
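Based only on the signature shown above, make_ui() can be driven either from an hgrc-style file or from the ui settings stored in the database; the file path in this usage sketch is a placeholder, not a real location:

# read config sections from an hgrc-style file on disk
file_ui = make_ui(read_from='file', path='/path/to/hgrc', checkpaths=False)

# or build the same kind of ui object from the database-backed settings
db_ui = make_ui(read_from='db')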
@@ -266,35 +278,35 @@ class EmptyChangeset(BaseChangeset):
         representation.
         """
         return '0' * 40
 
     @LazyProperty
     def short_id(self):
         return self.raw_id[:12]
 
     def get_file_changeset(self, path):
         return self
 
     def get_file_content(self, path):
         return u''
 
     def get_file_size(self, path):
         return 0
 
 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
     """
     maps all found repositories into db
     """
     from rhodecode.model.repo_model import RepoModel
 
     sa = meta.Session
     user = sa.query(User).filter(User.admin == True).first()
 
     rm = RepoModel()
 
     for name, repo in initial_repo_list.items():
         if not sa.query(Repository).filter(Repository.repo_name == name).scalar():
             log.info('repository %s not found creating default', name)
 
             form_data = {
                          'repo_name':name,
                          'description':repo.description if repo.description != 'unknown' else \
@@ -311,7 +323,7 @@ def repo2db_mapper(initial_repo_list, re
             sa.delete(repo)
             sa.commit()
 
 
     meta.Session.remove()
 
 from UserDict import DictMixin
@@ -421,25 +433,25 @@ class OrderedDict(dict, DictMixin):
 #===============================================================================
 def create_test_index(repo_location, full_index):
     """Makes default test index
-
-
+    :param repo_location:
+    :param full_index:
     """
     from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
     from rhodecode.lib.pidlock import DaemonLock, LockHeld
     from rhodecode.lib.indexers import IDX_LOCATION
     import shutil
 
     if os.path.exists(IDX_LOCATION):
         shutil.rmtree(IDX_LOCATION)
 
     try:
         l = DaemonLock()
         WhooshIndexingDaemon(repo_location=repo_location)\
             .run(full_index=full_index)
         l.release()
     except LockHeld:
         pass
 
 def create_test_env(repos_test_path, config):
     """Makes a fresh database and
     install test repository into tmp dir
@@ -448,7 +460,7 @@ def create_test_env(repos_test_path, con
     import tarfile
     import shutil
     from os.path import dirname as dn, join as jn, abspath
 
     log = logging.getLogger('TestEnvCreator')
     # create logger
     log.setLevel(logging.DEBUG)
@@ -456,20 +468,20 @@ def create_test_env(repos_test_path, con
     # create console handler and set level to debug
     ch = logging.StreamHandler()
     ch.setLevel(logging.DEBUG)
 
     # create formatter
     formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
 
     # add formatter to ch
     ch.setFormatter(formatter)
 
     # add ch to logger
     log.addHandler(ch)
 
     #PART ONE create db
     dbname = config['sqlalchemy.db1.url'].split('/')[-1]
     log.debug('making test db %s', dbname)
 
     dbmanage = DbManage(log_sql=True, dbname=dbname, root=config['here'],
                         tests=True)
     dbmanage.create_tables(override=True)
@@ -478,12 +490,12 @@ def create_test_env(repos_test_path, con
     dbmanage.admin_prompt()
     dbmanage.create_permissions()
     dbmanage.populate_default_permissions()
 
     #PART TWO make test repo
     log.debug('making test vcs repo')
     if os.path.isdir('/tmp/vcs_test'):
         shutil.rmtree('/tmp/vcs_test')
 
     cur_dir = dn(dn(abspath(__file__)))
     tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test.tar.gz"))
     tar.extractall('/tmp')