integrations: add integration support...
dan -
r411:df8dc98d default
@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 from rhodecode.integrations.registry import IntegrationTypeRegistry
23 from rhodecode.integrations.types import slack
24
25 log = logging.getLogger(__name__)
26
27
28 # TODO: dan: This is currently global until we figure out what to do about
29 # VCSs not having a pyramid context - move it to the pyramid app configuration
30 # (includeme) level later to allow per-instance integration setup
31 integration_type_registry = IntegrationTypeRegistry()
32 integration_type_registry.register_integration_type(slack.SlackIntegrationType)
33
34 def integrations_event_handler(event):
35 """
36 Takes an event and passes it to all enabled integrations
37 """
38 from rhodecode.model.integration import IntegrationModel
39
40 integration_model = IntegrationModel()
41 integrations = integration_model.get_for_event(event)
42 for integration in integrations:
43 try:
44 integration_model.send_event(integration, event)
45 except Exception:
46 log.exception(
47 'failure occurred when sending event %s to integration %s',
48 event, integration)
49
50
51 def includeme(config):
52 config.include('rhodecode.integrations.routes')
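For context, a minimal standalone sketch of the dispatch pattern used by integrations_event_handler above, with stand-in objects instead of IntegrationModel and real event classes (every name below is illustrative): each enabled integration gets the event, and a failure in one integration is logged without stopping delivery to the rest.

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)


class DummyIntegration(object):
    # stand-in for an Integration row; `fail` simulates a broken integration
    def __init__(self, name, fail=False):
        self.name = name
        self.fail = fail


def send_event(integration, event):
    # stand-in for IntegrationModel.send_event
    if integration.fail:
        raise RuntimeError('delivery failed')
    log.debug('delivered %s to %s', event, integration.name)


def dispatch(integrations, event):
    # same shape as integrations_event_handler: one broken integration
    # must not prevent the others from receiving the event
    for integration in integrations:
        try:
            send_event(integration, event)
        except Exception:
            log.exception(
                'failure occurred when sending event %s to integration %s',
                event, integration.name)


dispatch([DummyIntegration('slack'), DummyIntegration('broken', fail=True)],
         event='repo-push')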
@@ -0,0 +1,37 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2012-2016 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 import logging
21
22 log = logging.getLogger(__name__)
23
24
25 class IntegrationTypeRegistry(dict):
26 """
27 Registry Class to hold IntegrationTypes
28 """
29 def register_integration_type(self, IntegrationType):
30 key = IntegrationType.key
31 if key in self:
32 log.warning(
33 'Overriding existing integration type %s (%s) with %s',
34 self[key], key, IntegrationType)
35
36 self[key] = IntegrationType
37
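A quick usage sketch of the registry above (the integration type classes here are made up): types are stored under their `key` attribute, and registering a second type under an existing key logs a warning and then overrides the previous entry.

from rhodecode.integrations.registry import IntegrationTypeRegistry


class ExampleIntegrationType(object):
    key = 'example'


class ReplacementIntegrationType(object):
    key = 'example'  # same key: registering this logs an override warning


registry = IntegrationTypeRegistry()
registry.register_integration_type(ExampleIntegrationType)
registry.register_integration_type(ReplacementIntegrationType)

# the registry subclasses dict, so plain dict lookups work
assert registry['example'] is ReplacementIntegrationType
assert 'example' in registry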
@@ -0,0 +1,133 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22
23 from rhodecode.model.db import Repository, Integration
24 from rhodecode.config.routing import (
25 ADMIN_PREFIX, add_route_requirements, URL_NAME_REQUIREMENTS)
26 from rhodecode.integrations import integration_type_registry
27
28 log = logging.getLogger(__name__)
29
30
31 def includeme(config):
32 config.add_route('global_integrations_home',
33 ADMIN_PREFIX + '/integrations')
34 config.add_route('global_integrations_list',
35 ADMIN_PREFIX + '/integrations/{integration}')
36 for route_name in ['global_integrations_home', 'global_integrations_list']:
37 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
38 attr='index',
39 renderer='rhodecode:templates/admin/integrations/list.html',
40 request_method='GET',
41 route_name=route_name)
42
43 config.add_route('global_integrations_create',
44 ADMIN_PREFIX + '/integrations/{integration}/new',
45 custom_predicates=(valid_integration,))
46 config.add_route('global_integrations_edit',
47 ADMIN_PREFIX + '/integrations/{integration}/{integration_id}',
48 custom_predicates=(valid_integration,))
49 for route_name in ['global_integrations_create', 'global_integrations_edit']:
50 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
51 attr='settings_get',
52 renderer='rhodecode:templates/admin/integrations/edit.html',
53 request_method='GET',
54 route_name=route_name)
55 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
56 attr='settings_post',
57 renderer='rhodecode:templates/admin/integrations/edit.html',
58 request_method='POST',
59 route_name=route_name)
60
61 config.add_route('repo_integrations_home',
62 add_route_requirements(
63 '{repo_name}/settings/integrations',
64 URL_NAME_REQUIREMENTS
65 ),
66 custom_predicates=(valid_repo,))
67 config.add_route('repo_integrations_list',
68 add_route_requirements(
69 '{repo_name}/settings/integrations/{integration}',
70 URL_NAME_REQUIREMENTS
71 ),
72 custom_predicates=(valid_repo, valid_integration))
73 for route_name in ['repo_integrations_home', 'repo_integrations_list']:
74 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
75 attr='index',
76 request_method='GET',
77 route_name=route_name)
78
79 config.add_route('repo_integrations_create',
80 add_route_requirements(
81 '{repo_name}/settings/integrations/{integration}/new',
82 URL_NAME_REQUIREMENTS
83 ),
84 custom_predicates=(valid_repo, valid_integration))
85 config.add_route('repo_integrations_edit',
86 add_route_requirements(
87 '{repo_name}/settings/integrations/{integration}/{integration_id}',
88 URL_NAME_REQUIREMENTS
89 ),
90 custom_predicates=(valid_repo, valid_integration))
91 for route_name in ['repo_integrations_edit', 'repo_integrations_create']:
92 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
93 attr='settings_get',
94 renderer='rhodecode:templates/admin/integrations/edit.html',
95 request_method='GET',
96 route_name=route_name)
97 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
98 attr='settings_post',
99 renderer='rhodecode:templates/admin/integrations/edit.html',
100 request_method='POST',
101 route_name=route_name)
102
103
104 def valid_repo(info, request):
105 repo = Repository.get_by_repo_name(info['match']['repo_name'])
106 if repo:
107 return True
108
109
110 def valid_integration(info, request):
111 integration_type = info['match']['integration']
112 integration_id = info['match'].get('integration_id')
113 repo_name = info['match'].get('repo_name')
114
115 if integration_type not in integration_type_registry:
116 return False
117
118 repo = None
119 if repo_name:
120 repo = Repository.get_by_repo_name(info['match']['repo_name'])
121 if not repo:
122 return False
123
124 if integration_id:
125 integration = Integration.get(integration_id)
126 if not integration:
127 return False
128 if integration.integration_type != integration_type:
129 return False
130 if repo and repo.repo_id != integration.repo_id:
131 return False
132
133 return True
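The routes above use Pyramid's legacy custom_predicates hook: each predicate is called with the route-match info and the request, and returning False makes the route not match, so an unknown integration type or repository ends up as a 404 instead of reaching the view. A minimal self-contained sketch of that mechanism (route name, pattern and predicate are illustrative):

from pyramid.config import Configurator


def integration_exists(info, request):
    # same signature as valid_repo/valid_integration above; returning False
    # means the route does not match at all
    return info['match']['integration'] in ('slack',)


config = Configurator()
config.add_route('example_integrations_home',
                 '/integrations/{integration}',
                 custom_predicates=(integration_exists,))
app = config.make_wsgi_app()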
@@ -0,0 +1,48 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22
23 from rhodecode.translation import lazy_ugettext
24
25
26 class IntegrationSettingsSchemaBase(colander.MappingSchema):
27 """
28 This base schema is intended for use in integrations.
29 It adds a few default settings (e.g., "enabled"), so that integration
30 authors don't have to maintain a bunch of boilerplate.
31 """
32 enabled = colander.SchemaNode(
33 colander.Bool(),
34 default=True,
35 description=lazy_ugettext('Enable or disable this integration.'),
36 missing=False,
37 title=lazy_ugettext('Enabled'),
38 widget='bool',
39 )
40
41 name = colander.SchemaNode(
42 colander.String(),
43 description=lazy_ugettext('Short name for this integration.'),
44 missing=colander.required,
45 title=lazy_ugettext('Integration name'),
46 widget='string',
47 )
48
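Integration types are expected to extend this base schema rather than redefine `enabled`/`name` themselves; the Slack type further down does exactly that. A hedged sketch of such a subclass (the `api_url` field is purely illustrative):

import colander

from rhodecode.integrations.schema import IntegrationSettingsSchemaBase


class ExampleIntegrationSettingsSchema(IntegrationSettingsSchemaBase):
    # inherits `enabled` and `name`; only integration-specific fields go here
    api_url = colander.SchemaNode(
        colander.String(),
        title='API URL',
        description='Endpoint the integration posts to.',
        validator=colander.url,
        widget='string',
    )


schema = ExampleIntegrationSettingsSchema()
data = schema.deserialize({
    'name': 'Example integration',
    'api_url': 'https://example.com/hook',
})
assert data['enabled'] is False  # `missing=False` kicks in when not submitted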
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,43 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
22
23
24 class IntegrationTypeBase(object):
25 """ Base class for IntegrationType plugins """
26
27 def __init__(self, settings):
28 """
29 :param settings: dict of settings to be used for the integration
30 """
31 self.settings = settings
32
33
34 @classmethod
35 def settings_schema(cls):
36 """
37 A colander schema of settings for the integration type
38
39 Subclasses can return their own schema but should always
40 inherit from IntegrationSettingsSchemaBase
41 """
42 return IntegrationSettingsSchemaBase()
43
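A minimal sketch of a concrete integration type built on the base class above (the Slack implementation that follows is the real example; everything here is illustrative): a subclass declares `key` and `display_name`, optionally extends the settings schema, and implements `send_event`.

import logging

from rhodecode.integrations.types.base import IntegrationTypeBase

log = logging.getLogger(__name__)


class LogIntegrationType(IntegrationTypeBase):
    """Illustrative integration that only logs the events it receives."""

    key = 'log'
    display_name = 'Log'

    def send_event(self, event):
        if event.name not in self.settings.get('events', ()):
            log.debug('event ignored: %r', event)
            return
        log.info('received event %r with settings %r', event, self.settings)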
@@ -0,0 +1,199 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from __future__ import unicode_literals
22
23 import re
24 import logging
25 import requests
26 import colander
27 from celery.task import task
28 from mako.template import Template
29
30 from rhodecode import events
31 from rhodecode.translation import lazy_ugettext
32 from rhodecode.lib import helpers as h
33 from rhodecode.lib.celerylib import run_task
34 from rhodecode.lib.colander_utils import strip_whitespace
35 from rhodecode.integrations.types.base import IntegrationTypeBase
36 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
37
38 log = logging.getLogger(__name__)
39
40
41 class SlackSettingsSchema(IntegrationSettingsSchemaBase):
42 service = colander.SchemaNode(
43 colander.String(),
44 title=lazy_ugettext('Slack service URL'),
45 description=h.literal(lazy_ugettext(
46 'This can be set up at the '
47 '<a href="https://my.slack.com/services/new/incoming-webhook/">'
48 'Slack app manager</a>')),
49 default='',
50 placeholder='https://hooks.slack.com/services/...',
51 preparer=strip_whitespace,
52 validator=colander.url,
53 widget='string'
54 )
55 username = colander.SchemaNode(
56 colander.String(),
57 title=lazy_ugettext('Username'),
58 description=lazy_ugettext('Username to show as the sender of notifications.'),
59 missing='Rhodecode',
60 preparer=strip_whitespace,
61 widget='string',
62 placeholder='Rhodecode'
63 )
64 channel = colander.SchemaNode(
65 colander.String(),
66 title=lazy_ugettext('Channel'),
67 description=lazy_ugettext('Channel to send notifications to.'),
68 missing='',
69 preparer=strip_whitespace,
70 widget='string',
71 placeholder='#general'
72 )
73 icon_emoji = colander.SchemaNode(
74 colander.String(),
75 title=lazy_ugettext('Emoji'),
76 description=lazy_ugettext('Emoji to use, e.g. :studio_microphone:'),
77 missing='',
78 preparer=strip_whitespace,
79 widget='string',
80 placeholder=':studio_microphone:'
81 )
82
83
84 repo_push_template = Template(r'''
85 *${data['actor']['username']}* pushed to \
86 %if data['push']['branches']:
87 ${len(data['push']['branches']) > 1 and 'branches' or 'branch'} \
88 ${', '.join('<%s|%s>' % (branch['url'], branch['name']) for branch in data['push']['branches'])} \
89 %else:
90 unknown branch \
91 %endif
92 in <${data['repo']['url']}|${data['repo']['repo_name']}>
93 >>>
94 %for commit in data['push']['commits']:
95 <${commit['url']}|${commit['short_id']}> - ${commit['message_html']|html_to_slack_links}
96 %endfor
97 ''')
98
99
100 class SlackIntegrationType(IntegrationTypeBase):
101 key = 'slack'
102 display_name = lazy_ugettext('Slack')
103 SettingsSchema = SlackSettingsSchema
104 valid_events = [
105 events.PullRequestCloseEvent,
106 events.PullRequestMergeEvent,
107 events.PullRequestUpdateEvent,
108 events.PullRequestReviewEvent,
109 events.PullRequestCreateEvent,
110 events.RepoPushEvent,
111 events.RepoCreateEvent,
112 ]
113
114 def send_event(self, event):
115 if event.__class__ not in self.valid_events:
116 log.debug('event not valid: %r', event)
117 return
118
119 if event.name not in self.settings['events']:
120 log.debug('event ignored: %r', event)
121 return
122
123 data = event.as_dict()
124
125 text = '*%s* caused a *%s* event' % (
126 data['actor']['username'], event.name)
127
128 if isinstance(event, events.PullRequestEvent):
129 text = self.format_pull_request_event(event, data)
130 elif isinstance(event, events.RepoPushEvent):
131 text = self.format_repo_push_event(data)
132 elif isinstance(event, events.RepoCreateEvent):
133 text = self.format_repo_create_event(data)
134 else:
135 log.error('unhandled event type: %r', event)
136
137 run_task(post_text_to_slack, self.settings, text)
138
139 @classmethod
140 def settings_schema(cls):
141 schema = SlackSettingsSchema()
142 schema.add(colander.SchemaNode(
143 colander.Set(),
144 widget='checkbox_list',
145 choices=sorted([e.name for e in cls.valid_events]),
146 description="Events activated for this integration",
147 default=[e.name for e in cls.valid_events],
148 name='events'
149 ))
150 return schema
151
152 def format_pull_request_event(self, event, data):
153 action = {
154 events.PullRequestCloseEvent: 'closed',
155 events.PullRequestMergeEvent: 'merged',
156 events.PullRequestUpdateEvent: 'updated',
157 events.PullRequestReviewEvent: 'reviewed',
158 events.PullRequestCreateEvent: 'created',
159 }.get(event.__class__, '<unknown action>')
160
161 return ('Pull request <{url}|#{number}> ({title}) '
162 '{action} by {user}').format(
163 user=data['actor']['username'],
164 number=data['pullrequest']['pull_request_id'],
165 url=data['pullrequest']['url'],
166 title=data['pullrequest']['title'],
167 action=action
168 )
169
170 def format_repo_push_event(self, data):
171 result = repo_push_template.render(
172 data=data,
173 html_to_slack_links=html_to_slack_links,
174 )
175 return result
176
177 def format_repo_create_event(self, data):
178 return '<{}|{}> ({}) repository created by *{}*'.format(
179 data['repo']['url'],
180 data['repo']['repo_name'],
181 data['repo']['repo_type'],
182 data['actor']['username'],
183 )
184
185
186 def html_to_slack_links(message):
187 return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
188 r'<\1|\2>', message)
189
190
191 @task(ignore_result=True)
192 def post_text_to_slack(settings, text):
193 resp = requests.post(settings['service'], json={
194 "channel": settings.get('channel', ''),
195 "username": settings.get('username', 'Rhodecode'),
196 "text": text,
197 "icon_emoji": settings.get('icon_emoji', ':studio_microphone:')
198 })
199 resp.raise_for_status() # raise exception on a failed request
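The html_to_slack_links helper above rewrites anchor tags produced by the commit-message renderer into Slack's `<url|label>` link syntax before the text is posted. A small illustration of the conversion (the input string is made up):

from rhodecode.integrations.types.slack import html_to_slack_links

message = ('fixes <a class="issue" href="https://example.com/issues/42">#42</a> '
           'in the parser')
print(html_to_slack_links(message))
# -> fixes <https://example.com/issues/42|#42> in the parser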
@@ -0,0 +1,257 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22 import logging
23 import pylons
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 from pyramid.renderers import render
27 from pyramid.response import Response
28
29 from rhodecode.lib import auth
30 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
31 from rhodecode.model.db import Repository, Session, Integration
32 from rhodecode.model.scm import ScmModel
33 from rhodecode.model.integration import IntegrationModel
34 from rhodecode.admin.navigation import navigation_list
35 from rhodecode.translation import _
36 from rhodecode.integrations import integration_type_registry
37
38 log = logging.getLogger(__name__)
39
40
41 class IntegrationSettingsViewBase(object):
42 """ Base Integration settings view used by both repo / global settings """
43
44 def __init__(self, context, request):
45 self.context = context
46 self.request = request
47 self._load_general_context()
48
49 if not self.perm_check(request.user):
50 raise HTTPForbidden()
51
52 def _load_general_context(self):
53 """
54 This avoids boilerplate for the repo/global + list/edit views and
55 templates by loading all possible context at once; it should, however,
56 be split into separate functions once more "contexts" exist.
57 """
58
59 self.IntegrationType = None
60 self.repo = None
61 self.integration = None
62 self.integrations = {}
63
64 request = self.request
65
66 if 'repo_name' in request.matchdict: # we're in a repo context
67 repo_name = request.matchdict['repo_name']
68 self.repo = Repository.get_by_repo_name(repo_name)
69
70 if 'integration' in request.matchdict: # we're in integration context
71 integration_type = request.matchdict['integration']
72 self.IntegrationType = integration_type_registry[integration_type]
73
74 if 'integration_id' in request.matchdict: # single integration context
75 integration_id = request.matchdict['integration_id']
76 self.integration = Integration.get(integration_id)
77 else: # list integrations context
78 for integration in IntegrationModel().get_integrations(self.repo):
79 self.integrations.setdefault(integration.integration_type, []
80 ).append(integration)
81
82 self.settings = self.integration and self.integration.settings or {}
83
84 def _template_c_context(self):
85 # TODO: dan: this is a stopgap in order to inherit from the current
86 # pylons-based admin/repo settings templates - this should be removed
87 # entirely after the port to pyramid
88
89 c = pylons.tmpl_context
90 c.active = 'integrations'
91 c.rhodecode_user = self.request.user
92 c.repo = self.repo
93 c.repo_name = self.repo and self.repo.repo_name or None
94 if self.repo:
95 c.repo_info = self.repo
96 c.rhodecode_db_repo = self.repo
97 c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
98 else:
99 c.navlist = navigation_list(self.request)
100
101 return c
102
103 def _form_schema(self):
104 return self.IntegrationType.settings_schema()
105
106 def settings_get(self, defaults=None, errors=None):
107 """
108 View that displays the plugin settings as a form.
109 """
110 defaults = defaults or {}
111 errors = errors or {}
112
113 schema = self._form_schema()
114
115 if not defaults:
116 if self.integration:
117 defaults['enabled'] = self.integration.enabled
118 defaults['name'] = self.integration.name
119 else:
120 if self.repo:
121 scope = self.repo.repo_name
122 else:
123 scope = _('Global')
124
125 defaults['name'] = '{} {} integration'.format(scope,
126 self.IntegrationType.display_name)
127 defaults['enabled'] = True
128
129 for node in schema:
130 setting = self.settings.get(node.name)
131 if setting is not None:
132 defaults.setdefault(node.name, setting)
133 else:
134 if node.default:
135 defaults.setdefault(node.name, node.default)
136
137 template_context = {
138 'defaults': defaults,
139 'errors': errors,
140 'schema': schema,
141 'current_IntegrationType': self.IntegrationType,
142 'integration': self.integration,
143 'settings': self.settings,
144 'resource': self.context,
145 'c': self._template_c_context(),
146 }
147
148 return template_context
149
150 @auth.CSRFRequired()
151 def settings_post(self):
152 """
153 View that validates and stores the plugin settings.
154 """
155 if self.request.params.get('delete'):
156 Session().delete(self.integration)
157 Session().commit()
158 self.request.session.flash(
159 _('Integration {integration_name} deleted successfully.').format(
160 integration_name=self.integration.name),
161 queue='success')
162 if self.repo:
163 redirect_to = self.request.route_url(
164 'repo_integrations_home', repo_name=self.repo.repo_name)
165 else:
166 redirect_to = self.request.route_url('global_integrations_home')
167 raise HTTPFound(redirect_to)
168
169 schema = self._form_schema()
170
171 params = {}
172 for node in schema.children:
173 if type(node.typ) in (colander.Set, colander.List):
174 val = self.request.params.getall(node.name)
175 else:
176 val = self.request.params.get(node.name)
177 if val:
178 params[node.name] = val
179
180 try:
181 valid_data = schema.deserialize(params)
182 except colander.Invalid as e:
183 # Display error message and display form again.
184 self.request.session.flash(
185 _('Errors exist when saving plugin settings. '
186 'Please check the form inputs.'),
187 queue='error')
188 return self.settings_get(errors=e.asdict(), defaults=params)
189
190 if not self.integration:
191 self.integration = Integration(
192 integration_type=self.IntegrationType.key)
193 if self.repo:
194 self.integration.repo = self.repo
195 Session().add(self.integration)
196
197 self.integration.enabled = valid_data.pop('enabled', False)
198 self.integration.name = valid_data.pop('name')
199 self.integration.settings = valid_data
200
201 Session().commit()
202
203 # Display success message and redirect.
204 self.request.session.flash(
205 _('Integration {integration_name} updated successfully.').format(
206 integration_name=self.IntegrationType.display_name),
207 queue='success')
208 if self.repo:
209 redirect_to = self.request.route_url(
210 'repo_integrations_edit', repo_name=self.repo.repo_name,
211 integration=self.integration.integration_type,
212 integration_id=self.integration.integration_id)
213 else:
214 redirect_to = self.request.route_url(
215 'global_integrations_edit',
216 integration=self.integration.integration_type,
217 integration_id=self.integration.integration_id)
218
219 return HTTPFound(redirect_to)
220
221 def index(self):
222 current_integrations = self.integrations
223 if self.IntegrationType:
224 current_integrations = {
225 self.IntegrationType.key: self.integrations.get(
226 self.IntegrationType.key, [])
227 }
228
229 template_context = {
230 'current_IntegrationType': self.IntegrationType,
231 'current_integrations': current_integrations,
232 'current_integration': 'none',
233 'available_integrations': integration_type_registry,
234 'c': self._template_c_context()
235 }
236
237 if self.repo:
238 html = render('rhodecode:templates/admin/integrations/list.html',
239 template_context,
240 request=self.request)
241 else:
242 html = render('rhodecode:templates/admin/integrations/list.html',
243 template_context,
244 request=self.request)
245
246 return Response(html)
247
248
249 class GlobalIntegrationsView(IntegrationSettingsViewBase):
250 def perm_check(self, user):
251 return auth.HasPermissionAll('hg.admin').check_permissions(user=user)
252
253
254 class RepoIntegrationsView(IntegrationSettingsViewBase):
255 def perm_check(self, user):
256 return auth.HasRepoPermissionAll('repository.admin')(
257 repo_name=self.repo.repo_name, user=user)
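settings_post above collects one value per schema node (using getall() for Set/List nodes so multi-valued checkboxes survive), runs schema.deserialize(), and on colander.Invalid re-renders the form with e.asdict() as the error mapping. A stripped-down sketch of that validation round trip using only colander (the schema is illustrative):

import colander


class ExampleSchema(colander.MappingSchema):
    name = colander.SchemaNode(colander.String())
    events = colander.SchemaNode(colander.Set(), missing=set())


schema = ExampleSchema()
try:
    # 'name' is required and absent, so this raises colander.Invalid
    valid_data = schema.deserialize({'events': ['push', 'create']})
except colander.Invalid as e:
    print(e.asdict())  # e.g. {'name': u'Required'} - fed back into the form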
This diff has been collapsed as it changes many lines (3516 lines changed).
@@ -0,0 +1,3516 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Database Models for RhodeCode Enterprise
23 """
24
25 import os
26 import sys
27 import time
28 import hashlib
29 import logging
30 import datetime
31 import warnings
32 import ipaddress
33 import functools
34 import traceback
35 import collections
36
37
38 from sqlalchemy import *
39 from sqlalchemy.exc import IntegrityError
40 from sqlalchemy.ext.declarative import declared_attr
41 from sqlalchemy.ext.hybrid import hybrid_property
42 from sqlalchemy.orm import (
43 relationship, joinedload, class_mapper, validates, aliased)
44 from sqlalchemy.sql.expression import true
45 from beaker.cache import cache_region, region_invalidate
46 from webob.exc import HTTPNotFound
47 from zope.cachedescriptors.property import Lazy as LazyProperty
48
49 from pylons import url
50 from pylons.i18n.translation import lazy_ugettext as _
51
52 from rhodecode.lib.vcs import get_backend
53 from rhodecode.lib.vcs.utils.helpers import get_scm
54 from rhodecode.lib.vcs.exceptions import VCSError
55 from rhodecode.lib.vcs.backends.base import (
56 EmptyCommit, Reference, MergeFailureReason)
57 from rhodecode.lib.utils2 import (
58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
60 from rhodecode.lib.ext_json import json
61 from rhodecode.lib.caching_query import FromCache
62 from rhodecode.lib.encrypt import AESCipher
63
64 from rhodecode.model.meta import Base, Session
65
66 URL_SEP = '/'
67 log = logging.getLogger(__name__)
68
69 # =============================================================================
70 # BASE CLASSES
71 # =============================================================================
72
73 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret
74 # (or beaker.session.secret if the first is not set)
75 # and initialized in environment.py
76 ENCRYPTION_KEY = None
77
78 # used to sort permissions by type; '#' is used here because it is not allowed
79 # in usernames and sorts very early in the string.printable table.
80 PERMISSION_TYPE_SORT = {
81 'admin': '####',
82 'write': '###',
83 'read': '##',
84 'none': '#',
85 }
86
87
88 def display_sort(obj):
89 """
90 Sort function used to sort permissions in .permissions() function of
91 Repository, RepoGroup, UserGroup. It also puts the default user in front
92 of all other resources.
93 """
94
95 if obj.username == User.DEFAULT_USER:
96 return '#####'
97 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
98 return prefix + obj.username
99
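# Illustration (not part of this model file): because '#' sorts before any
# username character, sorted(objs, key=display_sort) puts the default user
# first, then admins, then write/read/none permission holders:
#
#     >>> sorted(['####carol', '###alice', '##bob', '#dave', '#####'])
#     ['#####', '####carol', '###alice', '##bob', '#dave']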
100
101 def _hash_key(k):
102 return md5_safe(k)
103
104
105 class EncryptedTextValue(TypeDecorator):
106 """
107 Special column for encrypted long text data, use like::
108
109 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
110
111 This column is intelligent: if the value is in unencrypted form it returns
112 the unencrypted form, but on save it always encrypts.
113 """
114 impl = Text
115
116 def process_bind_param(self, value, dialect):
117 if not value:
118 return value
119 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
120 # protect against double encrypting if someone manually sets an
121 # already encrypted value
122 raise ValueError('value needs to be in unencrypted format, i.e. '
123 'not starting with enc$aes')
124 return 'enc$aes_hmac$%s' % AESCipher(
125 ENCRYPTION_KEY, hmac=True).encrypt(value)
126
127 def process_result_value(self, value, dialect):
128 import rhodecode
129
130 if not value:
131 return value
132
133 parts = value.split('$', 3)
134 if not len(parts) == 3:
135 # probably not encrypted values
136 return value
137 else:
138 if parts[0] != 'enc':
139 # parts ok but without our header ?
140 return value
141 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
142 'rhodecode.encrypted_values.strict') or True)
143 # at that stage we know it's our encryption
144 if parts[1] == 'aes':
145 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
146 elif parts[1] == 'aes_hmac':
147 decrypted_data = AESCipher(
148 ENCRYPTION_KEY, hmac=True,
149 strict_verification=enc_strict_mode).decrypt(parts[2])
150 else:
151 raise ValueError(
152 'Encryption type part is wrong, must be `aes` '
153 'or `aes_hmac`, got `%s` instead' % (parts[1]))
154 return decrypted_data
155
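# Illustration (not part of this model file): process_result_value above tells
# encrypted payloads apart from plain values purely by prefix. A stored value
# like 'enc$aes_hmac$<ciphertext>' splits into exactly three parts starting
# with 'enc'; anything else is returned untouched:
#
#     >>> 'enc$aes_hmac$deadbeef'.split('$', 3)
#     ['enc', 'aes_hmac', 'deadbeef']
#     >>> 'plain-text-value'.split('$', 3)
#     ['plain-text-value']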
156
157 class BaseModel(object):
158 """
159 Base Model for all classes
160 """
161
162 @classmethod
163 def _get_keys(cls):
164 """return column names for this model """
165 return class_mapper(cls).c.keys()
166
167 def get_dict(self):
168 """
169 return dict with keys and values corresponding
170 to this model data """
171
172 d = {}
173 for k in self._get_keys():
174 d[k] = getattr(self, k)
175
176 # also use __json__() if present to get additional fields
177 _json_attr = getattr(self, '__json__', None)
178 if _json_attr:
179 # update with attributes from __json__
180 if callable(_json_attr):
181 _json_attr = _json_attr()
182 for k, val in _json_attr.iteritems():
183 d[k] = val
184 return d
185
186 def get_appstruct(self):
187 """return list with keys and values tuples corresponding
188 to this model data """
189
190 l = []
191 for k in self._get_keys():
192 l.append((k, getattr(self, k),))
193 return l
194
195 def populate_obj(self, populate_dict):
196 """populate model with data from given populate_dict"""
197
198 for k in self._get_keys():
199 if k in populate_dict:
200 setattr(self, k, populate_dict[k])
201
202 @classmethod
203 def query(cls):
204 return Session().query(cls)
205
206 @classmethod
207 def get(cls, id_):
208 if id_:
209 return cls.query().get(id_)
210
211 @classmethod
212 def get_or_404(cls, id_):
213 try:
214 id_ = int(id_)
215 except (TypeError, ValueError):
216 raise HTTPNotFound
217
218 res = cls.query().get(id_)
219 if not res:
220 raise HTTPNotFound
221 return res
222
223 @classmethod
224 def getAll(cls):
225 # deprecated and left for backward compatibility
226 return cls.get_all()
227
228 @classmethod
229 def get_all(cls):
230 return cls.query().all()
231
232 @classmethod
233 def delete(cls, id_):
234 obj = cls.query().get(id_)
235 Session().delete(obj)
236
237 @classmethod
238 def identity_cache(cls, session, attr_name, value):
239 exist_in_session = []
240 for (item_cls, pkey), instance in session.identity_map.items():
241 if cls == item_cls and getattr(instance, attr_name) == value:
242 exist_in_session.append(instance)
243 if exist_in_session:
244 if len(exist_in_session) == 1:
245 return exist_in_session[0]
246 log.exception(
247 'multiple objects with attr %s and '
248 'value %s found with same name: %r',
249 attr_name, value, exist_in_session)
250
251 def __repr__(self):
252 if hasattr(self, '__unicode__'):
253 # python repr needs to return str
254 try:
255 return safe_str(self.__unicode__())
256 except UnicodeDecodeError:
257 pass
258 return '<DB:%s>' % (self.__class__.__name__)
259
260
261 class RhodeCodeSetting(Base, BaseModel):
262 __tablename__ = 'rhodecode_settings'
263 __table_args__ = (
264 UniqueConstraint('app_settings_name'),
265 {'extend_existing': True, 'mysql_engine': 'InnoDB',
266 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
267 )
268
269 SETTINGS_TYPES = {
270 'str': safe_str,
271 'int': safe_int,
272 'unicode': safe_unicode,
273 'bool': str2bool,
274 'list': functools.partial(aslist, sep=',')
275 }
276 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
277 GLOBAL_CONF_KEY = 'app_settings'
278
279 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
280 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
281 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
282 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
283
284 def __init__(self, key='', val='', type='unicode'):
285 self.app_settings_name = key
286 self.app_settings_type = type
287 self.app_settings_value = val
288
289 @validates('_app_settings_value')
290 def validate_settings_value(self, key, val):
291 assert type(val) == unicode
292 return val
293
294 @hybrid_property
295 def app_settings_value(self):
296 v = self._app_settings_value
297 _type = self.app_settings_type
298 if _type:
299 _type = self.app_settings_type.split('.')[0]
300 # decode the encrypted value
301 if 'encrypted' in self.app_settings_type:
302 cipher = EncryptedTextValue()
303 v = safe_unicode(cipher.process_result_value(v, None))
304
305 converter = self.SETTINGS_TYPES.get(_type) or \
306 self.SETTINGS_TYPES['unicode']
307 return converter(v)
308
309 @app_settings_value.setter
310 def app_settings_value(self, val):
311 """
312 Setter that will always make sure we use unicode in app_settings_value
313
314 :param val:
315 """
316 val = safe_unicode(val)
317 # encode the encrypted value
318 if 'encrypted' in self.app_settings_type:
319 cipher = EncryptedTextValue()
320 val = safe_unicode(cipher.process_bind_param(val, None))
321 self._app_settings_value = val
322
323 @hybrid_property
324 def app_settings_type(self):
325 return self._app_settings_type
326
327 @app_settings_type.setter
328 def app_settings_type(self, val):
329 if val.split('.')[0] not in self.SETTINGS_TYPES:
330 raise Exception('type must be one of %s got %s'
331 % (self.SETTINGS_TYPES.keys(), val))
332 self._app_settings_type = val
333
334 def __unicode__(self):
335 return u"<%s('%s:%s[%s]')>" % (
336 self.__class__.__name__,
337 self.app_settings_name, self.app_settings_value,
338 self.app_settings_type
339 )
340
341
342 class RhodeCodeUi(Base, BaseModel):
343 __tablename__ = 'rhodecode_ui'
344 __table_args__ = (
345 UniqueConstraint('ui_key'),
346 {'extend_existing': True, 'mysql_engine': 'InnoDB',
347 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
348 )
349
350 HOOK_REPO_SIZE = 'changegroup.repo_size'
351 # HG
352 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
353 HOOK_PULL = 'outgoing.pull_logger'
354 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
355 HOOK_PUSH = 'changegroup.push_logger'
356
357 # TODO: johbo: Unify the way hooks are configured for git and hg;
358 # the git part is currently hardcoded.
359
360 # SVN PATTERNS
361 SVN_BRANCH_ID = 'vcs_svn_branch'
362 SVN_TAG_ID = 'vcs_svn_tag'
363
364 ui_id = Column(
365 "ui_id", Integer(), nullable=False, unique=True, default=None,
366 primary_key=True)
367 ui_section = Column(
368 "ui_section", String(255), nullable=True, unique=None, default=None)
369 ui_key = Column(
370 "ui_key", String(255), nullable=True, unique=None, default=None)
371 ui_value = Column(
372 "ui_value", String(255), nullable=True, unique=None, default=None)
373 ui_active = Column(
374 "ui_active", Boolean(), nullable=True, unique=None, default=True)
375
376 def __repr__(self):
377 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
378 self.ui_key, self.ui_value)
379
380
381 class RepoRhodeCodeSetting(Base, BaseModel):
382 __tablename__ = 'repo_rhodecode_settings'
383 __table_args__ = (
384 UniqueConstraint(
385 'app_settings_name', 'repository_id',
386 name='uq_repo_rhodecode_setting_name_repo_id'),
387 {'extend_existing': True, 'mysql_engine': 'InnoDB',
388 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
389 )
390
391 repository_id = Column(
392 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
393 nullable=False)
394 app_settings_id = Column(
395 "app_settings_id", Integer(), nullable=False, unique=True,
396 default=None, primary_key=True)
397 app_settings_name = Column(
398 "app_settings_name", String(255), nullable=True, unique=None,
399 default=None)
400 _app_settings_value = Column(
401 "app_settings_value", String(4096), nullable=True, unique=None,
402 default=None)
403 _app_settings_type = Column(
404 "app_settings_type", String(255), nullable=True, unique=None,
405 default=None)
406
407 repository = relationship('Repository')
408
409 def __init__(self, repository_id, key='', val='', type='unicode'):
410 self.repository_id = repository_id
411 self.app_settings_name = key
412 self.app_settings_type = type
413 self.app_settings_value = val
414
415 @validates('_app_settings_value')
416 def validate_settings_value(self, key, val):
417 assert type(val) == unicode
418 return val
419
420 @hybrid_property
421 def app_settings_value(self):
422 v = self._app_settings_value
423 type_ = self.app_settings_type
424 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
425 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
426 return converter(v)
427
428 @app_settings_value.setter
429 def app_settings_value(self, val):
430 """
431 Setter that will always make sure we use unicode in app_settings_value
432
433 :param val:
434 """
435 self._app_settings_value = safe_unicode(val)
436
437 @hybrid_property
438 def app_settings_type(self):
439 return self._app_settings_type
440
441 @app_settings_type.setter
442 def app_settings_type(self, val):
443 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
444 if val not in SETTINGS_TYPES:
445 raise Exception('type must be one of %s got %s'
446 % (SETTINGS_TYPES.keys(), val))
447 self._app_settings_type = val
448
449 def __unicode__(self):
450 return u"<%s('%s:%s:%s[%s]')>" % (
451 self.__class__.__name__, self.repository.repo_name,
452 self.app_settings_name, self.app_settings_value,
453 self.app_settings_type
454 )
455
456
457 class RepoRhodeCodeUi(Base, BaseModel):
458 __tablename__ = 'repo_rhodecode_ui'
459 __table_args__ = (
460 UniqueConstraint(
461 'repository_id', 'ui_section', 'ui_key',
462 name='uq_repo_rhodecode_ui_repository_id_section_key'),
463 {'extend_existing': True, 'mysql_engine': 'InnoDB',
464 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
465 )
466
467 repository_id = Column(
468 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
469 nullable=False)
470 ui_id = Column(
471 "ui_id", Integer(), nullable=False, unique=True, default=None,
472 primary_key=True)
473 ui_section = Column(
474 "ui_section", String(255), nullable=True, unique=None, default=None)
475 ui_key = Column(
476 "ui_key", String(255), nullable=True, unique=None, default=None)
477 ui_value = Column(
478 "ui_value", String(255), nullable=True, unique=None, default=None)
479 ui_active = Column(
480 "ui_active", Boolean(), nullable=True, unique=None, default=True)
481
482 repository = relationship('Repository')
483
484 def __repr__(self):
485 return '<%s[%s:%s]%s=>%s]>' % (
486 self.__class__.__name__, self.repository.repo_name,
487 self.ui_section, self.ui_key, self.ui_value)
488
489
490 class User(Base, BaseModel):
491 __tablename__ = 'users'
492 __table_args__ = (
493 UniqueConstraint('username'), UniqueConstraint('email'),
494 Index('u_username_idx', 'username'),
495 Index('u_email_idx', 'email'),
496 {'extend_existing': True, 'mysql_engine': 'InnoDB',
497 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
498 )
499 DEFAULT_USER = 'default'
500 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
501 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
502
503 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
504 username = Column("username", String(255), nullable=True, unique=None, default=None)
505 password = Column("password", String(255), nullable=True, unique=None, default=None)
506 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
507 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
508 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
509 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
510 _email = Column("email", String(255), nullable=True, unique=None, default=None)
511 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
512 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
513 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
514 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
515 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
516 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
517 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
518
519 user_log = relationship('UserLog')
520 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
521
522 repositories = relationship('Repository')
523 repository_groups = relationship('RepoGroup')
524 user_groups = relationship('UserGroup')
525
526 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
527 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
528
529 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
530 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
531 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
532
533 group_member = relationship('UserGroupMember', cascade='all')
534
535 notifications = relationship('UserNotification', cascade='all')
536 # notifications assigned to this user
537 user_created_notifications = relationship('Notification', cascade='all')
538 # comments created by this user
539 user_comments = relationship('ChangesetComment', cascade='all')
540 # user profile extra info
541 user_emails = relationship('UserEmailMap', cascade='all')
542 user_ip_map = relationship('UserIpMap', cascade='all')
543 user_auth_tokens = relationship('UserApiKeys', cascade='all')
544 # gists
545 user_gists = relationship('Gist', cascade='all')
546 # user pull requests
547 user_pull_requests = relationship('PullRequest', cascade='all')
548 # external identities
549 extenal_identities = relationship(
550 'ExternalIdentity',
551 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
552 cascade='all')
553
554 def __unicode__(self):
555 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
556 self.user_id, self.username)
557
558 @hybrid_property
559 def email(self):
560 return self._email
561
562 @email.setter
563 def email(self, val):
564 self._email = val.lower() if val else None
565
566 @property
567 def firstname(self):
568 # alias for future
569 return self.name
570
571 @property
572 def emails(self):
573 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
574 return [self.email] + [x.email for x in other]
575
576 @property
577 def auth_tokens(self):
578 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
579
580 @property
581 def extra_auth_tokens(self):
582 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
583
584 @property
585 def feed_token(self):
586 feed_tokens = UserApiKeys.query()\
587 .filter(UserApiKeys.user == self)\
588 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
589 .all()
590 if feed_tokens:
591 return feed_tokens[0].api_key
592 else:
593 # use the main token so we don't end up with nothing...
594 return self.api_key
595
596 @classmethod
597 def extra_valid_auth_tokens(cls, user, role=None):
598 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
599 .filter(or_(UserApiKeys.expires == -1,
600 UserApiKeys.expires >= time.time()))
601 if role:
602 tokens = tokens.filter(or_(UserApiKeys.role == role,
603 UserApiKeys.role == UserApiKeys.ROLE_ALL))
604 return tokens.all()
605
606 @property
607 def ip_addresses(self):
608 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
609 return [x.ip_addr for x in ret]
610
611 @property
612 def username_and_name(self):
613 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
614
615 @property
616 def username_or_name_or_email(self):
617 full_name = self.full_name if self.full_name != ' ' else None
618 return self.username or full_name or self.email
619
620 @property
621 def full_name(self):
622 return '%s %s' % (self.firstname, self.lastname)
623
624 @property
625 def full_name_or_username(self):
626 return ('%s %s' % (self.firstname, self.lastname)
627 if (self.firstname and self.lastname) else self.username)
628
629 @property
630 def full_contact(self):
631 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
632
633 @property
634 def short_contact(self):
635 return '%s %s' % (self.firstname, self.lastname)
636
637 @property
638 def is_admin(self):
639 return self.admin
640
641 @property
642 def AuthUser(self):
643 """
644 Returns instance of AuthUser for this user
645 """
646 from rhodecode.lib.auth import AuthUser
647 return AuthUser(user_id=self.user_id, api_key=self.api_key,
648 username=self.username)
649
650 @hybrid_property
651 def user_data(self):
652 if not self._user_data:
653 return {}
654
655 try:
656 return json.loads(self._user_data)
657 except TypeError:
658 return {}
659
660 @user_data.setter
661 def user_data(self, val):
662 if not isinstance(val, dict):
663 raise Exception('user_data must be dict, got %s' % type(val))
664 try:
665 self._user_data = json.dumps(val)
666 except Exception:
667 log.error(traceback.format_exc())
668
669 @classmethod
670 def get_by_username(cls, username, case_insensitive=False,
671 cache=False, identity_cache=False):
672 session = Session()
673
674 if case_insensitive:
675 q = cls.query().filter(
676 func.lower(cls.username) == func.lower(username))
677 else:
678 q = cls.query().filter(cls.username == username)
679
680 if cache:
681 if identity_cache:
682 val = cls.identity_cache(session, 'username', username)
683 if val:
684 return val
685 else:
686 q = q.options(
687 FromCache("sql_cache_short",
688 "get_user_by_name_%s" % _hash_key(username)))
689
690 return q.scalar()
691
692 @classmethod
693 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
694 q = cls.query().filter(cls.api_key == auth_token)
695
696 if cache:
697 q = q.options(FromCache("sql_cache_short",
698 "get_auth_token_%s" % auth_token))
699 res = q.scalar()
700
701 if fallback and not res:
702 #fallback to additional keys
703 _res = UserApiKeys.query()\
704 .filter(UserApiKeys.api_key == auth_token)\
705 .filter(or_(UserApiKeys.expires == -1,
706 UserApiKeys.expires >= time.time()))\
707 .first()
708 if _res:
709 res = _res.user
710 return res
711
712 @classmethod
713 def get_by_email(cls, email, case_insensitive=False, cache=False):
714
715 if case_insensitive:
716 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
717
718 else:
719 q = cls.query().filter(cls.email == email)
720
721 if cache:
722 q = q.options(FromCache("sql_cache_short",
723 "get_email_key_%s" % email))
724
725 ret = q.scalar()
726 if ret is None:
727 q = UserEmailMap.query()
728 # try fetching in alternate email map
729 if case_insensitive:
730 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
731 else:
732 q = q.filter(UserEmailMap.email == email)
733 q = q.options(joinedload(UserEmailMap.user))
734 if cache:
735 q = q.options(FromCache("sql_cache_short",
736 "get_email_map_key_%s" % email))
737 ret = getattr(q.scalar(), 'user', None)
738
739 return ret
740
741 @classmethod
742 def get_from_cs_author(cls, author):
743 """
744 Tries to get User objects out of commit author string
745
746 :param author:
747 """
748 from rhodecode.lib.helpers import email, author_name
749 # Valid email in the attribute passed, see if they're in the system
750 _email = email(author)
751 if _email:
752 user = cls.get_by_email(_email, case_insensitive=True)
753 if user:
754 return user
755 # Maybe we can match by username?
756 _author = author_name(author)
757 user = cls.get_by_username(_author, case_insensitive=True)
758 if user:
759 return user
760
761 def update_userdata(self, **kwargs):
762 usr = self
763 old = usr.user_data
764 old.update(**kwargs)
765 usr.user_data = old
766 Session().add(usr)
767 log.debug('updated userdata with %s', kwargs)
768
769 def update_lastlogin(self):
770 """Update user lastlogin"""
771 self.last_login = datetime.datetime.now()
772 Session().add(self)
773 log.debug('updated user %s lastlogin', self.username)
774
775 def update_lastactivity(self):
776 """Update user lastactivity"""
777 usr = self
778 old = usr.user_data
779 old.update({'last_activity': time.time()})
780 usr.user_data = old
781 Session().add(usr)
782 log.debug('updated user %s lastactivity', usr.username)
783
784 def update_password(self, new_password, change_api_key=False):
785 from rhodecode.lib.auth import get_crypt_password, generate_auth_token
786
787 self.password = get_crypt_password(new_password)
788 if change_api_key:
789 self.api_key = generate_auth_token(self.username)
790 Session().add(self)
791
792 @classmethod
793 def get_first_super_admin(cls):
794 user = User.query().filter(User.admin == true()).first()
795 if user is None:
796 raise Exception('FATAL: Missing administrative account!')
797 return user
798
799 @classmethod
800 def get_all_super_admins(cls):
801 """
802 Returns all admin accounts sorted by username
803 """
804 return User.query().filter(User.admin == true())\
805 .order_by(User.username.asc()).all()
806
807 @classmethod
808 def get_default_user(cls, cache=False):
809 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
810 if user is None:
811 raise Exception('FATAL: Missing default account!')
812 return user
813
814 def _get_default_perms(self, user, suffix=''):
815 from rhodecode.model.permission import PermissionModel
816 return PermissionModel().get_default_perms(user.user_perms, suffix)
817
818 def get_default_perms(self, suffix=''):
819 return self._get_default_perms(self, suffix)
820
821 def get_api_data(self, include_secrets=False, details='full'):
822 """
823 Common function for generating user related data for API
824
825 :param include_secrets: By default secrets in the API data will be replaced
826 by a placeholder value to prevent exposing this data by accident. In case
827 this data shall be exposed, set this flag to ``True``.
828
829 :param details: details can be 'basic' or 'full'; 'basic' gives only a
830 subset of the available user information: user_id, name and emails.
831 """
832 user = self
833 user_data = self.user_data
834 data = {
835 'user_id': user.user_id,
836 'username': user.username,
837 'firstname': user.name,
838 'lastname': user.lastname,
839 'email': user.email,
840 'emails': user.emails,
841 }
842 if details == 'basic':
843 return data
844
845 api_key_length = 40
846 api_key_replacement = '*' * api_key_length
847
848 extras = {
849 'api_key': api_key_replacement,
850 'api_keys': [api_key_replacement],
851 'active': user.active,
852 'admin': user.admin,
853 'extern_type': user.extern_type,
854 'extern_name': user.extern_name,
855 'last_login': user.last_login,
856 'ip_addresses': user.ip_addresses,
857 'language': user_data.get('language')
858 }
859 data.update(extras)
860
861 if include_secrets:
862 data['api_key'] = user.api_key
863 data['api_keys'] = user.auth_tokens
864 return data
865
866 def __json__(self):
867 data = {
868 'full_name': self.full_name,
869 'full_name_or_username': self.full_name_or_username,
870 'short_contact': self.short_contact,
871 'full_contact': self.full_contact,
872 }
873 data.update(self.get_api_data())
874 return data
875
876
877 class UserApiKeys(Base, BaseModel):
878 __tablename__ = 'user_api_keys'
879 __table_args__ = (
880 Index('uak_api_key_idx', 'api_key'),
881 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
882 UniqueConstraint('api_key'),
883 {'extend_existing': True, 'mysql_engine': 'InnoDB',
884 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
885 )
886 __mapper_args__ = {}
887
888 # ApiKey role
889 ROLE_ALL = 'token_role_all'
890 ROLE_HTTP = 'token_role_http'
891 ROLE_VCS = 'token_role_vcs'
892 ROLE_API = 'token_role_api'
893 ROLE_FEED = 'token_role_feed'
894 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
895
896 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
897 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
898 api_key = Column("api_key", String(255), nullable=False, unique=True)
899 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
900 expires = Column('expires', Float(53), nullable=False)
901 role = Column('role', String(255), nullable=True)
902 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
903
904 user = relationship('User', lazy='joined')
905
906 @classmethod
907 def _get_role_name(cls, role):
908 return {
909 cls.ROLE_ALL: _('all'),
910 cls.ROLE_HTTP: _('http/web interface'),
911 cls.ROLE_VCS: _('vcs (git/hg protocol)'),
912 cls.ROLE_API: _('api calls'),
913 cls.ROLE_FEED: _('feed access'),
914 }.get(role, role)
915
916 @property
917 def expired(self):
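 # an expires value of -1 marks a token that never expires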
918 if self.expires == -1:
919 return False
920 return time.time() > self.expires
921
922 @property
923 def role_humanized(self):
924 return self._get_role_name(self.role)
925
926
927 class UserEmailMap(Base, BaseModel):
928 __tablename__ = 'user_email_map'
929 __table_args__ = (
930 Index('uem_email_idx', 'email'),
931 UniqueConstraint('email'),
932 {'extend_existing': True, 'mysql_engine': 'InnoDB',
933 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
934 )
935 __mapper_args__ = {}
936
937 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
938 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
939 _email = Column("email", String(255), nullable=True, unique=False, default=None)
940 user = relationship('User', lazy='joined')
941
942 @validates('_email')
943 def validate_email(self, key, email):
944 # check if this email is not main one
945 main_email = Session().query(User).filter(User.email == email).scalar()
946 if main_email is not None:
947 raise AttributeError('email %s is present in user table' % email)
948 return email
949
950 @hybrid_property
951 def email(self):
952 return self._email
953
954 @email.setter
955 def email(self, val):
956 self._email = val.lower() if val else None
957
958
959 class UserIpMap(Base, BaseModel):
960 __tablename__ = 'user_ip_map'
961 __table_args__ = (
962 UniqueConstraint('user_id', 'ip_addr'),
963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
965 )
966 __mapper_args__ = {}
967
968 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
969 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
970 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
971 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
972 description = Column("description", String(10000), nullable=True, unique=None, default=None)
973 user = relationship('User', lazy='joined')
974
975 @classmethod
976 def _get_ip_range(cls, ip_addr):
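 # illustrative: '192.168.1.0/24' -> ['192.168.1.0', '192.168.1.255']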
977 net = ipaddress.ip_network(ip_addr, strict=False)
978 return [str(net.network_address), str(net.broadcast_address)]
979
980 def __json__(self):
981 return {
982 'ip_addr': self.ip_addr,
983 'ip_range': self._get_ip_range(self.ip_addr),
984 }
985
986 def __unicode__(self):
987 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
988 self.user_id, self.ip_addr)
989
990 class UserLog(Base, BaseModel):
991 __tablename__ = 'user_logs'
992 __table_args__ = (
993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
995 )
996 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
997 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
998 username = Column("username", String(255), nullable=True, unique=None, default=None)
999 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1000 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1001 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1002 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1003 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1004
1005 def __unicode__(self):
1006 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1007 self.repository_name,
1008 self.action)
1009
1010 @property
1011 def action_as_day(self):
1012 return datetime.date(*self.action_date.timetuple()[:3])
1013
1014 user = relationship('User')
1015 repository = relationship('Repository', cascade='')
1016
1017
1018 class UserGroup(Base, BaseModel):
1019 __tablename__ = 'users_groups'
1020 __table_args__ = (
1021 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1022 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1023 )
1024
1025 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1026 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1027 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1028 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1029 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1030 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1031 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1032 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1033
1034 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1035 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1036 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1037 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1038 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1039 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1040
1041 user = relationship('User')
1042
1043 @hybrid_property
1044 def group_data(self):
1045 if not self._group_data:
1046 return {}
1047
1048 try:
1049 return json.loads(self._group_data)
1050 except TypeError:
1051 return {}
1052
1053 @group_data.setter
1054 def group_data(self, val):
1055 try:
1056 self._group_data = json.dumps(val)
1057 except Exception:
1058 log.error(traceback.format_exc())
1059
1060 def __unicode__(self):
1061 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1062 self.users_group_id,
1063 self.users_group_name)
1064
1065 @classmethod
1066 def get_by_group_name(cls, group_name, cache=False,
1067 case_insensitive=False):
1068 if case_insensitive:
1069 q = cls.query().filter(func.lower(cls.users_group_name) ==
1070 func.lower(group_name))
1071
1072 else:
1073 q = cls.query().filter(cls.users_group_name == group_name)
1074 if cache:
1075 q = q.options(FromCache(
1076 "sql_cache_short",
1077 "get_group_%s" % _hash_key(group_name)))
1078 return q.scalar()
1079
1080 @classmethod
1081 def get(cls, user_group_id, cache=False):
1082 user_group = cls.query()
1083 if cache:
1084 user_group = user_group.options(FromCache("sql_cache_short",
1085 "get_users_group_%s" % user_group_id))
1086 return user_group.get(user_group_id)
1087
1088 def permissions(self, with_admins=True, with_owner=True):
1089 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1090 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1091 joinedload(UserUserGroupToPerm.user),
1092 joinedload(UserUserGroupToPerm.permission),)
1093
1094 # get owners, admins and their permissions. We rewrite the sqlalchemy
1095 # objects into plain AttributeDict copies because the sqlalchemy session
1096 # keeps a global reference and changing one object would propagate to
1097 # all others. E.g. if an admin is also the owner, setting admin_row on
1098 # one row would otherwise change both objects.
1099 perm_rows = []
1100 for _usr in q.all():
1101 usr = AttributeDict(_usr.user.get_dict())
1102 usr.permission = _usr.permission.permission_name
1103 perm_rows.append(usr)
1104
1105 # sort the perm rows: the 'default' user first, then by admin, write,
1106 # read, none permission, sorted alphabetically within each of those
1107 # groups
1108 perm_rows = sorted(perm_rows, key=display_sort)
1109
1110 _admin_perm = 'usergroup.admin'
1111 owner_row = []
1112 if with_owner:
1113 usr = AttributeDict(self.user.get_dict())
1114 usr.owner_row = True
1115 usr.permission = _admin_perm
1116 owner_row.append(usr)
1117
1118 super_admin_rows = []
1119 if with_admins:
1120 for usr in User.get_all_super_admins():
1121 # if this admin is also owner, don't double the record
1122 if usr.user_id == owner_row[0].user_id:
1123 owner_row[0].admin_row = True
1124 else:
1125 usr = AttributeDict(usr.get_dict())
1126 usr.admin_row = True
1127 usr.permission = _admin_perm
1128 super_admin_rows.append(usr)
1129
1130 return super_admin_rows + owner_row + perm_rows
1131
1132 def permission_user_groups(self):
1133 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1134 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1135 joinedload(UserGroupUserGroupToPerm.target_user_group),
1136 joinedload(UserGroupUserGroupToPerm.permission),)
1137
1138 perm_rows = []
1139 for _user_group in q.all():
1140 usr = AttributeDict(_user_group.user_group.get_dict())
1141 usr.permission = _user_group.permission.permission_name
1142 perm_rows.append(usr)
1143
1144 return perm_rows
1145
1146 def _get_default_perms(self, user_group, suffix=''):
1147 from rhodecode.model.permission import PermissionModel
1148 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1149
1150 def get_default_perms(self, suffix=''):
1151 return self._get_default_perms(self, suffix)
1152
1153 def get_api_data(self, with_group_members=True, include_secrets=False):
1154 """
1155 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1156 basically forwarded.
1157
1158 """
1159 user_group = self
1160
1161 data = {
1162 'users_group_id': user_group.users_group_id,
1163 'group_name': user_group.users_group_name,
1164 'group_description': user_group.user_group_description,
1165 'active': user_group.users_group_active,
1166 'owner': user_group.user.username,
1167 }
1168 if with_group_members:
1169 users = []
1170 for user in user_group.members:
1171 user = user.user
1172 users.append(user.get_api_data(include_secrets=include_secrets))
1173 data['users'] = users
1174
1175 return data
1176
1177
1178 class UserGroupMember(Base, BaseModel):
1179 __tablename__ = 'users_groups_members'
1180 __table_args__ = (
1181 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1182 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1183 )
1184
1185 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1186 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1187 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1188
1189 user = relationship('User', lazy='joined')
1190 users_group = relationship('UserGroup')
1191
1192 def __init__(self, gr_id='', u_id=''):
1193 self.users_group_id = gr_id
1194 self.user_id = u_id
1195
1196
1197 class RepositoryField(Base, BaseModel):
1198 __tablename__ = 'repositories_fields'
1199 __table_args__ = (
1200 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1201 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1202 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1203 )
1204 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1205
1206 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1207 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1208 field_key = Column("field_key", String(250))
1209 field_label = Column("field_label", String(1024), nullable=False)
1210 field_value = Column("field_value", String(10000), nullable=False)
1211 field_desc = Column("field_desc", String(1024), nullable=False)
1212 field_type = Column("field_type", String(255), nullable=False, unique=None)
1213 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1214
1215 repository = relationship('Repository')
1216
1217 @property
1218 def field_key_prefixed(self):
1219 return 'ex_%s' % self.field_key
1220
1221 @classmethod
1222 def un_prefix_key(cls, key):
1223 if key.startswith(cls.PREFIX):
1224 return key[len(cls.PREFIX):]
1225 return key
1226
1227 @classmethod
1228 def get_by_key_name(cls, key, repo):
1229 row = cls.query()\
1230 .filter(cls.repository == repo)\
1231 .filter(cls.field_key == key).scalar()
1232 return row
1233
1234
1235 class Repository(Base, BaseModel):
1236 __tablename__ = 'repositories'
1237 __table_args__ = (
1238 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1241 )
1242 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1243 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
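 # illustrative rendering of the templates above (hostname is a placeholder):
 #   https://user@example.com/some-repo  or  https://user@example.com/_42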
1244
1245 STATE_CREATED = 'repo_state_created'
1246 STATE_PENDING = 'repo_state_pending'
1247 STATE_ERROR = 'repo_state_error'
1248
1249 LOCK_AUTOMATIC = 'lock_auto'
1250 LOCK_API = 'lock_api'
1251 LOCK_WEB = 'lock_web'
1252 LOCK_PULL = 'lock_pull'
1253
1254 NAME_SEP = URL_SEP
1255
1256 repo_id = Column(
1257 "repo_id", Integer(), nullable=False, unique=True, default=None,
1258 primary_key=True)
1259 _repo_name = Column(
1260 "repo_name", Text(), nullable=False, default=None)
1261 _repo_name_hash = Column(
1262 "repo_name_hash", String(255), nullable=False, unique=True)
1263 repo_state = Column("repo_state", String(255), nullable=True)
1264
1265 clone_uri = Column(
1266 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1267 default=None)
1268 repo_type = Column(
1269 "repo_type", String(255), nullable=False, unique=False, default=None)
1270 user_id = Column(
1271 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1272 unique=False, default=None)
1273 private = Column(
1274 "private", Boolean(), nullable=True, unique=None, default=None)
1275 enable_statistics = Column(
1276 "statistics", Boolean(), nullable=True, unique=None, default=True)
1277 enable_downloads = Column(
1278 "downloads", Boolean(), nullable=True, unique=None, default=True)
1279 description = Column(
1280 "description", String(10000), nullable=True, unique=None, default=None)
1281 created_on = Column(
1282 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1283 default=datetime.datetime.now)
1284 updated_on = Column(
1285 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1286 default=datetime.datetime.now)
1287 _landing_revision = Column(
1288 "landing_revision", String(255), nullable=False, unique=False,
1289 default=None)
1290 enable_locking = Column(
1291 "enable_locking", Boolean(), nullable=False, unique=None,
1292 default=False)
1293 _locked = Column(
1294 "locked", String(255), nullable=True, unique=False, default=None)
1295 _changeset_cache = Column(
1296 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1297
1298 fork_id = Column(
1299 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1300 nullable=True, unique=False, default=None)
1301 group_id = Column(
1302 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1303 unique=False, default=None)
1304
1305 user = relationship('User', lazy='joined')
1306 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1307 group = relationship('RepoGroup', lazy='joined')
1308 repo_to_perm = relationship(
1309 'UserRepoToPerm', cascade='all',
1310 order_by='UserRepoToPerm.repo_to_perm_id')
1311 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1312 stats = relationship('Statistics', cascade='all', uselist=False)
1313
1314 followers = relationship(
1315 'UserFollowing',
1316 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1317 cascade='all')
1318 extra_fields = relationship(
1319 'RepositoryField', cascade="all, delete, delete-orphan")
1320 logs = relationship('UserLog')
1321 comments = relationship(
1322 'ChangesetComment', cascade="all, delete, delete-orphan")
1323 pull_requests_source = relationship(
1324 'PullRequest',
1325 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1326 cascade="all, delete, delete-orphan")
1327 pull_requests_target = relationship(
1328 'PullRequest',
1329 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1330 cascade="all, delete, delete-orphan")
1331 ui = relationship('RepoRhodeCodeUi', cascade="all")
1332 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1333
1334 def __unicode__(self):
1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1336 safe_unicode(self.repo_name))
1337
1338 @hybrid_property
1339 def landing_rev(self):
1340 # always should return [rev_type, rev]
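 # e.g. (illustrative) 'branch:default' is returned as ['branch', 'default']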
1341 if self._landing_revision:
1342 _rev_info = self._landing_revision.split(':')
1343 if len(_rev_info) < 2:
1344 _rev_info.insert(0, 'rev')
1345 return [_rev_info[0], _rev_info[1]]
1346 return [None, None]
1347
1348 @landing_rev.setter
1349 def landing_rev(self, val):
1350 if ':' not in val:
1351 raise ValueError('value must be delimited with `:` and consist '
1352 'of <rev_type>:<rev>, got %s instead' % val)
1353 self._landing_revision = val
1354
1355 @hybrid_property
1356 def locked(self):
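 # _locked is stored as a 'user_id:lock_timestamp:reason' string,
 # see the setter below and Repository.lock()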
1357 if self._locked:
1358 user_id, timelocked, reason = self._locked.split(':')
1359 lock_values = int(user_id), timelocked, reason
1360 else:
1361 lock_values = [None, None, None]
1362 return lock_values
1363
1364 @locked.setter
1365 def locked(self, val):
1366 if val and isinstance(val, (list, tuple)):
1367 self._locked = ':'.join(map(str, val))
1368 else:
1369 self._locked = None
1370
1371 @hybrid_property
1372 def changeset_cache(self):
1373 from rhodecode.lib.vcs.backends.base import EmptyCommit
1374 dummy = EmptyCommit().__json__()
1375 if not self._changeset_cache:
1376 return dummy
1377 try:
1378 return json.loads(self._changeset_cache)
1379 except TypeError:
1380 return dummy
1381 except Exception:
1382 log.error(traceback.format_exc())
1383 return dummy
1384
1385 @changeset_cache.setter
1386 def changeset_cache(self, val):
1387 try:
1388 self._changeset_cache = json.dumps(val)
1389 except Exception:
1390 log.error(traceback.format_exc())
1391
1392 @hybrid_property
1393 def repo_name(self):
1394 return self._repo_name
1395
1396 @repo_name.setter
1397 def repo_name(self, value):
1398 self._repo_name = value
1399 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1400
1401 @classmethod
1402 def normalize_repo_name(cls, repo_name):
1403 """
1404 Normalizes an OS-specific repo_name to the format stored internally in the
1405 database, using URL_SEP
1406
1407 :param cls:
1408 :param repo_name:
1409 """
1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1411
1412 @classmethod
1413 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1414 session = Session()
1415 q = session.query(cls).filter(cls.repo_name == repo_name)
1416
1417 if cache:
1418 if identity_cache:
1419 val = cls.identity_cache(session, 'repo_name', repo_name)
1420 if val:
1421 return val
1422 else:
1423 q = q.options(
1424 FromCache("sql_cache_short",
1425 "get_repo_by_name_%s" % _hash_key(repo_name)))
1426
1427 return q.scalar()
1428
1429 @classmethod
1430 def get_by_full_path(cls, repo_full_path):
1431 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1432 repo_name = cls.normalize_repo_name(repo_name)
1433 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434
1435 @classmethod
1436 def get_repo_forks(cls, repo_id):
1437 return cls.query().filter(Repository.fork_id == repo_id)
1438
1439 @classmethod
1440 def base_path(cls):
1441 """
1442 Returns the base path where all repos are stored
1443
1444 :param cls:
1445 """
1446 q = Session().query(RhodeCodeUi)\
1447 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1448 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1449 return q.one().ui_value
1450
1451 @classmethod
1452 def is_valid(cls, repo_name):
1453 """
1454 returns True if given repo name is a valid filesystem repository
1455
1456 :param cls:
1457 :param repo_name:
1458 """
1459 from rhodecode.lib.utils import is_valid_repo
1460
1461 return is_valid_repo(repo_name, cls.base_path())
1462
1463 @classmethod
1464 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1465 case_insensitive=True):
1466 q = Repository.query()
1467
1468 if not isinstance(user_id, Optional):
1469 q = q.filter(Repository.user_id == user_id)
1470
1471 if not isinstance(group_id, Optional):
1472 q = q.filter(Repository.group_id == group_id)
1473
1474 if case_insensitive:
1475 q = q.order_by(func.lower(Repository.repo_name))
1476 else:
1477 q = q.order_by(Repository.repo_name)
1478 return q.all()
1479
1480 @property
1481 def forks(self):
1482 """
1483 Return forks of this repo
1484 """
1485 return Repository.get_repo_forks(self.repo_id)
1486
1487 @property
1488 def parent(self):
1489 """
1490 Returns fork parent
1491 """
1492 return self.fork
1493
1494 @property
1495 def just_name(self):
1496 return self.repo_name.split(self.NAME_SEP)[-1]
1497
1498 @property
1499 def groups_with_parents(self):
1500 groups = []
1501 if self.group is None:
1502 return groups
1503
1504 cur_gr = self.group
1505 groups.insert(0, cur_gr)
1506 while 1:
1507 gr = getattr(cur_gr, 'parent_group', None)
1508 cur_gr = cur_gr.parent_group
1509 if gr is None:
1510 break
1511 groups.insert(0, gr)
1512
1513 return groups
1514
1515 @property
1516 def groups_and_repo(self):
1517 return self.groups_with_parents, self
1518
1519 @LazyProperty
1520 def repo_path(self):
1521 """
1522 Returns the full base path for this repository, i.e. where it
1523 actually exists on the filesystem
1524 """
1525 q = Session().query(RhodeCodeUi).filter(
1526 RhodeCodeUi.ui_key == self.NAME_SEP)
1527 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1528 return q.one().ui_value
1529
1530 @property
1531 def repo_full_path(self):
1532 p = [self.repo_path]
1533 # we need to split the name by / since this is how we store the
1534 # names in the database, but that eventually needs to be converted
1535 # into a valid system path
1536 p += self.repo_name.split(self.NAME_SEP)
1537 return os.path.join(*map(safe_unicode, p))
1538
1539 @property
1540 def cache_keys(self):
1541 """
1542 Returns associated cache keys for that repo
1543 """
1544 return CacheKey.query()\
1545 .filter(CacheKey.cache_args == self.repo_name)\
1546 .order_by(CacheKey.cache_key)\
1547 .all()
1548
1549 def get_new_name(self, repo_name):
1550 """
1551 returns the new full repository name based on the assigned group and the new name
1552 
1553 :param repo_name:
1554 """
1555 path_prefix = self.group.full_path_splitted if self.group else []
1556 return self.NAME_SEP.join(path_prefix + [repo_name])
1557
1558 @property
1559 def _config(self):
1560 """
1561 Returns db based config object.
1562 """
1563 from rhodecode.lib.utils import make_db_config
1564 return make_db_config(clear_session=False, repo=self)
1565
1566 def permissions(self, with_admins=True, with_owner=True):
1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 joinedload(UserRepoToPerm.user),
1570 joinedload(UserRepoToPerm.permission),)
1571
1572 # get owners, admins and their permissions. We rewrite the sqlalchemy
1573 # objects into plain AttributeDict copies because the sqlalchemy session
1574 # keeps a global reference and changing one object would propagate to
1575 # all others. E.g. if an admin is also the owner, setting admin_row on
1576 # one row would otherwise change both objects.
1577 perm_rows = []
1578 for _usr in q.all():
1579 usr = AttributeDict(_usr.user.get_dict())
1580 usr.permission = _usr.permission.permission_name
1581 perm_rows.append(usr)
1582
1583 # sort the perm rows: the 'default' user first, then by admin, write,
1584 # read, none permission, sorted alphabetically within each of those
1585 # groups
1586 perm_rows = sorted(perm_rows, key=display_sort)
1587
1588 _admin_perm = 'repository.admin'
1589 owner_row = []
1590 if with_owner:
1591 usr = AttributeDict(self.user.get_dict())
1592 usr.owner_row = True
1593 usr.permission = _admin_perm
1594 owner_row.append(usr)
1595
1596 super_admin_rows = []
1597 if with_admins:
1598 for usr in User.get_all_super_admins():
1599 # if this admin is also owner, don't double the record
1600 if usr.user_id == owner_row[0].user_id:
1601 owner_row[0].admin_row = True
1602 else:
1603 usr = AttributeDict(usr.get_dict())
1604 usr.admin_row = True
1605 usr.permission = _admin_perm
1606 super_admin_rows.append(usr)
1607
1608 return super_admin_rows + owner_row + perm_rows
1609
1610 def permission_user_groups(self):
1611 q = UserGroupRepoToPerm.query().filter(
1612 UserGroupRepoToPerm.repository == self)
1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1614 joinedload(UserGroupRepoToPerm.users_group),
1615 joinedload(UserGroupRepoToPerm.permission),)
1616
1617 perm_rows = []
1618 for _user_group in q.all():
1619 usr = AttributeDict(_user_group.users_group.get_dict())
1620 usr.permission = _user_group.permission.permission_name
1621 perm_rows.append(usr)
1622
1623 return perm_rows
1624
1625 def get_api_data(self, include_secrets=False):
1626 """
1627 Common function for generating repo api data
1628
1629 :param include_secrets: See :meth:`User.get_api_data`.
1630
1631 """
1632 # TODO: mikhail: this is an anti-pattern, we probably need to
1633 # move these methods to the model level.
1634 from rhodecode.model.settings import SettingsModel
1635
1636 repo = self
1637 _user_id, _time, _reason = self.locked
1638
1639 data = {
1640 'repo_id': repo.repo_id,
1641 'repo_name': repo.repo_name,
1642 'repo_type': repo.repo_type,
1643 'clone_uri': repo.clone_uri or '',
1644 'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1645 'private': repo.private,
1646 'created_on': repo.created_on,
1647 'description': repo.description,
1648 'landing_rev': repo.landing_rev,
1649 'owner': repo.user.username,
1650 'fork_of': repo.fork.repo_name if repo.fork else None,
1651 'enable_statistics': repo.enable_statistics,
1652 'enable_locking': repo.enable_locking,
1653 'enable_downloads': repo.enable_downloads,
1654 'last_changeset': repo.changeset_cache,
1655 'locked_by': User.get(_user_id).get_api_data(
1656 include_secrets=include_secrets) if _user_id else None,
1657 'locked_date': time_to_datetime(_time) if _time else None,
1658 'lock_reason': _reason if _reason else None,
1659 }
1660
1661 # TODO: mikhail: should be per-repo settings here
1662 rc_config = SettingsModel().get_all_settings()
1663 repository_fields = str2bool(
1664 rc_config.get('rhodecode_repository_fields'))
1665 if repository_fields:
1666 for f in self.extra_fields:
1667 data[f.field_key_prefixed] = f.field_value
1668
1669 return data
1670
1671 @classmethod
1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1673 if not lock_time:
1674 lock_time = time.time()
1675 if not lock_reason:
1676 lock_reason = cls.LOCK_AUTOMATIC
1677 repo.locked = [user_id, lock_time, lock_reason]
1678 Session().add(repo)
1679 Session().commit()
1680
1681 @classmethod
1682 def unlock(cls, repo):
1683 repo.locked = None
1684 Session().add(repo)
1685 Session().commit()
1686
1687 @classmethod
1688 def getlock(cls, repo):
1689 return repo.locked
1690
1691 def is_user_lock(self, user_id):
1692 if self.locked[0]:
1693 lock_user_id = safe_int(self.locked[0])
1694 user_id = safe_int(user_id)
1695 # both are ints, and they are equal
1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1697
1698 return False
1699
1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1701 """
1702 Checks locking on this repository. If locking is enabled and a lock is
1703 present, returns a tuple of (make_lock, locked, locked_by).
1704 make_lock can have 3 states: None (do nothing), True (make a lock) and
1705 False (release the lock). The value is later propagated to the hooks,
1706 which do the actual locking; think of it as a signal telling the hooks what to do.
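
 Illustrative example: a 'pull' against an unlocked repository with
 locking enabled returns (True, False, [None, None, None]), i.e. "set a
 new lock now", assuming the user has at least write permission.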
1707
1708 """
1709 # TODO: johbo: This is part of the business logic and should be moved
1710 # into the RepositoryModel.
1711
1712 if action not in ('push', 'pull'):
1713 raise ValueError("Invalid action value: %s" % repr(action))
1714
1715 # defines if locked error should be thrown to user
1716 currently_locked = False
1717 # defines if new lock should be made, tri-state
1718 make_lock = None
1719 repo = self
1720 user = User.get(user_id)
1721
1722 lock_info = repo.locked
1723
1724 if repo and (repo.enable_locking or not only_when_enabled):
1725 if action == 'push':
1726 # check if it's already locked; if it is, compare users
1727 locked_by_user_id = lock_info[0]
1728 if user.user_id == locked_by_user_id:
1729 log.debug(
1730 'Got `push` action from user %s, now unlocking', user)
1731 # unlock if we have push from user who locked
1732 make_lock = False
1733 else:
1734 # we're not the user who locked the repo, so reject with the status
1735 # code defined in settings (default is HTTP 423 Locked)
1736 log.debug('Repo %s is currently locked by %s', repo, user)
1737 currently_locked = True
1738 elif action == 'pull':
1739 # [0] user [1] date
1740 if lock_info[0] and lock_info[1]:
1741 log.debug('Repo %s is currently locked by %s', repo, user)
1742 currently_locked = True
1743 else:
1744 log.debug('Setting lock on repo %s by %s', repo, user)
1745 make_lock = True
1746
1747 else:
1748 log.debug('Repository %s does not have locking enabled', repo)
1749
1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1751 make_lock, currently_locked, lock_info)
1752
1753 from rhodecode.lib.auth import HasRepoPermissionAny
1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1756 # if we don't have at least write permission we cannot make a lock
1757 log.debug('lock state reset back to FALSE due to lack '
1758 'of at least write permission')
1759 make_lock = False
1760
1761 return make_lock, currently_locked, lock_info
1762
1763 @property
1764 def last_db_change(self):
1765 return self.updated_on
1766
1767 @property
1768 def clone_uri_hidden(self):
1769 clone_uri = self.clone_uri
1770 if clone_uri:
1771 import urlobject
1772 url_obj = urlobject.URLObject(clone_uri)
1773 if url_obj.password:
1774 clone_uri = url_obj.with_password('*****')
1775 return clone_uri
1776
1777 def clone_url(self, **override):
1778 qualified_home_url = url('home', qualified=True)
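 # supported overrides (see below): with_id=True renders the
 # DEFAULT_CLONE_URI_ID template, uri_tmpl=... supplies a custom template,
 # and any other keyword is passed through to get_clone_url()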
1779
1780 uri_tmpl = None
1781 if 'with_id' in override:
1782 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1783 del override['with_id']
1784
1785 if 'uri_tmpl' in override:
1786 uri_tmpl = override['uri_tmpl']
1787 del override['uri_tmpl']
1788
1789 # we didn't override our tmpl from **overrides
1790 if not uri_tmpl:
1791 uri_tmpl = self.DEFAULT_CLONE_URI
1792 try:
1793 from pylons import tmpl_context as c
1794 uri_tmpl = c.clone_uri_tmpl
1795 except Exception:
1796 # fall back silently if we are called outside of a request context,
1797 # i.e. when tmpl_context is not set up
1798 pass
1799
1800 return get_clone_url(uri_tmpl=uri_tmpl,
1801 qualifed_home_url=qualified_home_url,
1802 repo_name=self.repo_name,
1803 repo_id=self.repo_id, **override)
1804
1805 def set_state(self, state):
1806 self.repo_state = state
1807 Session().add(self)
1808 #==========================================================================
1809 # SCM PROPERTIES
1810 #==========================================================================
1811
1812 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1813 return get_commit_safe(
1814 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1815
1816 def get_changeset(self, rev=None, pre_load=None):
1817 warnings.warn("Use get_commit", DeprecationWarning)
1818 commit_id = None
1819 commit_idx = None
1820 if isinstance(rev, basestring):
1821 commit_id = rev
1822 else:
1823 commit_idx = rev
1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1825 pre_load=pre_load)
1826
1827 def get_landing_commit(self):
1828 """
1829 Returns landing commit, or if that doesn't exist returns the tip
1830 """
1831 _rev_type, _rev = self.landing_rev
1832 commit = self.get_commit(_rev)
1833 if isinstance(commit, EmptyCommit):
1834 return self.get_commit()
1835 return commit
1836
1837 def update_commit_cache(self, cs_cache=None, config=None):
1838 """
1839 Update cache of last changeset for repository, keys should be::
1840
1841 short_id
1842 raw_id
1843 revision
1844 parents
1845 message
1846 date
1847 author
1848
1849 :param cs_cache:
1850 """
1851 from rhodecode.lib.vcs.backends.base import BaseChangeset
1852 if cs_cache is None:
1853 # use no-cache version here
1854 scm_repo = self.scm_instance(cache=False, config=config)
1855 if scm_repo:
1856 cs_cache = scm_repo.get_commit(
1857 pre_load=["author", "date", "message", "parents"])
1858 else:
1859 cs_cache = EmptyCommit()
1860
1861 if isinstance(cs_cache, BaseChangeset):
1862 cs_cache = cs_cache.__json__()
1863
1864 def is_outdated(new_cs_cache):
1865 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1866 new_cs_cache['revision'] != self.changeset_cache['revision']):
1867 return True
1868 return False
1869
1870 # check if we maybe already have the latest cached revision
1871 if is_outdated(cs_cache) or not self.changeset_cache:
1872 _default = datetime.datetime.fromtimestamp(0)
1873 last_change = cs_cache.get('date') or _default
1874 log.debug('updated repo %s with new cs cache %s',
1875 self.repo_name, cs_cache)
1876 self.updated_on = last_change
1877 self.changeset_cache = cs_cache
1878 Session().add(self)
1879 Session().commit()
1880 else:
1881 log.debug('Skipping update_commit_cache for repo:`%s` '
1882 'commit already with latest changes', self.repo_name)
1883
1884 @property
1885 def tip(self):
1886 return self.get_commit('tip')
1887
1888 @property
1889 def author(self):
1890 return self.tip.author
1891
1892 @property
1893 def last_change(self):
1894 return self.scm_instance().last_change
1895
1896 def get_comments(self, revisions=None):
1897 """
1898 Returns comments for this repository grouped by revisions
1899
1900 :param revisions: filter query by revisions only
1901 """
1902 cmts = ChangesetComment.query()\
1903 .filter(ChangesetComment.repo == self)
1904 if revisions:
1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1906 grouped = collections.defaultdict(list)
1907 for cmt in cmts.all():
1908 grouped[cmt.revision].append(cmt)
1909 return grouped
1910
1911 def statuses(self, revisions=None):
1912 """
1913 Returns statuses for this repository
1914
1915 :param revisions: list of revisions to get statuses for
1916 """
1917 statuses = ChangesetStatus.query()\
1918 .filter(ChangesetStatus.repo == self)\
1919 .filter(ChangesetStatus.version == 0)
1920
1921 if revisions:
1922 # Try doing the filtering in chunks to avoid hitting limits
1923 size = 500
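 # note: chunking the IN clause works around database limits on the
 # number of bound parameters (roughly ~1000 on some backends)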
1924 status_results = []
1925 for chunk in xrange(0, len(revisions), size):
1926 status_results += statuses.filter(
1927 ChangesetStatus.revision.in_(
1928 revisions[chunk: chunk+size])
1929 ).all()
1930 else:
1931 status_results = statuses.all()
1932
1933 grouped = {}
1934
1935 # maybe we have open new pullrequest without a status?
1936 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1937 status_lbl = ChangesetStatus.get_status_lbl(stat)
1938 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1939 for rev in pr.revisions:
1940 pr_id = pr.pull_request_id
1941 pr_repo = pr.target_repo.repo_name
1942 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1943
1944 for stat in status_results:
1945 pr_id = pr_repo = None
1946 if stat.pull_request:
1947 pr_id = stat.pull_request.pull_request_id
1948 pr_repo = stat.pull_request.target_repo.repo_name
1949 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1950 pr_id, pr_repo]
1951 return grouped
1952
1953 # ==========================================================================
1954 # SCM CACHE INSTANCE
1955 # ==========================================================================
1956
1957 def scm_instance(self, **kwargs):
1958 import rhodecode
1959
1960 # Passing a config will not hit the cache; currently this is only
1961 # used for repo2dbmapper
1962 config = kwargs.pop('config', None)
1963 cache = kwargs.pop('cache', None)
1964 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1965 # if cache is NOT defined use the global default, else we have full
1966 # control over the cache behaviour
1967 if cache is None and full_cache and not config:
1968 return self._get_instance_cached()
1969 return self._get_instance(cache=bool(cache), config=config)
1970
1971 def _get_instance_cached(self):
1972 @cache_region('long_term')
1973 def _get_repo(cache_key):
1974 return self._get_instance()
1975
1976 invalidator_context = CacheKey.repo_context_cache(
1977 _get_repo, self.repo_name, None)
1978
1979 with invalidator_context as context:
1980 context.invalidate()
1981 repo = context.compute()
1982
1983 return repo
1984
1985 def _get_instance(self, cache=True, config=None):
1986 repo_full_path = self.repo_full_path
1987 try:
1988 vcs_alias = get_scm(repo_full_path)[0]
1989 log.debug(
1990 'Creating instance of %s repository from %s',
1991 vcs_alias, repo_full_path)
1992 backend = get_backend(vcs_alias)
1993 except VCSError:
1994 log.exception(
1995 'Perhaps this repository is in the db but not on the '
1996 'filesystem; run "rescan repositories" with the '
1997 '"destroy old data" option from the admin panel')
1998 return
1999
2000 config = config or self._config
2001 custom_wire = {
2002 'cache': cache # controls the vcs.remote cache
2003 }
2004 repo = backend(
2005 safe_str(repo_full_path), config=config, create=False,
2006 with_wire=custom_wire)
2007
2008 return repo
2009
2010 def __json__(self):
2011 return {'landing_rev': self.landing_rev}
2012
2013 def get_dict(self):
2014
2015 # Since we transformed `repo_name` to a hybrid property, we need to
2016 # keep compatibility with the code which uses `repo_name` field.
2017
2018 result = super(Repository, self).get_dict()
2019 result['repo_name'] = result.pop('_repo_name', None)
2020 return result
2021
2022
2023 class RepoGroup(Base, BaseModel):
2024 __tablename__ = 'groups'
2025 __table_args__ = (
2026 UniqueConstraint('group_name', 'group_parent_id'),
2027 CheckConstraint('group_id != group_parent_id'),
2028 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2029 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2030 )
2031 __mapper_args__ = {'order_by': 'group_name'}
2032
2033 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2034
2035 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2036 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2037 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2038 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2039 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2040 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2041 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2042
2043 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2044 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2045 parent_group = relationship('RepoGroup', remote_side=group_id)
2046 user = relationship('User')
2047
2048 def __init__(self, group_name='', parent_group=None):
2049 self.group_name = group_name
2050 self.parent_group = parent_group
2051
2052 def __unicode__(self):
2053 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2054 self.group_name)
2055
2056 @classmethod
2057 def _generate_choice(cls, repo_group):
2058 from webhelpers.html import literal as _literal
2059 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
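 # illustrative: a nested group 'web/backend' yields
 # (group_id, u'web/backend') as a select2 choice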
2060 return repo_group.group_id, _name(repo_group.full_path_splitted)
2061
2062 @classmethod
2063 def groups_choices(cls, groups=None, show_empty_group=True):
2064 if not groups:
2065 groups = cls.query().all()
2066
2067 repo_groups = []
2068 if show_empty_group:
2069 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2070
2071 repo_groups.extend([cls._generate_choice(x) for x in groups])
2072
2073 repo_groups = sorted(
2074 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2075 return repo_groups
2076
2077 @classmethod
2078 def url_sep(cls):
2079 return URL_SEP
2080
2081 @classmethod
2082 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2083 if case_insensitive:
2084 gr = cls.query().filter(func.lower(cls.group_name)
2085 == func.lower(group_name))
2086 else:
2087 gr = cls.query().filter(cls.group_name == group_name)
2088 if cache:
2089 gr = gr.options(FromCache(
2090 "sql_cache_short",
2091 "get_group_%s" % _hash_key(group_name)))
2092 return gr.scalar()
2093
2094 @classmethod
2095 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2096 case_insensitive=True):
2097 q = RepoGroup.query()
2098
2099 if not isinstance(user_id, Optional):
2100 q = q.filter(RepoGroup.user_id == user_id)
2101
2102 if not isinstance(group_id, Optional):
2103 q = q.filter(RepoGroup.group_parent_id == group_id)
2104
2105 if case_insensitive:
2106 q = q.order_by(func.lower(RepoGroup.group_name))
2107 else:
2108 q = q.order_by(RepoGroup.group_name)
2109 return q.all()
2110
2111 @property
2112 def parents(self):
2113 parents_recursion_limit = 10
2114 groups = []
2115 if self.parent_group is None:
2116 return groups
2117 cur_gr = self.parent_group
2118 groups.insert(0, cur_gr)
2119 cnt = 0
2120 while 1:
2121 cnt += 1
2122 gr = getattr(cur_gr, 'parent_group', None)
2123 cur_gr = cur_gr.parent_group
2124 if gr is None:
2125 break
2126 if cnt == parents_recursion_limit:
2127 # this will prevent accidental infinite loops
2128 log.error(('more than %s parents found for group %s, stopping '
2129 'recursive parent fetching' % (parents_recursion_limit, self)))
2130 break
2131
2132 groups.insert(0, gr)
2133 return groups
2134
2135 @property
2136 def children(self):
2137 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2138
2139 @property
2140 def name(self):
2141 return self.group_name.split(RepoGroup.url_sep())[-1]
2142
2143 @property
2144 def full_path(self):
2145 return self.group_name
2146
2147 @property
2148 def full_path_splitted(self):
2149 return self.group_name.split(RepoGroup.url_sep())
2150
2151 @property
2152 def repositories(self):
2153 return Repository.query()\
2154 .filter(Repository.group == self)\
2155 .order_by(Repository.repo_name)
2156
2157 @property
2158 def repositories_recursive_count(self):
2159 cnt = self.repositories.count()
2160
2161 def children_count(group):
2162 cnt = 0
2163 for child in group.children:
2164 cnt += child.repositories.count()
2165 cnt += children_count(child)
2166 return cnt
2167
2168 return cnt + children_count(self)
2169
2170 def _recursive_objects(self, include_repos=True):
2171 all_ = []
2172
2173 def _get_members(root_gr):
2174 if include_repos:
2175 for r in root_gr.repositories:
2176 all_.append(r)
2177 childs = root_gr.children.all()
2178 if childs:
2179 for gr in childs:
2180 all_.append(gr)
2181 _get_members(gr)
2182
2183 _get_members(self)
2184 return [self] + all_
2185
2186 def recursive_groups_and_repos(self):
2187 """
2188 Recursively return all groups, with the repositories in those groups
2189 """
2190 return self._recursive_objects()
2191
2192 def recursive_groups(self):
2193 """
2194 Returns all child groups of this group, including children of children
2195 """
2196 return self._recursive_objects(include_repos=False)
2197
2198 def get_new_name(self, group_name):
2199 """
2200 returns new full group name based on parent and new name
2201
2202 :param group_name:
2203 """
2204 path_prefix = (self.parent_group.full_path_splitted if
2205 self.parent_group else [])
2206 return RepoGroup.url_sep().join(path_prefix + [group_name])
2207
2208 def permissions(self, with_admins=True, with_owner=True):
2209 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2210 q = q.options(joinedload(UserRepoGroupToPerm.group),
2211 joinedload(UserRepoGroupToPerm.user),
2212 joinedload(UserRepoGroupToPerm.permission),)
2213
2214 # get owners, admins and their permissions. We rewrite the sqlalchemy
2215 # objects into plain AttributeDict copies because the sqlalchemy session
2216 # keeps a global reference and changing one object would propagate to
2217 # all others. E.g. if an admin is also the owner, setting admin_row on
2218 # one row would otherwise change both objects.
2219 perm_rows = []
2220 for _usr in q.all():
2221 usr = AttributeDict(_usr.user.get_dict())
2222 usr.permission = _usr.permission.permission_name
2223 perm_rows.append(usr)
2224
2225 # sort the perm rows: the 'default' user first, then by admin, write,
2226 # read, none permission, sorted alphabetically within each of those
2227 # groups
2228 perm_rows = sorted(perm_rows, key=display_sort)
2229
2230 _admin_perm = 'group.admin'
2231 owner_row = []
2232 if with_owner:
2233 usr = AttributeDict(self.user.get_dict())
2234 usr.owner_row = True
2235 usr.permission = _admin_perm
2236 owner_row.append(usr)
2237
2238 super_admin_rows = []
2239 if with_admins:
2240 for usr in User.get_all_super_admins():
2241 # if this admin is also owner, don't double the record
2242 if usr.user_id == owner_row[0].user_id:
2243 owner_row[0].admin_row = True
2244 else:
2245 usr = AttributeDict(usr.get_dict())
2246 usr.admin_row = True
2247 usr.permission = _admin_perm
2248 super_admin_rows.append(usr)
2249
2250 return super_admin_rows + owner_row + perm_rows
2251
2252 def permission_user_groups(self):
2253 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2254 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2255 joinedload(UserGroupRepoGroupToPerm.users_group),
2256 joinedload(UserGroupRepoGroupToPerm.permission),)
2257
2258 perm_rows = []
2259 for _user_group in q.all():
2260 usr = AttributeDict(_user_group.users_group.get_dict())
2261 usr.permission = _user_group.permission.permission_name
2262 perm_rows.append(usr)
2263
2264 return perm_rows
2265
2266 def get_api_data(self):
2267 """
2268 Common function for generating api data
2269
2270 """
2271 group = self
2272 data = {
2273 'group_id': group.group_id,
2274 'group_name': group.group_name,
2275 'group_description': group.group_description,
2276 'parent_group': group.parent_group.group_name if group.parent_group else None,
2277 'repositories': [x.repo_name for x in group.repositories],
2278 'owner': group.user.username,
2279 }
2280 return data
2281
2282
2283 class Permission(Base, BaseModel):
2284 __tablename__ = 'permissions'
2285 __table_args__ = (
2286 Index('p_perm_name_idx', 'permission_name'),
2287 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2288 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2289 )
2290 PERMS = [
2291 ('hg.admin', _('RhodeCode Super Administrator')),
2292
2293 ('repository.none', _('Repository no access')),
2294 ('repository.read', _('Repository read access')),
2295 ('repository.write', _('Repository write access')),
2296 ('repository.admin', _('Repository admin access')),
2297
2298 ('group.none', _('Repository group no access')),
2299 ('group.read', _('Repository group read access')),
2300 ('group.write', _('Repository group write access')),
2301 ('group.admin', _('Repository group admin access')),
2302
2303 ('usergroup.none', _('User group no access')),
2304 ('usergroup.read', _('User group read access')),
2305 ('usergroup.write', _('User group write access')),
2306 ('usergroup.admin', _('User group admin access')),
2307
2308 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2309 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2310
2311 ('hg.usergroup.create.false', _('User Group creation disabled')),
2312 ('hg.usergroup.create.true', _('User Group creation enabled')),
2313
2314 ('hg.create.none', _('Repository creation disabled')),
2315 ('hg.create.repository', _('Repository creation enabled')),
2316 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2317 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2318
2319 ('hg.fork.none', _('Repository forking disabled')),
2320 ('hg.fork.repository', _('Repository forking enabled')),
2321
2322 ('hg.register.none', _('Registration disabled')),
2323 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2324 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2325
2326 ('hg.extern_activate.manual', _('Manual activation of external account')),
2327 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2328
2329 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2330 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2331 ]
2332
2333 # definition of system default permissions for DEFAULT user
2334 DEFAULT_USER_PERMISSIONS = [
2335 'repository.read',
2336 'group.read',
2337 'usergroup.read',
2338 'hg.create.repository',
2339 'hg.repogroup.create.false',
2340 'hg.usergroup.create.false',
2341 'hg.create.write_on_repogroup.true',
2342 'hg.fork.repository',
2343 'hg.register.manual_activate',
2344 'hg.extern_activate.auto',
2345 'hg.inherit_default_perms.true',
2346 ]
2347
2348 # Weight defines which permissions are more important.
2349 # The higher the number, the more important the permission.
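 # Illustrative example: 'repository.write' (weight 3) is considered
 # stronger than 'repository.read' (weight 1).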
2351 PERM_WEIGHTS = {
2352 'repository.none': 0,
2353 'repository.read': 1,
2354 'repository.write': 3,
2355 'repository.admin': 4,
2356
2357 'group.none': 0,
2358 'group.read': 1,
2359 'group.write': 3,
2360 'group.admin': 4,
2361
2362 'usergroup.none': 0,
2363 'usergroup.read': 1,
2364 'usergroup.write': 3,
2365 'usergroup.admin': 4,
2366
2367 'hg.repogroup.create.false': 0,
2368 'hg.repogroup.create.true': 1,
2369
2370 'hg.usergroup.create.false': 0,
2371 'hg.usergroup.create.true': 1,
2372
2373 'hg.fork.none': 0,
2374 'hg.fork.repository': 1,
2375 'hg.create.none': 0,
2376 'hg.create.repository': 1
2377 }
2378
2379 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2380 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2381 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2382
2383 def __unicode__(self):
2384 return u"<%s('%s:%s')>" % (
2385 self.__class__.__name__, self.permission_id, self.permission_name
2386 )
2387
2388 @classmethod
2389 def get_by_key(cls, key):
2390 return cls.query().filter(cls.permission_name == key).scalar()
2391
2392 @classmethod
2393 def get_default_repo_perms(cls, user_id, repo_id=None):
2394 q = Session().query(UserRepoToPerm, Repository, Permission)\
2395 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2396 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2397 .filter(UserRepoToPerm.user_id == user_id)
2398 if repo_id:
2399 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2400 return q.all()
2401
2402 @classmethod
2403 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2404 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2405 .join(
2406 Permission,
2407 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2408 .join(
2409 Repository,
2410 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2411 .join(
2412 UserGroup,
2413 UserGroupRepoToPerm.users_group_id ==
2414 UserGroup.users_group_id)\
2415 .join(
2416 UserGroupMember,
2417 UserGroupRepoToPerm.users_group_id ==
2418 UserGroupMember.users_group_id)\
2419 .filter(
2420 UserGroupMember.user_id == user_id,
2421 UserGroup.users_group_active == true())
2422 if repo_id:
2423 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2424 return q.all()
2425
2426 @classmethod
2427 def get_default_group_perms(cls, user_id, repo_group_id=None):
2428 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2429 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2430 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2431 .filter(UserRepoGroupToPerm.user_id == user_id)
2432 if repo_group_id:
2433 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2434 return q.all()
2435
2436 @classmethod
2437 def get_default_group_perms_from_user_group(
2438 cls, user_id, repo_group_id=None):
2439 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2440 .join(
2441 Permission,
2442 UserGroupRepoGroupToPerm.permission_id ==
2443 Permission.permission_id)\
2444 .join(
2445 RepoGroup,
2446 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2447 .join(
2448 UserGroup,
2449 UserGroupRepoGroupToPerm.users_group_id ==
2450 UserGroup.users_group_id)\
2451 .join(
2452 UserGroupMember,
2453 UserGroupRepoGroupToPerm.users_group_id ==
2454 UserGroupMember.users_group_id)\
2455 .filter(
2456 UserGroupMember.user_id == user_id,
2457 UserGroup.users_group_active == true())
2458 if repo_group_id:
2459 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2460 return q.all()
2461
2462 @classmethod
2463 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2464 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2465 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2466 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2467 .filter(UserUserGroupToPerm.user_id == user_id)
2468 if user_group_id:
2469 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2470 return q.all()
2471
2472 @classmethod
2473 def get_default_user_group_perms_from_user_group(
2474 cls, user_id, user_group_id=None):
2475 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2476 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2477 .join(
2478 Permission,
2479 UserGroupUserGroupToPerm.permission_id ==
2480 Permission.permission_id)\
2481 .join(
2482 TargetUserGroup,
2483 UserGroupUserGroupToPerm.target_user_group_id ==
2484 TargetUserGroup.users_group_id)\
2485 .join(
2486 UserGroup,
2487 UserGroupUserGroupToPerm.user_group_id ==
2488 UserGroup.users_group_id)\
2489 .join(
2490 UserGroupMember,
2491 UserGroupUserGroupToPerm.user_group_id ==
2492 UserGroupMember.users_group_id)\
2493 .filter(
2494 UserGroupMember.user_id == user_id,
2495 UserGroup.users_group_active == true())
2496 if user_group_id:
2497 q = q.filter(
2498 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2499
2500 return q.all()
2501
2502
2503 class UserRepoToPerm(Base, BaseModel):
2504 __tablename__ = 'repo_to_perm'
2505 __table_args__ = (
2506 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2507 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2508 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2509 )
2510 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2511 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2512 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2513 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2514
2515 user = relationship('User')
2516 repository = relationship('Repository')
2517 permission = relationship('Permission')
2518
2519 @classmethod
2520 def create(cls, user, repository, permission):
2521 n = cls()
2522 n.user = user
2523 n.repository = repository
2524 n.permission = permission
2525 Session().add(n)
2526 return n
2527
2528 def __unicode__(self):
2529 return u'<%s => %s >' % (self.user, self.repository)
2530
2531
2532 class UserUserGroupToPerm(Base, BaseModel):
2533 __tablename__ = 'user_user_group_to_perm'
2534 __table_args__ = (
2535 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2536 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2537 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2538 )
2539 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2540 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2541 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2542 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2543
2544 user = relationship('User')
2545 user_group = relationship('UserGroup')
2546 permission = relationship('Permission')
2547
2548 @classmethod
2549 def create(cls, user, user_group, permission):
2550 n = cls()
2551 n.user = user
2552 n.user_group = user_group
2553 n.permission = permission
2554 Session().add(n)
2555 return n
2556
2557 def __unicode__(self):
2558 return u'<%s => %s >' % (self.user, self.user_group)
2559
2560
2561 class UserToPerm(Base, BaseModel):
2562 __tablename__ = 'user_to_perm'
2563 __table_args__ = (
2564 UniqueConstraint('user_id', 'permission_id'),
2565 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2566 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2567 )
2568 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2569 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2570 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2571
2572 user = relationship('User')
2573 permission = relationship('Permission', lazy='joined')
2574
2575 def __unicode__(self):
2576 return u'<%s => %s >' % (self.user, self.permission)
2577
2578
2579 class UserGroupRepoToPerm(Base, BaseModel):
2580 __tablename__ = 'users_group_repo_to_perm'
2581 __table_args__ = (
2582 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2583 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2584 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2585 )
2586 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2587 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2588 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2589 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2590
2591 users_group = relationship('UserGroup')
2592 permission = relationship('Permission')
2593 repository = relationship('Repository')
2594
2595 @classmethod
2596 def create(cls, users_group, repository, permission):
2597 n = cls()
2598 n.users_group = users_group
2599 n.repository = repository
2600 n.permission = permission
2601 Session().add(n)
2602 return n
2603
2604 def __unicode__(self):
2605 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2606
2607
2608 class UserGroupUserGroupToPerm(Base, BaseModel):
2609 __tablename__ = 'user_group_user_group_to_perm'
2610 __table_args__ = (
2611 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2612 CheckConstraint('target_user_group_id != user_group_id'),
2613 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2614 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2615 )
2616 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2617 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2618 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2619 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2620
2621 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2622 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2623 permission = relationship('Permission')
2624
2625 @classmethod
2626 def create(cls, target_user_group, user_group, permission):
2627 n = cls()
2628 n.target_user_group = target_user_group
2629 n.user_group = user_group
2630 n.permission = permission
2631 Session().add(n)
2632 return n
2633
2634 def __unicode__(self):
2635 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2636
2637
2638 class UserGroupToPerm(Base, BaseModel):
2639 __tablename__ = 'users_group_to_perm'
2640 __table_args__ = (
2641 UniqueConstraint('users_group_id', 'permission_id',),
2642 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2643 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2644 )
2645 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2646 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2647 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2648
2649 users_group = relationship('UserGroup')
2650 permission = relationship('Permission')
2651
2652
2653 class UserRepoGroupToPerm(Base, BaseModel):
2654 __tablename__ = 'user_repo_group_to_perm'
2655 __table_args__ = (
2656 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2657 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2658 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2659 )
2660
2661 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2662 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2663 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2664 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2665
2666 user = relationship('User')
2667 group = relationship('RepoGroup')
2668 permission = relationship('Permission')
2669
2670 @classmethod
2671 def create(cls, user, repository_group, permission):
2672 n = cls()
2673 n.user = user
2674 n.group = repository_group
2675 n.permission = permission
2676 Session().add(n)
2677 return n
2678
2679
2680 class UserGroupRepoGroupToPerm(Base, BaseModel):
2681 __tablename__ = 'users_group_repo_group_to_perm'
2682 __table_args__ = (
2683 UniqueConstraint('users_group_id', 'group_id'),
2684 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2685 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2686 )
2687
2688 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2689 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2690 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2691 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2692
2693 users_group = relationship('UserGroup')
2694 permission = relationship('Permission')
2695 group = relationship('RepoGroup')
2696
2697 @classmethod
2698 def create(cls, user_group, repository_group, permission):
2699 n = cls()
2700 n.users_group = user_group
2701 n.group = repository_group
2702 n.permission = permission
2703 Session().add(n)
2704 return n
2705
2706 def __unicode__(self):
2707 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2708
2709
2710 class Statistics(Base, BaseModel):
2711 __tablename__ = 'statistics'
2712 __table_args__ = (
2713 UniqueConstraint('repository_id'),
2714 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2715 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2716 )
2717 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2718 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2719 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2720 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
2721 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
2722 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
2723
2724 repository = relationship('Repository', single_parent=True)
2725
2726
2727 class UserFollowing(Base, BaseModel):
2728 __tablename__ = 'user_followings'
2729 __table_args__ = (
2730 UniqueConstraint('user_id', 'follows_repository_id'),
2731 UniqueConstraint('user_id', 'follows_user_id'),
2732 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2733 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2734 )
2735
2736 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2737 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2738 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2739 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2740 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2741
2742 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2743
2744 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2745 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2746
2747 @classmethod
2748 def get_repo_followers(cls, repo_id):
2749 return cls.query().filter(cls.follows_repo_id == repo_id)
2750
2751
2752 class CacheKey(Base, BaseModel):
2753 __tablename__ = 'cache_invalidation'
2754 __table_args__ = (
2755 UniqueConstraint('cache_key'),
2756 Index('key_idx', 'cache_key'),
2757 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2758 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2759 )
2760 CACHE_TYPE_ATOM = 'ATOM'
2761 CACHE_TYPE_RSS = 'RSS'
2762 CACHE_TYPE_README = 'README'
2763
2764 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2765 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2766 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2767 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2768
2769 def __init__(self, cache_key, cache_args=''):
2770 self.cache_key = cache_key
2771 self.cache_args = cache_args
2772 self.cache_active = False
2773
2774 def __unicode__(self):
2775 return u"<%s('%s:%s[%s]')>" % (
2776 self.__class__.__name__,
2777 self.cache_id, self.cache_key, self.cache_active)
2778
2779 def _cache_key_partition(self):
2780 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2781 return prefix, repo_name, suffix
2782
2783 def get_prefix(self):
2784 """
2785 Try to extract the prefix from an existing cache key. The key can
2786 consist of a prefix, repo_name and suffix.
2787 """
2788 # this returns prefix, repo_name, suffix
2789 return self._cache_key_partition()[0]
2790
2791 def get_suffix(self):
2792 """
2793 Get the suffix that might have been used in _get_cache_key to
2794 generate self.cache_key. Only used for informational purposes
2795 in repo_edit.html.
2796 """
2797 # prefix, repo_name, suffix
2798 return self._cache_key_partition()[2]
2799
2800 @classmethod
2801 def delete_all_cache(cls):
2802 """
2803 Delete all cache keys from database.
2804 Should only be run when all instances are down and all entries
2805 thus stale.
2806 """
2807 cls.query().delete()
2808 Session().commit()
2809
2810 @classmethod
2811 def get_cache_key(cls, repo_name, cache_type):
2812 """
2813
2814 Generate a cache key for this process of the RhodeCode instance.
2815 The prefix will most likely be the process id, or an instance_id
2816 explicitly set in the .ini file.
2817 """
2818 import rhodecode
2819 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2820
2821 repo_as_unicode = safe_unicode(repo_name)
2822 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2823 if cache_type else repo_as_unicode
2824
2825 return u'{}{}'.format(prefix, key)
2826
2827 @classmethod
2828 def set_invalidate(cls, repo_name, delete=False):
2829 """
2830 Mark all caches of a repo as invalid in the database.
2831 """
2832
2833 try:
2834 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2835 if delete:
2836 log.debug('cache objects deleted for repo %s',
2837 safe_str(repo_name))
2838 qry.delete()
2839 else:
2840 log.debug('cache objects marked as invalid for repo %s',
2841 safe_str(repo_name))
2842 qry.update({"cache_active": False})
2843
2844 Session().commit()
2845 except Exception:
2846 log.exception(
2847 'Cache key invalidation failed for repository %s',
2848 safe_str(repo_name))
2849 Session().rollback()
2850
2851 @classmethod
2852 def get_active_cache(cls, cache_key):
2853 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2854 if inv_obj:
2855 return inv_obj
2856 return None
2857
2858 @classmethod
2859 def repo_context_cache(cls, compute_func, repo_name, cache_type):
2860 """
2861 @cache_region('long_term')
2862 def _heavy_calculation(cache_key):
2863 return 'result'
2864
2865 cache_context = CacheKey.repo_context_cache(
2866 _heavy_calculation, repo_name, cache_type)
2867
2868 with cache_context as context:
2869 context.invalidate()
2870 computed = context.compute()
2871
2872 assert computed == 'result'
2873 """
2874 from rhodecode.lib import caches
2875 return caches.InvalidationContext(compute_func, repo_name, cache_type)
2876
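For completeness, a small sketch of the invalidation helpers above; the repo name is an illustrative value, and it assumes CacheKey is importable from rhodecode.model.db, with only the classmethods shown in this hunk being used:

from rhodecode.model.db import CacheKey

# mark every cache entry whose cache_args matches the repo as inactive
CacheKey.set_invalidate('some-group/some-repo')

# or drop the rows entirely instead of just flagging them
CacheKey.set_invalidate('some-group/some-repo', delete=True)

# later, look up a single entry by its full key
entry = CacheKey.get_active_cache(
    CacheKey.get_cache_key('some-group/some-repo', CacheKey.CACHE_TYPE_README))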
2877
2878 class ChangesetComment(Base, BaseModel):
2879 __tablename__ = 'changeset_comments'
2880 __table_args__ = (
2881 Index('cc_revision_idx', 'revision'),
2882 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2883 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2884 )
2885
2886 COMMENT_OUTDATED = u'comment_outdated'
2887
2888 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
2889 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2890 revision = Column('revision', String(40), nullable=True)
2891 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2892 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
2893 line_no = Column('line_no', Unicode(10), nullable=True)
2894 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
2895 f_path = Column('f_path', Unicode(1000), nullable=True)
2896 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2897 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
2898 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2899 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2900 renderer = Column('renderer', Unicode(64), nullable=True)
2901 display_state = Column('display_state', Unicode(128), nullable=True)
2902
2903 author = relationship('User', lazy='joined')
2904 repo = relationship('Repository')
2905 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
2906 pull_request = relationship('PullRequest', lazy='joined')
2907 pull_request_version = relationship('PullRequestVersion')
2908
2909 @classmethod
2910 def get_users(cls, revision=None, pull_request_id=None):
2911 """
2912 Returns the users associated with this ChangesetComment, i.e. those
2913 who actually commented.
2914
2915 :param cls:
2916 :param revision:
2917 """
2918 q = Session().query(User)\
2919 .join(ChangesetComment.author)
2920 if revision:
2921 q = q.filter(cls.revision == revision)
2922 elif pull_request_id:
2923 q = q.filter(cls.pull_request_id == pull_request_id)
2924 return q.all()
2925
2926 def render(self, mentions=False):
2927 from rhodecode.lib import helpers as h
2928 return h.render(self.text, renderer=self.renderer, mentions=mentions)
2929
2930 def __repr__(self):
2931 if self.comment_id:
2932 return '<DB:ChangesetComment #%s>' % self.comment_id
2933 else:
2934 return '<DB:ChangesetComment at %#x>' % id(self)
2935
2936
2937 class ChangesetStatus(Base, BaseModel):
2938 __tablename__ = 'changeset_statuses'
2939 __table_args__ = (
2940 Index('cs_revision_idx', 'revision'),
2941 Index('cs_version_idx', 'version'),
2942 UniqueConstraint('repo_id', 'revision', 'version'),
2943 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2944 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2945 )
2946 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2947 STATUS_APPROVED = 'approved'
2948 STATUS_REJECTED = 'rejected'
2949 STATUS_UNDER_REVIEW = 'under_review'
2950
2951 STATUSES = [
2952 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
2953 (STATUS_APPROVED, _("Approved")),
2954 (STATUS_REJECTED, _("Rejected")),
2955 (STATUS_UNDER_REVIEW, _("Under Review")),
2956 ]
2957
2958 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
2959 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2960 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
2961 revision = Column('revision', String(40), nullable=False)
2962 status = Column('status', String(128), nullable=False, default=DEFAULT)
2963 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
2964 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
2965 version = Column('version', Integer(), nullable=False, default=0)
2966 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2967
2968 author = relationship('User', lazy='joined')
2969 repo = relationship('Repository')
2970 comment = relationship('ChangesetComment', lazy='joined')
2971 pull_request = relationship('PullRequest', lazy='joined')
2972
2973 def __unicode__(self):
2974 return u"<%s('%s[%s]:%s')>" % (
2975 self.__class__.__name__,
2976 self.status, self.version, self.author
2977 )
2978
2979 @classmethod
2980 def get_status_lbl(cls, value):
2981 return dict(cls.STATUSES).get(value)
2982
2983 @property
2984 def status_lbl(self):
2985 return ChangesetStatus.get_status_lbl(self.status)
2986
2987
2988 class _PullRequestBase(BaseModel):
2989 """
2990 Common attributes of pull request and version entries.
2991 """
2992
2993 # .status values
2994 STATUS_NEW = u'new'
2995 STATUS_OPEN = u'open'
2996 STATUS_CLOSED = u'closed'
2997
2998 title = Column('title', Unicode(255), nullable=True)
2999 description = Column(
3000 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3001 nullable=True)
3002 # new/open/closed status of pull request (not approve/reject/etc)
3003 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3004 created_on = Column(
3005 'created_on', DateTime(timezone=False), nullable=False,
3006 default=datetime.datetime.now)
3007 updated_on = Column(
3008 'updated_on', DateTime(timezone=False), nullable=False,
3009 default=datetime.datetime.now)
3010
3011 @declared_attr
3012 def user_id(cls):
3013 return Column(
3014 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3015 unique=None)
3016
3017 # 500 revisions max
3018 _revisions = Column(
3019 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3020
3021 @declared_attr
3022 def source_repo_id(cls):
3023 # TODO: dan: rename column to source_repo_id
3024 return Column(
3025 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3026 nullable=False)
3027
3028 source_ref = Column('org_ref', Unicode(255), nullable=False)
3029
3030 @declared_attr
3031 def target_repo_id(cls):
3032 # TODO: dan: rename column to target_repo_id
3033 return Column(
3034 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3035 nullable=False)
3036
3037 target_ref = Column('other_ref', Unicode(255), nullable=False)
3038
3039 # TODO: dan: rename column to last_merge_source_rev
3040 _last_merge_source_rev = Column(
3041 'last_merge_org_rev', String(40), nullable=True)
3042 # TODO: dan: rename column to last_merge_target_rev
3043 _last_merge_target_rev = Column(
3044 'last_merge_other_rev', String(40), nullable=True)
3045 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3046 merge_rev = Column('merge_rev', String(40), nullable=True)
3047
3048 @hybrid_property
3049 def revisions(self):
3050 return self._revisions.split(':') if self._revisions else []
3051
3052 @revisions.setter
3053 def revisions(self, val):
3054 self._revisions = ':'.join(val)
3055
3056 @declared_attr
3057 def author(cls):
3058 return relationship('User', lazy='joined')
3059
3060 @declared_attr
3061 def source_repo(cls):
3062 return relationship(
3063 'Repository',
3064 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3065
3066 @property
3067 def source_ref_parts(self):
3068 refs = self.source_ref.split(':')
3069 return Reference(refs[0], refs[1], refs[2])
3070
3071 @declared_attr
3072 def target_repo(cls):
3073 return relationship(
3074 'Repository',
3075 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3076
3077 @property
3078 def target_ref_parts(self):
3079 refs = self.target_ref.split(':')
3080 return Reference(refs[0], refs[1], refs[2])
3081
3082
3083 class PullRequest(Base, _PullRequestBase):
3084 __tablename__ = 'pull_requests'
3085 __table_args__ = (
3086 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3087 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3088 )
3089
3090 pull_request_id = Column(
3091 'pull_request_id', Integer(), nullable=False, primary_key=True)
3092
3093 def __repr__(self):
3094 if self.pull_request_id:
3095 return '<DB:PullRequest #%s>' % self.pull_request_id
3096 else:
3097 return '<DB:PullRequest at %#x>' % id(self)
3098
3099 reviewers = relationship('PullRequestReviewers',
3100 cascade="all, delete, delete-orphan")
3101 statuses = relationship('ChangesetStatus')
3102 comments = relationship('ChangesetComment',
3103 cascade="all, delete, delete-orphan")
3104 versions = relationship('PullRequestVersion',
3105 cascade="all, delete, delete-orphan")
3106
3107 def is_closed(self):
3108 return self.status == self.STATUS_CLOSED
3109
3110 def get_api_data(self):
3111 from rhodecode.model.pull_request import PullRequestModel
3112 pull_request = self
3113 merge_status = PullRequestModel().merge_status(pull_request)
3114 data = {
3115 'pull_request_id': pull_request.pull_request_id,
3116 'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
3117 pull_request_id=self.pull_request_id,
3118 qualified=True),
3119 'title': pull_request.title,
3120 'description': pull_request.description,
3121 'status': pull_request.status,
3122 'created_on': pull_request.created_on,
3123 'updated_on': pull_request.updated_on,
3124 'commit_ids': pull_request.revisions,
3125 'review_status': pull_request.calculated_review_status(),
3126 'mergeable': {
3127 'status': merge_status[0],
3128 'message': unicode(merge_status[1]),
3129 },
3130 'source': {
3131 'clone_url': pull_request.source_repo.clone_url(),
3132 'repository': pull_request.source_repo.repo_name,
3133 'reference': {
3134 'name': pull_request.source_ref_parts.name,
3135 'type': pull_request.source_ref_parts.type,
3136 'commit_id': pull_request.source_ref_parts.commit_id,
3137 },
3138 },
3139 'target': {
3140 'clone_url': pull_request.target_repo.clone_url(),
3141 'repository': pull_request.target_repo.repo_name,
3142 'reference': {
3143 'name': pull_request.target_ref_parts.name,
3144 'type': pull_request.target_ref_parts.type,
3145 'commit_id': pull_request.target_ref_parts.commit_id,
3146 },
3147 },
3148 'author': pull_request.author.get_api_data(include_secrets=False,
3149 details='basic'),
3150 'reviewers': [
3151 {
3152 'user': reviewer.get_api_data(include_secrets=False,
3153 details='basic'),
3154 'review_status': st[0][1].status if st else 'not_reviewed',
3155 }
3156 for reviewer, st in pull_request.reviewers_statuses()
3157 ]
3158 }
3159
3160 return data
3161
3162 def __json__(self):
3163 return {
3164 'revisions': self.revisions,
3165 }
3166
3167 def calculated_review_status(self):
3168 # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
3169 # because it's tricky to use ChangesetStatusModel from there
3170 warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
3171 from rhodecode.model.changeset_status import ChangesetStatusModel
3172 return ChangesetStatusModel().calculated_review_status(self)
3173
3174 def reviewers_statuses(self):
3175 warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
3176 from rhodecode.model.changeset_status import ChangesetStatusModel
3177 return ChangesetStatusModel().reviewers_statuses(self)
3178
3179
3180 class PullRequestVersion(Base, _PullRequestBase):
3181 __tablename__ = 'pull_request_versions'
3182 __table_args__ = (
3183 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3184 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3185 )
3186
3187 pull_request_version_id = Column(
3188 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3189 pull_request_id = Column(
3190 'pull_request_id', Integer(),
3191 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3192 pull_request = relationship('PullRequest')
3193
3194 def __repr__(self):
3195 if self.pull_request_version_id:
3196 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3197 else:
3198 return '<DB:PullRequestVersion at %#x>' % id(self)
3199
3200
3201 class PullRequestReviewers(Base, BaseModel):
3202 __tablename__ = 'pull_request_reviewers'
3203 __table_args__ = (
3204 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3205 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3206 )
3207
3208 def __init__(self, user=None, pull_request=None):
3209 self.user = user
3210 self.pull_request = pull_request
3211
3212 pull_requests_reviewers_id = Column(
3213 'pull_requests_reviewers_id', Integer(), nullable=False,
3214 primary_key=True)
3215 pull_request_id = Column(
3216 "pull_request_id", Integer(),
3217 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3218 user_id = Column(
3219 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3220
3221 user = relationship('User')
3222 pull_request = relationship('PullRequest')
3223
3224
3225 class Notification(Base, BaseModel):
3226 __tablename__ = 'notifications'
3227 __table_args__ = (
3228 Index('notification_type_idx', 'type'),
3229 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3230 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3231 )
3232
3233 TYPE_CHANGESET_COMMENT = u'cs_comment'
3234 TYPE_MESSAGE = u'message'
3235 TYPE_MENTION = u'mention'
3236 TYPE_REGISTRATION = u'registration'
3237 TYPE_PULL_REQUEST = u'pull_request'
3238 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3239
3240 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3241 subject = Column('subject', Unicode(512), nullable=True)
3242 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3243 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3244 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3245 type_ = Column('type', Unicode(255))
3246
3247 created_by_user = relationship('User')
3248 notifications_to_users = relationship('UserNotification', lazy='joined',
3249 cascade="all, delete, delete-orphan")
3250
3251 @property
3252 def recipients(self):
3253 return [x.user for x in UserNotification.query()\
3254 .filter(UserNotification.notification == self)\
3255 .order_by(UserNotification.user_id.asc()).all()]
3256
3257 @classmethod
3258 def create(cls, created_by, subject, body, recipients, type_=None):
3259 if type_ is None:
3260 type_ = Notification.TYPE_MESSAGE
3261
3262 notification = cls()
3263 notification.created_by_user = created_by
3264 notification.subject = subject
3265 notification.body = body
3266 notification.type_ = type_
3267 notification.created_on = datetime.datetime.now()
3268
3269 for u in recipients:
3270 assoc = UserNotification()
3271 assoc.notification = notification
3272
3273 # if created_by is inside recipients mark his notification
3274 # as read
3275 if u.user_id == created_by.user_id:
3276 assoc.read = True
3277
3278 u.notifications.append(assoc)
3279 Session().add(notification)
3280
3281 return notification
3282
3283 @property
3284 def description(self):
3285 from rhodecode.model.notification import NotificationModel
3286 return NotificationModel().make_description(self)
3287
3288
3289 class UserNotification(Base, BaseModel):
3290 __tablename__ = 'user_to_notification'
3291 __table_args__ = (
3292 UniqueConstraint('user_id', 'notification_id'),
3293 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3294 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3295 )
3296 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3297 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3298 read = Column('read', Boolean, default=False)
3299 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3300
3301 user = relationship('User', lazy="joined")
3302 notification = relationship('Notification', lazy="joined",
3303 order_by=lambda: Notification.created_on.desc(),)
3304
3305 def mark_as_read(self):
3306 self.read = True
3307 Session().add(self)
3308
3309
3310 class Gist(Base, BaseModel):
3311 __tablename__ = 'gists'
3312 __table_args__ = (
3313 Index('g_gist_access_id_idx', 'gist_access_id'),
3314 Index('g_created_on_idx', 'created_on'),
3315 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3316 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3317 )
3318 GIST_PUBLIC = u'public'
3319 GIST_PRIVATE = u'private'
3320 DEFAULT_FILENAME = u'gistfile1.txt'
3321
3322 ACL_LEVEL_PUBLIC = u'acl_public'
3323 ACL_LEVEL_PRIVATE = u'acl_private'
3324
3325 gist_id = Column('gist_id', Integer(), primary_key=True)
3326 gist_access_id = Column('gist_access_id', Unicode(250))
3327 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3328 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3329 gist_expires = Column('gist_expires', Float(53), nullable=False)
3330 gist_type = Column('gist_type', Unicode(128), nullable=False)
3331 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3332 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3333 acl_level = Column('acl_level', Unicode(128), nullable=True)
3334
3335 owner = relationship('User')
3336
3337 def __repr__(self):
3338 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3339
3340 @classmethod
3341 def get_or_404(cls, id_):
3342 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3343 if not res:
3344 raise HTTPNotFound
3345 return res
3346
3347 @classmethod
3348 def get_by_access_id(cls, gist_access_id):
3349 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3350
3351 def gist_url(self):
3352 import rhodecode
3353 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3354 if alias_url:
3355 return alias_url.replace('{gistid}', self.gist_access_id)
3356
3357 return url('gist', gist_id=self.gist_access_id, qualified=True)
3358
3359 @classmethod
3360 def base_path(cls):
3361 """
3362 Returns the base path where all gists are stored
3363
3364 :param cls:
3365 """
3366 from rhodecode.model.gist import GIST_STORE_LOC
3367 q = Session().query(RhodeCodeUi)\
3368 .filter(RhodeCodeUi.ui_key == URL_SEP)
3369 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3370 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3371
3372 def get_api_data(self):
3373 """
3374 Common function for generating gist related data for API
3375 """
3376 gist = self
3377 data = {
3378 'gist_id': gist.gist_id,
3379 'type': gist.gist_type,
3380 'access_id': gist.gist_access_id,
3381 'description': gist.gist_description,
3382 'url': gist.gist_url(),
3383 'expires': gist.gist_expires,
3384 'created_on': gist.created_on,
3385 'modified_at': gist.modified_at,
3386 'content': None,
3387 'acl_level': gist.acl_level,
3388 }
3389 return data
3390
3391 def __json__(self):
3392 data = dict(
3393 )
3394 data.update(self.get_api_data())
3395 return data
3396 # SCM functions
3397
3398 def scm_instance(self, **kwargs):
3399 from rhodecode.lib.vcs import get_repo
3400 base_path = self.base_path()
3401 return get_repo(os.path.join(*map(safe_str,
3402 [base_path, self.gist_access_id])))
3403
3404
3405 class DbMigrateVersion(Base, BaseModel):
3406 __tablename__ = 'db_migrate_version'
3407 __table_args__ = (
3408 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3409 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3410 )
3411 repository_id = Column('repository_id', String(250), primary_key=True)
3412 repository_path = Column('repository_path', Text)
3413 version = Column('version', Integer)
3414
3415
3416 class ExternalIdentity(Base, BaseModel):
3417 __tablename__ = 'external_identities'
3418 __table_args__ = (
3419 Index('local_user_id_idx', 'local_user_id'),
3420 Index('external_id_idx', 'external_id'),
3421 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3422 'mysql_charset': 'utf8'})
3423
3424 external_id = Column('external_id', Unicode(255), default=u'',
3425 primary_key=True)
3426 external_username = Column('external_username', Unicode(1024), default=u'')
3427 local_user_id = Column('local_user_id', Integer(),
3428 ForeignKey('users.user_id'), primary_key=True)
3429 provider_name = Column('provider_name', Unicode(255), default=u'',
3430 primary_key=True)
3431 access_token = Column('access_token', String(1024), default=u'')
3432 alt_token = Column('alt_token', String(1024), default=u'')
3433 token_secret = Column('token_secret', String(1024), default=u'')
3434
3435 @classmethod
3436 def by_external_id_and_provider(cls, external_id, provider_name,
3437 local_user_id=None):
3438 """
3439 Returns ExternalIdentity instance based on search params
3440
3441 :param external_id:
3442 :param provider_name:
3443 :return: ExternalIdentity
3444 """
3445 query = cls.query()
3446 query = query.filter(cls.external_id == external_id)
3447 query = query.filter(cls.provider_name == provider_name)
3448 if local_user_id:
3449 query = query.filter(cls.local_user_id == local_user_id)
3450 return query.first()
3451
3452 @classmethod
3453 def user_by_external_id_and_provider(cls, external_id, provider_name):
3454 """
3455 Returns User instance based on search params
3456
3457 :param external_id:
3458 :param provider_name:
3459 :return: User
3460 """
3461 query = User.query()
3462 query = query.filter(cls.external_id == external_id)
3463 query = query.filter(cls.provider_name == provider_name)
3464 query = query.filter(User.user_id == cls.local_user_id)
3465 return query.first()
3466
3467 @classmethod
3468 def by_local_user_id(cls, local_user_id):
3469 """
3470 Returns all external identities (stored tokens) for a user
3471
3472 :param local_user_id:
3473 :return: ExternalIdentity
3474 """
3475 query = cls.query()
3476 query = query.filter(cls.local_user_id == local_user_id)
3477 return query
3478
3479
3480 class Integration(Base, BaseModel):
3481 __tablename__ = 'integrations'
3482 __table_args__ = (
3483 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3484 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3485 )
3486
3487 integration_id = Column('integration_id', Integer(), primary_key=True)
3488 integration_type = Column('integration_type', String(255))
3489 enabled = Column("enabled", Boolean(), nullable=False)
3490 name = Column('name', String(255), nullable=False)
3491 settings_json = Column('settings_json',
3492 UnicodeText().with_variant(UnicodeText(16384), 'mysql'))
3493 repo_id = Column(
3494 "repo_id", Integer(), ForeignKey('repositories.repo_id'),
3495 nullable=True, unique=None, default=None)
3496 repo = relationship('Repository', lazy='joined')
3497
3498 @hybrid_property
3499 def settings(self):
3500 data = json.loads(self.settings_json or '{}')
3501 return data
3502
3503 @settings.setter
3504 def settings(self, dct):
3505 self.settings_json = json.dumps(dct, indent=2)
3506
3507 def __repr__(self):
3508 if self.repo:
3509 scope = 'repo=%r' % self.repo
3510 else:
3511 scope = 'global'
3512
3513 return '<Integration(%r, %r)>' % (self.integration_type, scope)
3514
3515 def settings_as_dict(self):
3516 return json.loads(self.settings_json)
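The settings hybrid property above round-trips a plain dict through the settings_json column. A minimal usage sketch, assuming only the Integration model and Session shown elsewhere in this changeset; the 'slack' type key and the URL are illustrative values, not taken from the source:

from rhodecode.model.db import Integration
from rhodecode.model.meta import Session

integration = Integration()
integration.integration_type = 'slack'   # hypothetical key of a registered IntegrationType
integration.name = 'notify dev channel'  # free-form label shown in the integrations list
integration.enabled = True
integration.repo_id = None               # a NULL repo_id means a global integration
# the setter serializes the dict into settings_json; the getter parses it back
integration.settings = {'service': 'https://hooks.example.com/abc'}

Session().add(integration)
Session().commit()
assert integration.settings['service'].startswith('https://')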
@@ -0,0 +1,27 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 import sqlalchemy as sa
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8
9 from rhodecode.lib.dbmigrate.versions import _reset_base
10
11 log = logging.getLogger(__name__)
12
13
14 def upgrade(migrate_engine):
15 """
16 Upgrade operations go here.
17 Don't create your own engine; bind migrate_engine to your metadata
18 """
19 _reset_base(migrate_engine)
20 from rhodecode.lib.dbmigrate.schema import db_4_3_0_0
21
22 integrations_table = db_4_3_0_0.Integration.__table__
23 integrations_table.create()
24
25
26 def downgrade(migrate_engine):
27 pass
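downgrade() above is deliberately left as a no-op. Purely as an illustration of the same migrate-style protocol (not part of the actual migration), a symmetrical downgrade under the same assumptions would drop the table created in upgrade():

def downgrade(migrate_engine):
    # illustrative sketch only: mirrors upgrade() by dropping the table again
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_3_0_0

    integrations_table = db_4_3_0_0.Integration.__table__
    integrations_table.drop()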
@@ -0,0 +1,118 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2011-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21
22 """
23 Model for integrations
24 """
25
26
27 import logging
28 import traceback
29
30 from pylons import tmpl_context as c
31 from pylons.i18n.translation import _, ungettext
32 from sqlalchemy import or_
33 from sqlalchemy.sql.expression import false, true
34 from mako import exceptions
35
36 import rhodecode
37 from rhodecode import events
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.utils import PartialRenderer
41 from rhodecode.model import BaseModel
42 from rhodecode.model.db import Integration, User
43 from rhodecode.model.meta import Session
44 from rhodecode.integrations import integration_type_registry
45
46 log = logging.getLogger(__name__)
47
48
49 class IntegrationModel(BaseModel):
50
51 cls = Integration
52
53 def __get_integration(self, integration):
54 if isinstance(integration, Integration):
55 return integration
56 elif isinstance(integration, (int, long)):
57 return self.sa.query(Integration).get(integration)
58 else:
59 if integration:
60 raise Exception('integration must be int, long or an instance'
61 ' of Integration, got %s' % type(integration))
62
63 def delete(self, integration):
64 try:
65 integration = self.__get_integration(integration)
66 if integration:
67 Session().delete(integration)
68 return True
69 except Exception:
70 log.error(traceback.format_exc())
71 raise
72 return False
73
74 def get_integration_handler(self, integration):
75 TypeClass = integration_type_registry.get(integration.integration_type)
76 if not TypeClass:
77 log.error('No class could be found for integration type: {}'.format(
78 integration.integration_type))
79 return None
80
81 return TypeClass(integration.settings)
82
83 def send_event(self, integration, event):
84 """ Send an event to an integration """
85 handler = self.get_integration_handler(integration)
86 if handler:
87 handler.send_event(event)
88
89 def get_integrations(self, repo=None):
90 if repo:
91 return self.sa.query(Integration).filter(
92 Integration.repo_id==repo.repo_id).all()
93
94 # global integrations
95 return self.sa.query(Integration).filter(
96 Integration.repo_id==None).all()
97
98 def get_for_event(self, event, cache=False):
99 """
100 Get integrations that match an event
101 """
102 query = self.sa.query(Integration).filter(Integration.enabled==True)
103
104 if isinstance(event, events.RepoEvent): # global + repo integrations
105 query = query.filter(
106 or_(Integration.repo_id==None,
107 Integration.repo_id==event.repo.repo_id))
108 if cache:
109 query = query.options(FromCache(
110 "sql_cache_short",
111 "get_enabled_repo_integrations_%i" % event.repo.repo_id))
112 else: # only global integrations
113 query = query.filter(Integration.repo_id==None)
114 if cache:
115 query = query.options(FromCache(
116 "sql_cache_short", "get_enabled_global_integrations"))
117
118 return query.all()
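A rough sketch of how IntegrationModel might be driven from calling code, using only the methods defined in this file; the repo object is assumed to come from elsewhere (e.g. a Repository instance):

from rhodecode.model.integration import IntegrationModel

model = IntegrationModel()

configured = []
# repo-scoped integrations; get_integrations(repo=None) returns the global ones
for integration in model.get_integrations(repo=repo):
    handler = model.get_integration_handler(integration)
    if handler is None:
        continue  # type key missing from integration_type_registry
    # handler is an IntegrationType instance built from integration.settings
    configured.append((integration.name, integration.integration_type))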
@@ -0,0 +1,40 b''
1 ## -*- coding: utf-8 -*-
2 <%!
3 def inherit(context):
4 if context['c'].repo:
5 return "/admin/repos/repo_edit.html"
6 else:
7 return "/admin/settings/settings.html"
8 %>
9 <%inherit file="${inherit(context)}" />
10
11 <%def name="title()">
12 ${_('Integrations settings')}
13 %if c.rhodecode_name:
14 &middot; ${h.branding(c.rhodecode_name)}
15 %endif
16 </%def>
17
18 <%def name="breadcrumbs_links()">
19 ${h.link_to(_('Admin'),h.url('admin_home'))}
20 &raquo;
21 ${_('Integrations')}
22 </%def>
23
24 <%def name="menu_bar_nav()">
25 %if c.repo:
26 ${self.menu_items(active='repositories')}
27 %else:
28 ${self.menu_items(active='admin')}
29 %endif
30 </%def>
31
32 <%def name="menu_bar_subnav()">
33 %if c.repo:
34 ${self.repo_menu(active='options')}
35 %endif
36 </%def>
37
38 <%def name="main_content()">
39 ${next.body()}
40 </%def>
@@ -0,0 +1,108 b''
1 ## -*- coding: utf-8 -*-
2 <%inherit file="base.html"/>
3
4 <%def name="breadcrumbs_links()">
5 %if c.repo:
6 ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))}
7 &raquo;
8 ${h.link_to(_('Integrations'),request.route_url(route_name='repo_integrations_home', repo_name=c.repo.repo_name))}
9 &raquo;
10 ${h.link_to(current_IntegrationType.display_name,
11 request.route_url(route_name='repo_integrations_list',
12 repo_name=c.repo.repo_name,
13 integration=current_IntegrationType.key))}
14 %else:
15 ${h.link_to(_('Admin'),h.url('admin_home'))}
16 &raquo;
17 ${h.link_to(_('Settings'),h.url('admin_settings'))}
18 &raquo;
19 ${h.link_to(_('Integrations'),request.route_url(route_name='global_integrations_home'))}
20 &raquo;
21 ${h.link_to(current_IntegrationType.display_name,
22 request.route_url(route_name='global_integrations_list',
23 integration=current_IntegrationType.key))}
24 %endif
25 %if integration:
26 &raquo;
27 ${integration.name}
28 %endif
29 </%def>
30
31
32 <div class="panel panel-default">
33 <div class="panel-heading">
34 <h2 class="panel-title">
35 %if integration:
36 ${current_IntegrationType.display_name} - ${integration.name}
37 %else:
38 ${_('Create new %(integration_type)s integration') % {'integration_type': current_IntegrationType.display_name}}
39 %endif
40 </h2>
41 </div>
42 <div class="fields panel-body">
43 ${h.secure_form(request.url)}
44 <div class="form">
45 %for node in schema:
46 <% label_css_class = ("label-checkbox" if (node.widget == "bool") else "") %>
47 <div class="field">
48 <div class="label ${label_css_class}"><label for="${node.name}">${node.title}</label></div>
49 <div class="input">
50 %if node.widget in ["string", "int", "unicode"]:
51 ${h.text(node.name, defaults.get(node.name), class_="medium", placeholder=hasattr(node, 'placeholder') and node.placeholder or '')}
52 %elif node.widget in ["text"]:
53 ${h.textarea(node.name, defaults.get(node.name), class_="medium", placeholder=hasattr(node, 'placeholder') and node.placeholder or '')}
54 %elif node.widget == "password":
55 ${h.password(node.name, defaults.get(node.name), class_="medium")}
56 %elif node.widget == "bool":
57 <div class="checkbox">${h.checkbox(node.name, True, checked=defaults.get(node.name))}</div>
58 %elif node.widget == "select":
59 ${h.select(node.name, defaults.get(node.name), node.choices)}
60 %elif node.widget == "checkbox_list":
61 %for i, choice in enumerate(node.choices):
62 <%
63 name, value = choice, choice
64 if isinstance(choice, tuple):
65 value, name = choice
66 %>
67 <div>
68 <input id="${node.name}-${choice}"
69 name="${node.name}"
70 value="${value}"
71 type="checkbox"
72 ${value in defaults.get(node.name, []) and 'checked' or ''}>
73 <label for="${node.name}-${value}">
74 ${name}
75 </label>
76 </div>
77 %endfor
78 %elif node.widget == "readonly":
79 ${node.default}
80 %else:
81 This field's widget ${node.widget} (type ${node.typ}) cannot be displayed. It must be one of [string|unicode|int|bool|select|password|text|checkbox_list|readonly].
82 %endif
83 %if node.name in errors:
84 <span class="error-message">${errors.get(node.name)}</span>
85 <br />
86 %endif
87 <p class="help-block">${node.description}</p>
88 </div>
89 </div>
90 %endfor
91
92 ## Allow derived templates to add something below the form
93 ## input fields
94 %if hasattr(next, 'below_form_fields'):
95 ${next.below_form_fields()}
96 %endif
97
98 <div class="buttons">
99 ${h.submit('save',_('Save'),class_="btn")}
100 %if integration:
101 ${h.submit('delete',_('Delete'),class_="btn btn-danger")}
102 %endif
103 </div>
104
105 </div>
106 ${h.end_form()}
107 </div>
108 </div>
\ No newline at end of file
@@ -0,0 +1,147 b''
1 ## -*- coding: utf-8 -*-
2 <%inherit file="base.html"/>
3
4 <%def name="breadcrumbs_links()">
5 %if c.repo:
6 ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))}
7 %else:
8 ${h.link_to(_('Admin'),h.url('admin_home'))}
9 &raquo;
10 ${h.link_to(_('Settings'),h.url('admin_settings'))}
11 %endif
12 %if current_IntegrationType:
13 &raquo;
14 %if c.repo:
15 ${h.link_to(_('Integrations'),
16 request.route_url(route_name='repo_integrations_home',
17 repo_name=c.repo.repo_name))}
18 %else:
19 ${h.link_to(_('Integrations'),
20 request.route_url(route_name='global_integrations_home'))}
21 %endif
22 &raquo;
23 ${current_IntegrationType.display_name}
24 %else:
25 &raquo;
26 ${_('Integrations')}
27 %endif
28 </%def>
29 <div class="panel panel-default">
30 <div class="panel-heading">
31 <h3 class="panel-title">${_('Create new integration')}</h3>
32 </div>
33 <div class="panel-body">
34 %if not available_integrations:
35 No integrations available
36 %else:
37 %for integration in available_integrations:
38 <%
39 if c.repo:
40 create_url = request.route_url('repo_integrations_create',
41 repo_name=c.repo.repo_name,
42 integration=integration)
43 else:
44 create_url = request.route_url('global_integrations_create',
45 integration=integration)
46 %>
47 <a href="${create_url}" class="btn">
48 ${integration}
49 </a>
50 %endfor
51 %endif
52 </div>
53 </div>
54 <div class="panel panel-default">
55 <div class="panel-heading">
56 <h3 class="panel-title">${_('Current integrations')}</h3>
57 </div>
58 <div class="panel-body">
59 <table class="rctable issuetracker">
60 <thead>
61 <tr>
62 <th>${_('Enabled')}</th>
63 <th>${_('Description')}</th>
64 <th>${_('Type')}</th>
65 <th>${_('Actions')}</th>
66 <th ></th>
67 </tr>
68 </thead>
69 <tbody>
70
71 %for integration_type, integrations in sorted(current_integrations.items()):
72 %for integration in sorted(integrations, key=lambda x: x.name):
73 <tr id="integration_${integration.integration_id}">
74 <td class="td-enabled">
75 %if integration.enabled:
76 <div class="flag_status approved pull-left"></div>
77 %else:
78 <div class="flag_status rejected pull-left"></div>
79 %endif
80 </td>
81 <td class="td-description">
82 ${integration.name}
83 </td>
84 <td class="td-regex">
85 ${integration.integration_type}
86 </td>
87 <td class="td-action">
88 %if integration_type not in available_integrations:
89 ${_('unknown integration')}
90 %else:
91 <%
92 if c.repo:
93 edit_url = request.route_url('repo_integrations_edit',
94 repo_name=c.repo.repo_name,
95 integration=integration.integration_type,
96 integration_id=integration.integration_id)
97 else:
98 edit_url = request.route_url('global_integrations_edit',
99 integration=integration.integration_type,
100 integration_id=integration.integration_id)
101 %>
102 <div class="grid_edit">
103 <a href="${edit_url}">${_('Edit')}</a>
104 </div>
105 <div class="grid_delete">
106 <a href="${edit_url}"
107 class="btn btn-link btn-danger delete_integration_entry"
108 data-desc="${integration.name}"
109 data-uid="${integration.integration_id}">
110 ${_('Delete')}
111 </a>
112 </div>
113 %endif
114 </td>
115 </tr>
116 %endfor
117 %endfor
118 <tr id="last-row"></tr>
119 </tbody>
120 </table>
121 </div>
122 </div>
123 <script type="text/javascript">
124 var delete_integration = function(entry) {
125 if (confirm("Confirm to remove this integration: "+$(entry).data('desc'))) {
126 var request = $.ajax({
127 type: "POST",
128 url: $(entry).attr('href'),
129 data: {
130 'delete': 'delete',
131 'csrf_token': CSRF_TOKEN
132 },
133 success: function(){
134 location.reload();
135 },
136 error: function(data, textStatus, errorThrown){
137 alert("Error while deleting entry.\nError code {0} ({1}). URL: {2}".format(data.status,data.statusText,$(entry)[0].url));
138 }
139 });
140 }
141 };
142
143 $('.delete_integration_entry').on('click', function(e){
144 e.preventDefault();
145 delete_integration(this);
146 });
147 </script>
\ No newline at end of file
@@ -1,1654 +1,1641 b''
1 1 {
2 2 Babel = super.buildPythonPackage {
3 3 name = "Babel-1.3";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [pytz];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 10 };
11 11 meta = {
12 12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 13 };
14 14 };
15 15 Beaker = super.buildPythonPackage {
16 16 name = "Beaker-1.7.0";
17 17 buildInputs = with self; [];
18 18 doCheck = false;
19 19 propagatedBuildInputs = with self; [];
20 20 src = fetchurl {
21 21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 22 md5 = "386be3f7fe427358881eee4622b428b3";
23 23 };
24 24 meta = {
25 25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 26 };
27 27 };
28 28 CProfileV = super.buildPythonPackage {
29 29 name = "CProfileV-1.0.6";
30 30 buildInputs = with self; [];
31 31 doCheck = false;
32 32 propagatedBuildInputs = with self; [bottle];
33 33 src = fetchurl {
34 34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 36 };
37 37 meta = {
38 38 license = [ pkgs.lib.licenses.mit ];
39 39 };
40 40 };
41 41 Fabric = super.buildPythonPackage {
42 42 name = "Fabric-1.10.0";
43 43 buildInputs = with self; [];
44 44 doCheck = false;
45 45 propagatedBuildInputs = with self; [paramiko];
46 46 src = fetchurl {
47 47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 FormEncode = super.buildPythonPackage {
55 55 name = "FormEncode-1.2.4";
56 56 buildInputs = with self; [];
57 57 doCheck = false;
58 58 propagatedBuildInputs = with self; [];
59 59 src = fetchurl {
60 60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 62 };
63 63 meta = {
64 64 license = [ pkgs.lib.licenses.psfl ];
65 65 };
66 66 };
67 67 Jinja2 = super.buildPythonPackage {
68 68 name = "Jinja2-2.7.3";
69 69 buildInputs = with self; [];
70 70 doCheck = false;
71 71 propagatedBuildInputs = with self; [MarkupSafe];
72 72 src = fetchurl {
73 73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 75 };
76 76 meta = {
77 77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 78 };
79 79 };
80 80 Mako = super.buildPythonPackage {
81 81 name = "Mako-1.0.1";
82 82 buildInputs = with self; [];
83 83 doCheck = false;
84 84 propagatedBuildInputs = with self; [MarkupSafe];
85 85 src = fetchurl {
86 86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 88 };
89 89 meta = {
90 90 license = [ pkgs.lib.licenses.mit ];
91 91 };
92 92 };
93 93 Markdown = super.buildPythonPackage {
94 94 name = "Markdown-2.6.2";
95 95 buildInputs = with self; [];
96 96 doCheck = false;
97 97 propagatedBuildInputs = with self; [];
98 98 src = fetchurl {
99 99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 101 };
102 102 meta = {
103 103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 104 };
105 105 };
106 106 MarkupSafe = super.buildPythonPackage {
107 107 name = "MarkupSafe-0.23";
108 108 buildInputs = with self; [];
109 109 doCheck = false;
110 110 propagatedBuildInputs = with self; [];
111 111 src = fetchurl {
112 112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 114 };
115 115 meta = {
116 116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 117 };
118 118 };
119 119 MySQL-python = super.buildPythonPackage {
120 120 name = "MySQL-python-1.2.5";
121 121 buildInputs = with self; [];
122 122 doCheck = false;
123 123 propagatedBuildInputs = with self; [];
124 124 src = fetchurl {
125 125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 127 };
128 128 meta = {
129 129 license = [ pkgs.lib.licenses.gpl1 ];
130 130 };
131 131 };
132 132 Paste = super.buildPythonPackage {
133 133 name = "Paste-2.0.2";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [six];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.mit ];
143 143 };
144 144 };
145 145 PasteDeploy = super.buildPythonPackage {
146 146 name = "PasteDeploy-1.5.2";
147 147 buildInputs = with self; [];
148 148 doCheck = false;
149 149 propagatedBuildInputs = with self; [];
150 150 src = fetchurl {
151 151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 152 md5 = "352b7205c78c8de4987578d19431af3b";
153 153 };
154 154 meta = {
155 155 license = [ pkgs.lib.licenses.mit ];
156 156 };
157 157 };
158 158 PasteScript = super.buildPythonPackage {
159 159 name = "PasteScript-1.7.5";
160 160 buildInputs = with self; [];
161 161 doCheck = false;
162 162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 163 src = fetchurl {
164 164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 166 };
167 167 meta = {
168 168 license = [ pkgs.lib.licenses.mit ];
169 169 };
170 170 };
171 171 Pygments = super.buildPythonPackage {
172 172 name = "Pygments-2.1.3";
173 173 buildInputs = with self; [];
174 174 doCheck = false;
175 175 propagatedBuildInputs = with self; [];
176 176 src = fetchurl {
177 177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 Pylons = super.buildPythonPackage {
185 185 name = "Pylons-1.0.1";
186 186 buildInputs = with self; [];
187 187 doCheck = false;
188 188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 189 src = fetchurl {
190 190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 192 };
193 193 meta = {
194 194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 195 };
196 196 };
197 197 Pyro4 = super.buildPythonPackage {
198 198 name = "Pyro4-4.41";
199 199 buildInputs = with self; [];
200 200 doCheck = false;
201 201 propagatedBuildInputs = with self; [serpent];
202 202 src = fetchurl {
203 203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 205 };
206 206 meta = {
207 207 license = [ pkgs.lib.licenses.mit ];
208 208 };
209 209 };
210 210 Routes = super.buildPythonPackage {
211 211 name = "Routes-1.13";
212 212 buildInputs = with self; [];
213 213 doCheck = false;
214 214 propagatedBuildInputs = with self; [repoze.lru];
215 215 src = fetchurl {
216 216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 218 };
219 219 meta = {
220 220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 221 };
222 222 };
223 223 SQLAlchemy = super.buildPythonPackage {
224 224 name = "SQLAlchemy-0.9.9";
225 225 buildInputs = with self; [];
226 226 doCheck = false;
227 227 propagatedBuildInputs = with self; [];
228 228 src = fetchurl {
229 229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.mit ];
234 234 };
235 235 };
236 236 Sphinx = super.buildPythonPackage {
237 237 name = "Sphinx-1.2.2";
238 238 buildInputs = with self; [];
239 239 doCheck = false;
240 240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 241 src = fetchurl {
242 242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 244 };
245 245 meta = {
246 246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 247 };
248 248 };
249 249 Tempita = super.buildPythonPackage {
250 250 name = "Tempita-0.5.2";
251 251 buildInputs = with self; [];
252 252 doCheck = false;
253 253 propagatedBuildInputs = with self; [];
254 254 src = fetchurl {
255 255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 257 };
258 258 meta = {
259 259 license = [ pkgs.lib.licenses.mit ];
260 260 };
261 261 };
262 262 URLObject = super.buildPythonPackage {
263 263 name = "URLObject-2.4.0";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 270 };
271 271 meta = {
272 272 license = [ ];
273 273 };
274 274 };
275 275 WebError = super.buildPythonPackage {
276 276 name = "WebError-0.10.3";
277 277 buildInputs = with self; [];
278 278 doCheck = false;
279 279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 280 src = fetchurl {
281 281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 WebHelpers = super.buildPythonPackage {
289 289 name = "WebHelpers-1.3";
290 290 buildInputs = with self; [];
291 291 doCheck = false;
292 292 propagatedBuildInputs = with self; [MarkupSafe];
293 293 src = fetchurl {
294 294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 295 md5 = "32749ffadfc40fea51075a7def32588b";
296 296 };
297 297 meta = {
298 298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 299 };
300 300 };
301 301 WebHelpers2 = super.buildPythonPackage {
302 302 name = "WebHelpers2-2.0";
303 303 buildInputs = with self; [];
304 304 doCheck = false;
305 305 propagatedBuildInputs = with self; [MarkupSafe six];
306 306 src = fetchurl {
307 307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 309 };
310 310 meta = {
311 311 license = [ pkgs.lib.licenses.mit ];
312 312 };
313 313 };
314 314 WebOb = super.buildPythonPackage {
315 315 name = "WebOb-1.3.1";
316 316 buildInputs = with self; [];
317 317 doCheck = false;
318 318 propagatedBuildInputs = with self; [];
319 319 src = fetchurl {
320 320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 321 md5 = "20918251c5726956ba8fef22d1556177";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.mit ];
325 325 };
326 326 };
327 327 WebTest = super.buildPythonPackage {
328 328 name = "WebTest-1.4.3";
329 329 buildInputs = with self; [];
330 330 doCheck = false;
331 331 propagatedBuildInputs = with self; [WebOb];
332 332 src = fetchurl {
333 333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 334 md5 = "631ce728bed92c681a4020a36adbc353";
335 335 };
336 336 meta = {
337 337 license = [ pkgs.lib.licenses.mit ];
338 338 };
339 339 };
340 340 Whoosh = super.buildPythonPackage {
341 341 name = "Whoosh-2.7.0";
342 342 buildInputs = with self; [];
343 343 doCheck = false;
344 344 propagatedBuildInputs = with self; [];
345 345 src = fetchurl {
346 346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 348 };
349 349 meta = {
350 350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 351 };
352 352 };
353 353 alembic = super.buildPythonPackage {
354 354 name = "alembic-0.8.4";
355 355 buildInputs = with self; [];
356 356 doCheck = false;
357 357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 358 src = fetchurl {
359 359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 361 };
362 362 meta = {
363 363 license = [ pkgs.lib.licenses.mit ];
364 364 };
365 365 };
366 366 amqplib = super.buildPythonPackage {
367 367 name = "amqplib-1.0.2";
368 368 buildInputs = with self; [];
369 369 doCheck = false;
370 370 propagatedBuildInputs = with self; [];
371 371 src = fetchurl {
372 372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 374 };
375 375 meta = {
376 376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 377 };
378 378 };
379 379 anyjson = super.buildPythonPackage {
380 380 name = "anyjson-0.3.3";
381 381 buildInputs = with self; [];
382 382 doCheck = false;
383 383 propagatedBuildInputs = with self; [];
384 384 src = fetchurl {
385 385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 390 };
391 391 };
392 392 appenlight-client = super.buildPythonPackage {
393 393 name = "appenlight-client-0.6.14";
394 394 buildInputs = with self; [];
395 395 doCheck = false;
396 396 propagatedBuildInputs = with self; [WebOb requests];
397 397 src = fetchurl {
398 398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 400 };
401 401 meta = {
402 402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 403 };
404 404 };
405 405 authomatic = super.buildPythonPackage {
406 406 name = "authomatic-0.1.0.post1";
407 407 buildInputs = with self; [];
408 408 doCheck = false;
409 409 propagatedBuildInputs = with self; [];
410 410 src = fetchurl {
411 411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 413 };
414 414 meta = {
415 415 license = [ pkgs.lib.licenses.mit ];
416 416 };
417 417 };
418 418 backport-ipaddress = super.buildPythonPackage {
419 419 name = "backport-ipaddress-0.1";
420 420 buildInputs = with self; [];
421 421 doCheck = false;
422 422 propagatedBuildInputs = with self; [];
423 423 src = fetchurl {
424 424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 426 };
427 427 meta = {
428 428 license = [ pkgs.lib.licenses.psfl ];
429 429 };
430 430 };
431 431 bottle = super.buildPythonPackage {
432 432 name = "bottle-0.12.8";
433 433 buildInputs = with self; [];
434 434 doCheck = false;
435 435 propagatedBuildInputs = with self; [];
436 436 src = fetchurl {
437 437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.mit ];
442 442 };
443 443 };
444 444 bumpversion = super.buildPythonPackage {
445 445 name = "bumpversion-0.5.3";
446 446 buildInputs = with self; [];
447 447 doCheck = false;
448 448 propagatedBuildInputs = with self; [];
449 449 src = fetchurl {
450 450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 452 };
453 453 meta = {
454 454 license = [ pkgs.lib.licenses.mit ];
455 455 };
456 456 };
457 457 celery = super.buildPythonPackage {
458 458 name = "celery-2.2.10";
459 459 buildInputs = with self; [];
460 460 doCheck = false;
461 461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 462 src = fetchurl {
463 463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 464 md5 = "898bc87e54f278055b561316ba73e222";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 click = super.buildPythonPackage {
471 471 name = "click-5.1";
472 472 buildInputs = with self; [];
473 473 doCheck = false;
474 474 propagatedBuildInputs = with self; [];
475 475 src = fetchurl {
476 476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 478 };
479 479 meta = {
480 480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 481 };
482 482 };
483 483 colander = super.buildPythonPackage {
484 484 name = "colander-1.2";
485 485 buildInputs = with self; [];
486 486 doCheck = false;
487 487 propagatedBuildInputs = with self; [translationstring iso8601];
488 488 src = fetchurl {
489 489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 491 };
492 492 meta = {
493 493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 494 };
495 495 };
496 496 configobj = super.buildPythonPackage {
497 497 name = "configobj-5.0.6";
498 498 buildInputs = with self; [];
499 499 doCheck = false;
500 500 propagatedBuildInputs = with self; [six];
501 501 src = fetchurl {
502 502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 504 };
505 505 meta = {
506 506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 507 };
508 508 };
509 509 cov-core = super.buildPythonPackage {
510 510 name = "cov-core-1.15.0";
511 511 buildInputs = with self; [];
512 512 doCheck = false;
513 513 propagatedBuildInputs = with self; [coverage];
514 514 src = fetchurl {
515 515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 517 };
518 518 meta = {
519 519 license = [ pkgs.lib.licenses.mit ];
520 520 };
521 521 };
522 522 coverage = super.buildPythonPackage {
523 523 name = "coverage-3.7.1";
524 524 buildInputs = with self; [];
525 525 doCheck = false;
526 526 propagatedBuildInputs = with self; [];
527 527 src = fetchurl {
528 528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 530 };
531 531 meta = {
532 532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 533 };
534 534 };
535 535 cssselect = super.buildPythonPackage {
536 536 name = "cssselect-0.9.1";
537 537 buildInputs = with self; [];
538 538 doCheck = false;
539 539 propagatedBuildInputs = with self; [];
540 540 src = fetchurl {
541 541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 546 };
547 547 };
548 548 decorator = super.buildPythonPackage {
549 549 name = "decorator-3.4.2";
550 550 buildInputs = with self; [];
551 551 doCheck = false;
552 552 propagatedBuildInputs = with self; [];
553 553 src = fetchurl {
554 554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 556 };
557 557 meta = {
558 558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 559 };
560 560 };
561 561 docutils = super.buildPythonPackage {
562 562 name = "docutils-0.12";
563 563 buildInputs = with self; [];
564 564 doCheck = false;
565 565 propagatedBuildInputs = with self; [];
566 566 src = fetchurl {
567 567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 568 md5 = "4622263b62c5c771c03502afa3157768";
569 569 };
570 570 meta = {
571 571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 572 };
573 573 };
574 574 dogpile.cache = super.buildPythonPackage {
575 575 name = "dogpile.cache-0.6.1";
576 576 buildInputs = with self; [];
577 577 doCheck = false;
578 578 propagatedBuildInputs = with self; [dogpile.core];
579 579 src = fetchurl {
580 580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 582 };
583 583 meta = {
584 584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 585 };
586 586 };
587 587 dogpile.core = super.buildPythonPackage {
588 588 name = "dogpile.core-0.4.1";
589 589 buildInputs = with self; [];
590 590 doCheck = false;
591 591 propagatedBuildInputs = with self; [];
592 592 src = fetchurl {
593 593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 595 };
596 596 meta = {
597 597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 598 };
599 599 };
600 600 dulwich = super.buildPythonPackage {
601 601 name = "dulwich-0.12.0";
602 602 buildInputs = with self; [];
603 603 doCheck = false;
604 604 propagatedBuildInputs = with self; [];
605 605 src = fetchurl {
606 606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 608 };
609 609 meta = {
610 610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 611 };
612 612 };
613 613 ecdsa = super.buildPythonPackage {
614 614 name = "ecdsa-0.11";
615 615 buildInputs = with self; [];
616 616 doCheck = false;
617 617 propagatedBuildInputs = with self; [];
618 618 src = fetchurl {
619 619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 621 };
622 622 meta = {
623 623 license = [ pkgs.lib.licenses.mit ];
624 624 };
625 625 };
626 626 elasticsearch = super.buildPythonPackage {
627 627 name = "elasticsearch-2.3.0";
628 628 buildInputs = with self; [];
629 629 doCheck = false;
630 630 propagatedBuildInputs = with self; [urllib3];
631 631 src = fetchurl {
632 632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 634 };
635 635 meta = {
636 636 license = [ pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 elasticsearch-dsl = super.buildPythonPackage {
640 640 name = "elasticsearch-dsl-2.0.0";
641 641 buildInputs = with self; [];
642 642 doCheck = false;
643 643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 644 src = fetchurl {
645 645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 647 };
648 648 meta = {
649 649 license = [ pkgs.lib.licenses.asl20 ];
650 650 };
651 651 };
652 652 flake8 = super.buildPythonPackage {
653 653 name = "flake8-2.4.1";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 660 };
661 661 meta = {
662 662 license = [ pkgs.lib.licenses.mit ];
663 663 };
664 664 };
665 665 future = super.buildPythonPackage {
666 666 name = "future-0.14.3";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 673 };
674 674 meta = {
675 675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 676 };
677 677 };
678 678 futures = super.buildPythonPackage {
679 679 name = "futures-3.0.2";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 689 };
690 690 };
691 691 gnureadline = super.buildPythonPackage {
692 692 name = "gnureadline-6.3.3";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.gpl1 ];
702 702 };
703 703 };
704 704 gprof2dot = super.buildPythonPackage {
705 705 name = "gprof2dot-2015.12.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "LGPL"; } ];
715 715 };
716 716 };
717 717 gunicorn = super.buildPythonPackage {
718 718 name = "gunicorn-19.6.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 infrae.cache = super.buildPythonPackage {
731 731 name = "infrae.cache-1.0.1";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.zpt21 ];
741 741 };
742 742 };
743 743 invoke = super.buildPythonPackage {
744 744 name = "invoke-0.13.0";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 754 };
755 755 };
756 756 ipdb = super.buildPythonPackage {
757 757 name = "ipdb-0.8";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [ipython];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.gpl1 ];
767 767 };
768 768 };
769 769 ipython = super.buildPythonPackage {
770 770 name = "ipython-3.1.0";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 iso8601 = super.buildPythonPackage {
783 783 name = "iso8601-0.1.11";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.mit ];
793 793 };
794 794 };
795 795 itsdangerous = super.buildPythonPackage {
796 796 name = "itsdangerous-0.24";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 803 };
804 804 meta = {
805 805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 806 };
807 807 };
808 808 kombu = super.buildPythonPackage {
809 809 name = "kombu-1.5.1";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [anyjson amqplib];
813 813 src = fetchurl {
814 814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 lxml = super.buildPythonPackage {
822 822 name = "lxml-3.4.4";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 832 };
833 833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
847 834 mccabe = super.buildPythonPackage {
848 835 name = "mccabe-0.3";
849 836 buildInputs = with self; [];
850 837 doCheck = false;
851 838 propagatedBuildInputs = with self; [];
852 839 src = fetchurl {
853 840 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
854 841 md5 = "81640948ff226f8c12b3277059489157";
855 842 };
856 843 meta = {
857 844 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
858 845 };
859 846 };
860 847 meld3 = super.buildPythonPackage {
861 848 name = "meld3-1.0.2";
862 849 buildInputs = with self; [];
863 850 doCheck = false;
864 851 propagatedBuildInputs = with self; [];
865 852 src = fetchurl {
866 853 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
867 854 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
868 855 };
869 856 meta = {
870 857 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
871 858 };
872 859 };
873 860 mock = super.buildPythonPackage {
874 861 name = "mock-1.0.1";
875 862 buildInputs = with self; [];
876 863 doCheck = false;
877 864 propagatedBuildInputs = with self; [];
878 865 src = fetchurl {
879 866 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
880 867 md5 = "869f08d003c289a97c1a6610faf5e913";
881 868 };
882 869 meta = {
883 870 license = [ pkgs.lib.licenses.bsdOriginal ];
884 871 };
885 872 };
886 873 msgpack-python = super.buildPythonPackage {
887 874 name = "msgpack-python-0.4.6";
888 875 buildInputs = with self; [];
889 876 doCheck = false;
890 877 propagatedBuildInputs = with self; [];
891 878 src = fetchurl {
892 879 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
893 880 md5 = "8b317669314cf1bc881716cccdaccb30";
894 881 };
895 882 meta = {
896 883 license = [ pkgs.lib.licenses.asl20 ];
897 884 };
898 885 };
899 886 nose = super.buildPythonPackage {
900 887 name = "nose-1.3.6";
901 888 buildInputs = with self; [];
902 889 doCheck = false;
903 890 propagatedBuildInputs = with self; [];
904 891 src = fetchurl {
905 892 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
906 893 md5 = "0ca546d81ca8309080fc80cb389e7a16";
907 894 };
908 895 meta = {
909 896 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
910 897 };
911 898 };
912 899 objgraph = super.buildPythonPackage {
913 900 name = "objgraph-2.0.0";
914 901 buildInputs = with self; [];
915 902 doCheck = false;
916 903 propagatedBuildInputs = with self; [];
917 904 src = fetchurl {
918 905 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
919 906 md5 = "25b0d5e5adc74aa63ead15699614159c";
920 907 };
921 908 meta = {
922 909 license = [ pkgs.lib.licenses.mit ];
923 910 };
924 911 };
925 912 packaging = super.buildPythonPackage {
926 913 name = "packaging-15.2";
927 914 buildInputs = with self; [];
928 915 doCheck = false;
929 916 propagatedBuildInputs = with self; [];
930 917 src = fetchurl {
931 918 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
932 919 md5 = "c16093476f6ced42128bf610e5db3784";
933 920 };
934 921 meta = {
935 922 license = [ pkgs.lib.licenses.asl20 ];
936 923 };
937 924 };
938 925 paramiko = super.buildPythonPackage {
939 926 name = "paramiko-1.15.1";
940 927 buildInputs = with self; [];
941 928 doCheck = false;
942 929 propagatedBuildInputs = with self; [pycrypto ecdsa];
943 930 src = fetchurl {
944 931 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
945 932 md5 = "48c274c3f9b1282932567b21f6acf3b5";
946 933 };
947 934 meta = {
948 935 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
949 936 };
950 937 };
951 938 pep8 = super.buildPythonPackage {
952 939 name = "pep8-1.5.7";
953 940 buildInputs = with self; [];
954 941 doCheck = false;
955 942 propagatedBuildInputs = with self; [];
956 943 src = fetchurl {
957 944 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
958 945 md5 = "f6adbdd69365ecca20513c709f9b7c93";
959 946 };
960 947 meta = {
961 948 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
962 949 };
963 950 };
964 951 psutil = super.buildPythonPackage {
965 952 name = "psutil-2.2.1";
966 953 buildInputs = with self; [];
967 954 doCheck = false;
968 955 propagatedBuildInputs = with self; [];
969 956 src = fetchurl {
970 957 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
971 958 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
972 959 };
973 960 meta = {
974 961 license = [ pkgs.lib.licenses.bsdOriginal ];
975 962 };
976 963 };
977 964 psycopg2 = super.buildPythonPackage {
978 965 name = "psycopg2-2.6.1";
979 966 buildInputs = with self; [];
980 967 doCheck = false;
981 968 propagatedBuildInputs = with self; [];
982 969 src = fetchurl {
983 970 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
984 971 md5 = "842b44f8c95517ed5b792081a2370da1";
985 972 };
986 973 meta = {
987 974 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
988 975 };
989 976 };
990 977 py = super.buildPythonPackage {
991 978 name = "py-1.4.29";
992 979 buildInputs = with self; [];
993 980 doCheck = false;
994 981 propagatedBuildInputs = with self; [];
995 982 src = fetchurl {
996 983 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
997 984 md5 = "c28e0accba523a29b35a48bb703fb96c";
998 985 };
999 986 meta = {
1000 987 license = [ pkgs.lib.licenses.mit ];
1001 988 };
1002 989 };
1003 990 py-bcrypt = super.buildPythonPackage {
1004 991 name = "py-bcrypt-0.4";
1005 992 buildInputs = with self; [];
1006 993 doCheck = false;
1007 994 propagatedBuildInputs = with self; [];
1008 995 src = fetchurl {
1009 996 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1010 997 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1011 998 };
1012 999 meta = {
1013 1000 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 1001 };
1015 1002 };
1016 1003 py-gfm = super.buildPythonPackage {
1017 1004 name = "py-gfm-0.1.3";
1018 1005 buildInputs = with self; [];
1019 1006 doCheck = false;
1020 1007 propagatedBuildInputs = with self; [setuptools Markdown];
1021 1008 src = fetchurl {
1022 1009 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1023 1010 md5 = "e588d9e69640a241b97e2c59c22527a6";
1024 1011 };
1025 1012 meta = {
1026 1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1027 1014 };
1028 1015 };
1029 1016 pycrypto = super.buildPythonPackage {
1030 1017 name = "pycrypto-2.6.1";
1031 1018 buildInputs = with self; [];
1032 1019 doCheck = false;
1033 1020 propagatedBuildInputs = with self; [];
1034 1021 src = fetchurl {
1035 1022 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1036 1023 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1037 1024 };
1038 1025 meta = {
1039 1026 license = [ pkgs.lib.licenses.publicDomain ];
1040 1027 };
1041 1028 };
1042 1029 pycurl = super.buildPythonPackage {
1043 1030 name = "pycurl-7.19.5";
1044 1031 buildInputs = with self; [];
1045 1032 doCheck = false;
1046 1033 propagatedBuildInputs = with self; [];
1047 1034 src = fetchurl {
1048 1035 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1049 1036 md5 = "47b4eac84118e2606658122104e62072";
1050 1037 };
1051 1038 meta = {
1052 1039 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1053 1040 };
1054 1041 };
1055 1042 pyflakes = super.buildPythonPackage {
1056 1043 name = "pyflakes-0.8.1";
1057 1044 buildInputs = with self; [];
1058 1045 doCheck = false;
1059 1046 propagatedBuildInputs = with self; [];
1060 1047 src = fetchurl {
1061 1048 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1062 1049 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1063 1050 };
1064 1051 meta = {
1065 1052 license = [ pkgs.lib.licenses.mit ];
1066 1053 };
1067 1054 };
1068 1055 pyparsing = super.buildPythonPackage {
1069 1056 name = "pyparsing-1.5.7";
1070 1057 buildInputs = with self; [];
1071 1058 doCheck = false;
1072 1059 propagatedBuildInputs = with self; [];
1073 1060 src = fetchurl {
1074 1061 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1075 1062 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1076 1063 };
1077 1064 meta = {
1078 1065 license = [ pkgs.lib.licenses.mit ];
1079 1066 };
1080 1067 };
1081 1068 pyramid = super.buildPythonPackage {
1082 1069 name = "pyramid-1.6.1";
1083 1070 buildInputs = with self; [];
1084 1071 doCheck = false;
1085 1072 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1086 1073 src = fetchurl {
1087 1074 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1088 1075 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1089 1076 };
1090 1077 meta = {
1091 1078 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 1079 };
1093 1080 };
1094 1081 pyramid-beaker = super.buildPythonPackage {
1095 1082 name = "pyramid-beaker-0.8";
1096 1083 buildInputs = with self; [];
1097 1084 doCheck = false;
1098 1085 propagatedBuildInputs = with self; [pyramid Beaker];
1099 1086 src = fetchurl {
1100 1087 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1101 1088 md5 = "22f14be31b06549f80890e2c63a93834";
1102 1089 };
1103 1090 meta = {
1104 1091 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1105 1092 };
1106 1093 };
1107 1094 pyramid-debugtoolbar = super.buildPythonPackage {
1108 1095 name = "pyramid-debugtoolbar-2.4.2";
1109 1096 buildInputs = with self; [];
1110 1097 doCheck = false;
1111 1098 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1112 1099 src = fetchurl {
1113 1100 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1114 1101 md5 = "073ea67086cc4bd5decc3a000853642d";
1115 1102 };
1116 1103 meta = {
1117 1104 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1118 1105 };
1119 1106 };
1120 1107 pyramid-jinja2 = super.buildPythonPackage {
1121 1108 name = "pyramid-jinja2-2.5";
1122 1109 buildInputs = with self; [];
1123 1110 doCheck = false;
1124 1111 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1125 1112 src = fetchurl {
1126 1113 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1127 1114 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1128 1115 };
1129 1116 meta = {
1130 1117 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 1118 };
1132 1119 };
1133 1120 pyramid-mako = super.buildPythonPackage {
1134 1121 name = "pyramid-mako-1.0.2";
1135 1122 buildInputs = with self; [];
1136 1123 doCheck = false;
1137 1124 propagatedBuildInputs = with self; [pyramid Mako];
1138 1125 src = fetchurl {
1139 1126 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1140 1127 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1141 1128 };
1142 1129 meta = {
1143 1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1144 1131 };
1145 1132 };
1146 1133 pysqlite = super.buildPythonPackage {
1147 1134 name = "pysqlite-2.6.3";
1148 1135 buildInputs = with self; [];
1149 1136 doCheck = false;
1150 1137 propagatedBuildInputs = with self; [];
1151 1138 src = fetchurl {
1152 1139 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1153 1140 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1154 1141 };
1155 1142 meta = {
1156 1143 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1157 1144 };
1158 1145 };
1159 1146 pytest = super.buildPythonPackage {
1160 1147 name = "pytest-2.8.5";
1161 1148 buildInputs = with self; [];
1162 1149 doCheck = false;
1163 1150 propagatedBuildInputs = with self; [py];
1164 1151 src = fetchurl {
1165 1152 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1166 1153 md5 = "8493b06f700862f1294298d6c1b715a9";
1167 1154 };
1168 1155 meta = {
1169 1156 license = [ pkgs.lib.licenses.mit ];
1170 1157 };
1171 1158 };
1172 1159 pytest-catchlog = super.buildPythonPackage {
1173 1160 name = "pytest-catchlog-1.2.2";
1174 1161 buildInputs = with self; [];
1175 1162 doCheck = false;
1176 1163 propagatedBuildInputs = with self; [py pytest];
1177 1164 src = fetchurl {
1178 1165 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1179 1166 md5 = "09d890c54c7456c818102b7ff8c182c8";
1180 1167 };
1181 1168 meta = {
1182 1169 license = [ pkgs.lib.licenses.mit ];
1183 1170 };
1184 1171 };
1185 1172 pytest-cov = super.buildPythonPackage {
1186 1173 name = "pytest-cov-1.8.1";
1187 1174 buildInputs = with self; [];
1188 1175 doCheck = false;
1189 1176 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1190 1177 src = fetchurl {
1191 1178 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1192 1179 md5 = "76c778afa2494088270348be42d759fc";
1193 1180 };
1194 1181 meta = {
1195 1182 license = [ pkgs.lib.licenses.mit ];
1196 1183 };
1197 1184 };
1198 1185 pytest-profiling = super.buildPythonPackage {
1199 1186 name = "pytest-profiling-1.0.1";
1200 1187 buildInputs = with self; [];
1201 1188 doCheck = false;
1202 1189 propagatedBuildInputs = with self; [six pytest gprof2dot];
1203 1190 src = fetchurl {
1204 1191 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1205 1192 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1206 1193 };
1207 1194 meta = {
1208 1195 license = [ pkgs.lib.licenses.mit ];
1209 1196 };
1210 1197 };
1211 1198 pytest-runner = super.buildPythonPackage {
1212 1199 name = "pytest-runner-2.7.1";
1213 1200 buildInputs = with self; [];
1214 1201 doCheck = false;
1215 1202 propagatedBuildInputs = with self; [];
1216 1203 src = fetchurl {
1217 1204 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1218 1205 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1219 1206 };
1220 1207 meta = {
1221 1208 license = [ pkgs.lib.licenses.mit ];
1222 1209 };
1223 1210 };
1224 1211 pytest-timeout = super.buildPythonPackage {
1225 1212 name = "pytest-timeout-0.4";
1226 1213 buildInputs = with self; [];
1227 1214 doCheck = false;
1228 1215 propagatedBuildInputs = with self; [pytest];
1229 1216 src = fetchurl {
1230 1217 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1231 1218 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1232 1219 };
1233 1220 meta = {
1234 1221 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1235 1222 };
1236 1223 };
1237 1224 python-dateutil = super.buildPythonPackage {
1238 1225 name = "python-dateutil-1.5";
1239 1226 buildInputs = with self; [];
1240 1227 doCheck = false;
1241 1228 propagatedBuildInputs = with self; [];
1242 1229 src = fetchurl {
1243 1230 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1244 1231 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1245 1232 };
1246 1233 meta = {
1247 1234 license = [ pkgs.lib.licenses.psfl ];
1248 1235 };
1249 1236 };
1250 1237 python-editor = super.buildPythonPackage {
1251 1238 name = "python-editor-1.0.1";
1252 1239 buildInputs = with self; [];
1253 1240 doCheck = false;
1254 1241 propagatedBuildInputs = with self; [];
1255 1242 src = fetchurl {
1256 1243 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1257 1244 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1258 1245 };
1259 1246 meta = {
1260 1247 license = [ pkgs.lib.licenses.asl20 ];
1261 1248 };
1262 1249 };
1263 1250 python-ldap = super.buildPythonPackage {
1264 1251 name = "python-ldap-2.4.19";
1265 1252 buildInputs = with self; [];
1266 1253 doCheck = false;
1267 1254 propagatedBuildInputs = with self; [setuptools];
1268 1255 src = fetchurl {
1269 1256 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1270 1257 md5 = "b941bf31d09739492aa19ef679e94ae3";
1271 1258 };
1272 1259 meta = {
1273 1260 license = [ pkgs.lib.licenses.psfl ];
1274 1261 };
1275 1262 };
1276 1263 python-memcached = super.buildPythonPackage {
1277 1264 name = "python-memcached-1.57";
1278 1265 buildInputs = with self; [];
1279 1266 doCheck = false;
1280 1267 propagatedBuildInputs = with self; [six];
1281 1268 src = fetchurl {
1282 1269 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1283 1270 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1284 1271 };
1285 1272 meta = {
1286 1273 license = [ pkgs.lib.licenses.psfl ];
1287 1274 };
1288 1275 };
1289 1276 python-pam = super.buildPythonPackage {
1290 1277 name = "python-pam-1.8.2";
1291 1278 buildInputs = with self; [];
1292 1279 doCheck = false;
1293 1280 propagatedBuildInputs = with self; [];
1294 1281 src = fetchurl {
1295 1282 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1296 1283 md5 = "db71b6b999246fb05d78ecfbe166629d";
1297 1284 };
1298 1285 meta = {
1299 1286 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1300 1287 };
1301 1288 };
1302 1289 pytz = super.buildPythonPackage {
1303 1290 name = "pytz-2015.4";
1304 1291 buildInputs = with self; [];
1305 1292 doCheck = false;
1306 1293 propagatedBuildInputs = with self; [];
1307 1294 src = fetchurl {
1308 1295 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1309 1296 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1310 1297 };
1311 1298 meta = {
1312 1299 license = [ pkgs.lib.licenses.mit ];
1313 1300 };
1314 1301 };
1315 1302 pyzmq = super.buildPythonPackage {
1316 1303 name = "pyzmq-14.6.0";
1317 1304 buildInputs = with self; [];
1318 1305 doCheck = false;
1319 1306 propagatedBuildInputs = with self; [];
1320 1307 src = fetchurl {
1321 1308 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1322 1309 md5 = "395b5de95a931afa5b14c9349a5b8024";
1323 1310 };
1324 1311 meta = {
1325 1312 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1326 1313 };
1327 1314 };
1328 1315 recaptcha-client = super.buildPythonPackage {
1329 1316 name = "recaptcha-client-1.0.6";
1330 1317 buildInputs = with self; [];
1331 1318 doCheck = false;
1332 1319 propagatedBuildInputs = with self; [];
1333 1320 src = fetchurl {
1334 1321 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1335 1322 md5 = "74228180f7e1fb76c4d7089160b0d919";
1336 1323 };
1337 1324 meta = {
1338 1325 license = [ { fullName = "MIT/X11"; } ];
1339 1326 };
1340 1327 };
1341 1328 repoze.lru = super.buildPythonPackage {
1342 1329 name = "repoze.lru-0.6";
1343 1330 buildInputs = with self; [];
1344 1331 doCheck = false;
1345 1332 propagatedBuildInputs = with self; [];
1346 1333 src = fetchurl {
1347 1334 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1348 1335 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1349 1336 };
1350 1337 meta = {
1351 1338 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1352 1339 };
1353 1340 };
1354 1341 requests = super.buildPythonPackage {
1355 1342 name = "requests-2.9.1";
1356 1343 buildInputs = with self; [];
1357 1344 doCheck = false;
1358 1345 propagatedBuildInputs = with self; [];
1359 1346 src = fetchurl {
1360 1347 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1361 1348 md5 = "0b7f480d19012ec52bab78292efd976d";
1362 1349 };
1363 1350 meta = {
1364 1351 license = [ pkgs.lib.licenses.asl20 ];
1365 1352 };
1366 1353 };
1367 1354 rhodecode-enterprise-ce = super.buildPythonPackage {
1368 1355 name = "rhodecode-enterprise-ce-4.3.0";
1369 1356 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1370 1357 doCheck = true;
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1372 1359 src = ./.;
1373 1360 meta = {
1374 1361 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1375 1362 };
1376 1363 };
1377 1364 rhodecode-tools = super.buildPythonPackage {
1378 1365 name = "rhodecode-tools-0.8.3";
1379 1366 buildInputs = with self; [];
1380 1367 doCheck = false;
1381 1368 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1382 1369 src = fetchurl {
1383 1370 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1384 1371 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1385 1372 };
1386 1373 meta = {
1387 1374 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1388 1375 };
1389 1376 };
1390 1377 serpent = super.buildPythonPackage {
1391 1378 name = "serpent-1.12";
1392 1379 buildInputs = with self; [];
1393 1380 doCheck = false;
1394 1381 propagatedBuildInputs = with self; [];
1395 1382 src = fetchurl {
1396 1383 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1397 1384 md5 = "05869ac7b062828b34f8f927f0457b65";
1398 1385 };
1399 1386 meta = {
1400 1387 license = [ pkgs.lib.licenses.mit ];
1401 1388 };
1402 1389 };
1403 1390 setproctitle = super.buildPythonPackage {
1404 1391 name = "setproctitle-1.1.8";
1405 1392 buildInputs = with self; [];
1406 1393 doCheck = false;
1407 1394 propagatedBuildInputs = with self; [];
1408 1395 src = fetchurl {
1409 1396 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1410 1397 md5 = "728f4c8c6031bbe56083a48594027edd";
1411 1398 };
1412 1399 meta = {
1413 1400 license = [ pkgs.lib.licenses.bsdOriginal ];
1414 1401 };
1415 1402 };
1416 1403 setuptools = super.buildPythonPackage {
1417 1404 name = "setuptools-20.8.1";
1418 1405 buildInputs = with self; [];
1419 1406 doCheck = false;
1420 1407 propagatedBuildInputs = with self; [];
1421 1408 src = fetchurl {
1422 1409 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1423 1410 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1424 1411 };
1425 1412 meta = {
1426 1413 license = [ pkgs.lib.licenses.mit ];
1427 1414 };
1428 1415 };
1429 1416 setuptools-scm = super.buildPythonPackage {
1430 1417 name = "setuptools-scm-1.11.0";
1431 1418 buildInputs = with self; [];
1432 1419 doCheck = false;
1433 1420 propagatedBuildInputs = with self; [];
1434 1421 src = fetchurl {
1435 1422 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1436 1423 md5 = "4c5c896ba52e134bbc3507bac6400087";
1437 1424 };
1438 1425 meta = {
1439 1426 license = [ pkgs.lib.licenses.mit ];
1440 1427 };
1441 1428 };
1442 1429 simplejson = super.buildPythonPackage {
1443 1430 name = "simplejson-3.7.2";
1444 1431 buildInputs = with self; [];
1445 1432 doCheck = false;
1446 1433 propagatedBuildInputs = with self; [];
1447 1434 src = fetchurl {
1448 1435 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1449 1436 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1450 1437 };
1451 1438 meta = {
1452 1439 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1453 1440 };
1454 1441 };
1455 1442 six = super.buildPythonPackage {
1456 1443 name = "six-1.9.0";
1457 1444 buildInputs = with self; [];
1458 1445 doCheck = false;
1459 1446 propagatedBuildInputs = with self; [];
1460 1447 src = fetchurl {
1461 1448 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1462 1449 md5 = "476881ef4012262dfc8adc645ee786c4";
1463 1450 };
1464 1451 meta = {
1465 1452 license = [ pkgs.lib.licenses.mit ];
1466 1453 };
1467 1454 };
1468 1455 subprocess32 = super.buildPythonPackage {
1469 1456 name = "subprocess32-3.2.6";
1470 1457 buildInputs = with self; [];
1471 1458 doCheck = false;
1472 1459 propagatedBuildInputs = with self; [];
1473 1460 src = fetchurl {
1474 1461 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1475 1462 md5 = "754c5ab9f533e764f931136974b618f1";
1476 1463 };
1477 1464 meta = {
1478 1465 license = [ pkgs.lib.licenses.psfl ];
1479 1466 };
1480 1467 };
1481 1468 supervisor = super.buildPythonPackage {
1482 1469 name = "supervisor-3.3.0";
1483 1470 buildInputs = with self; [];
1484 1471 doCheck = false;
1485 1472 propagatedBuildInputs = with self; [meld3];
1486 1473 src = fetchurl {
1487 1474 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1488 1475 md5 = "46bac00378d1eddb616752b990c67416";
1489 1476 };
1490 1477 meta = {
1491 1478 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1492 1479 };
1493 1480 };
1494 1481 transifex-client = super.buildPythonPackage {
1495 1482 name = "transifex-client-0.10";
1496 1483 buildInputs = with self; [];
1497 1484 doCheck = false;
1498 1485 propagatedBuildInputs = with self; [];
1499 1486 src = fetchurl {
1500 1487 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1501 1488 md5 = "5549538d84b8eede6b254cd81ae024fa";
1502 1489 };
1503 1490 meta = {
1504 1491 license = [ pkgs.lib.licenses.gpl2 ];
1505 1492 };
1506 1493 };
1507 1494 translationstring = super.buildPythonPackage {
1508 1495 name = "translationstring-1.3";
1509 1496 buildInputs = with self; [];
1510 1497 doCheck = false;
1511 1498 propagatedBuildInputs = with self; [];
1512 1499 src = fetchurl {
1513 1500 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1514 1501 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1515 1502 };
1516 1503 meta = {
1517 1504 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1518 1505 };
1519 1506 };
1520 1507 trollius = super.buildPythonPackage {
1521 1508 name = "trollius-1.0.4";
1522 1509 buildInputs = with self; [];
1523 1510 doCheck = false;
1524 1511 propagatedBuildInputs = with self; [futures];
1525 1512 src = fetchurl {
1526 1513 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1527 1514 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1528 1515 };
1529 1516 meta = {
1530 1517 license = [ pkgs.lib.licenses.asl20 ];
1531 1518 };
1532 1519 };
1533 1520 uWSGI = super.buildPythonPackage {
1534 1521 name = "uWSGI-2.0.11.2";
1535 1522 buildInputs = with self; [];
1536 1523 doCheck = false;
1537 1524 propagatedBuildInputs = with self; [];
1538 1525 src = fetchurl {
1539 1526 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1540 1527 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1541 1528 };
1542 1529 meta = {
1543 1530 license = [ pkgs.lib.licenses.gpl2 ];
1544 1531 };
1545 1532 };
1546 1533 urllib3 = super.buildPythonPackage {
1547 1534 name = "urllib3-1.16";
1548 1535 buildInputs = with self; [];
1549 1536 doCheck = false;
1550 1537 propagatedBuildInputs = with self; [];
1551 1538 src = fetchurl {
1552 1539 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1553 1540 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1554 1541 };
1555 1542 meta = {
1556 1543 license = [ pkgs.lib.licenses.mit ];
1557 1544 };
1558 1545 };
1559 1546 venusian = super.buildPythonPackage {
1560 1547 name = "venusian-1.0";
1561 1548 buildInputs = with self; [];
1562 1549 doCheck = false;
1563 1550 propagatedBuildInputs = with self; [];
1564 1551 src = fetchurl {
1565 1552 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1566 1553 md5 = "dccf2eafb7113759d60c86faf5538756";
1567 1554 };
1568 1555 meta = {
1569 1556 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1570 1557 };
1571 1558 };
1572 1559 waitress = super.buildPythonPackage {
1573 1560 name = "waitress-0.8.9";
1574 1561 buildInputs = with self; [];
1575 1562 doCheck = false;
1576 1563 propagatedBuildInputs = with self; [setuptools];
1577 1564 src = fetchurl {
1578 1565 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1579 1566 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1580 1567 };
1581 1568 meta = {
1582 1569 license = [ pkgs.lib.licenses.zpt21 ];
1583 1570 };
1584 1571 };
1585 1572 wsgiref = super.buildPythonPackage {
1586 1573 name = "wsgiref-0.1.2";
1587 1574 buildInputs = with self; [];
1588 1575 doCheck = false;
1589 1576 propagatedBuildInputs = with self; [];
1590 1577 src = fetchurl {
1591 1578 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1592 1579 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1593 1580 };
1594 1581 meta = {
1595 1582 license = [ { fullName = "PSF or ZPL"; } ];
1596 1583 };
1597 1584 };
1598 1585 zope.cachedescriptors = super.buildPythonPackage {
1599 1586 name = "zope.cachedescriptors-4.0.0";
1600 1587 buildInputs = with self; [];
1601 1588 doCheck = false;
1602 1589 propagatedBuildInputs = with self; [setuptools];
1603 1590 src = fetchurl {
1604 1591 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1605 1592 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1606 1593 };
1607 1594 meta = {
1608 1595 license = [ pkgs.lib.licenses.zpt21 ];
1609 1596 };
1610 1597 };
1611 1598 zope.deprecation = super.buildPythonPackage {
1612 1599 name = "zope.deprecation-4.1.2";
1613 1600 buildInputs = with self; [];
1614 1601 doCheck = false;
1615 1602 propagatedBuildInputs = with self; [setuptools];
1616 1603 src = fetchurl {
1617 1604 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1618 1605 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1619 1606 };
1620 1607 meta = {
1621 1608 license = [ pkgs.lib.licenses.zpt21 ];
1622 1609 };
1623 1610 };
1624 1611 zope.event = super.buildPythonPackage {
1625 1612 name = "zope.event-4.0.3";
1626 1613 buildInputs = with self; [];
1627 1614 doCheck = false;
1628 1615 propagatedBuildInputs = with self; [setuptools];
1629 1616 src = fetchurl {
1630 1617 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1631 1618 md5 = "9a3780916332b18b8b85f522bcc3e249";
1632 1619 };
1633 1620 meta = {
1634 1621 license = [ pkgs.lib.licenses.zpt21 ];
1635 1622 };
1636 1623 };
1637 1624 zope.interface = super.buildPythonPackage {
1638 1625 name = "zope.interface-4.1.3";
1639 1626 buildInputs = with self; [];
1640 1627 doCheck = false;
1641 1628 propagatedBuildInputs = with self; [setuptools];
1642 1629 src = fetchurl {
1643 1630 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1644 1631 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1645 1632 };
1646 1633 meta = {
1647 1634 license = [ pkgs.lib.licenses.zpt21 ];
1648 1635 };
1649 1636 };
1650 1637
1651 1638 ### Test requirements
1652 1639
1653 1640
1654 1641 }
@@ -1,152 +1,151 b''
1 1 Babel==1.3
2 2 Beaker==1.7.0
3 3 CProfileV==1.0.6
4 4 Fabric==1.10.0
5 5 FormEncode==1.2.4
6 6 Jinja2==2.7.3
7 7 Mako==1.0.1
8 8 Markdown==2.6.2
9 9 MarkupSafe==0.23
10 10 MySQL-python==1.2.5
11 11 Paste==2.0.2
12 12 PasteDeploy==1.5.2
13 13 PasteScript==1.7.5
14 14 Pygments==2.1.3
15 15
16 16 # TODO: This version is not available on PyPI
17 17 # Pylons==1.0.2.dev20160108
18 18 Pylons==1.0.1
19 19
20 20 # TODO: This version is not available, but newer ones are
21 21 # Pyro4==4.35
22 22 Pyro4==4.41
23 23
24 24 # TODO: This should probably not be in here
25 25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26 26
27 27 # TODO: This is not really a dependency, we should add it only
28 28 # into the development environment, since there it is useful.
29 29 # RhodeCodeVCSServer==3.9.0
30 30
31 31 Routes==1.13
32 32 SQLAlchemy==0.9.9
33 33 Sphinx==1.2.2
34 34 Tempita==0.5.2
35 35 URLObject==2.4.0
36 36 WebError==0.10.3
37 37
38 38 # TODO: This is modified by us, needs a better integration. For now
39 39 # using the latest version from before our modifications.
40 40 # WebHelpers==1.3.dev20150807
41 41 WebHelpers==1.3
42 42
43 43 WebHelpers2==2.0
44 44 WebOb==1.3.1
45 45 WebTest==1.4.3
46 46 Whoosh==2.7.0
47 47 alembic==0.8.4
48 48 amqplib==1.0.2
49 49 anyjson==0.3.3
50 50 appenlight-client==0.6.14
51 51 authomatic==0.1.0.post1
52 52 backport-ipaddress==0.1
53 53 bottle==0.12.8
54 54 bumpversion==0.5.3
55 55 celery==2.2.10
56 56 click==5.1
57 57 colander==1.2
58 58 configobj==5.0.6
59 59 cov-core==1.15.0
60 60 coverage==3.7.1
61 61 cssselect==0.9.1
62 62 decorator==3.4.2
63 63 docutils==0.12
64 64 dogpile.cache==0.6.1
65 65 dogpile.core==0.4.1
66 66 dulwich==0.12.0
67 67 ecdsa==0.11
68 68 flake8==2.4.1
69 69 future==0.14.3
70 70 futures==3.0.2
71 71 gprof2dot==2015.12.1
72 72 gunicorn==19.6.0
73 73
74 74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 75 # actually we should not need this for Enterprise at all.
76 76 # hgsubversion==1.8.2
77 77
78 78 gnureadline==6.3.3
79 79 infrae.cache==1.0.1
80 80 invoke==0.13.0
81 81 ipdb==0.8
82 82 ipython==3.1.0
83 83 iso8601==0.1.11
84 84 itsdangerous==0.24
85 85 kombu==1.5.1
86 86 lxml==3.4.4
87 marshmallow==2.8.0
88 87 mccabe==0.3
89 88 meld3==1.0.2
90 89 mock==1.0.1
91 90 msgpack-python==0.4.6
92 91 nose==1.3.6
93 92 objgraph==2.0.0
94 93 packaging==15.2
95 94 paramiko==1.15.1
96 95 pep8==1.5.7
97 96 psutil==2.2.1
98 97 psycopg2==2.6.1
99 98 py==1.4.29
100 99 py-bcrypt==0.4
101 100 py-gfm==0.1.3
102 101 pycrypto==2.6.1
103 102 pycurl==7.19.5
104 103 pyflakes==0.8.1
105 104 pyparsing==1.5.7
106 105 pyramid==1.6.1
107 106 pyramid-beaker==0.8
108 107 pyramid-debugtoolbar==2.4.2
109 108 pyramid-jinja2==2.5
110 109 pyramid-mako==1.0.2
111 110 pysqlite==2.6.3
112 111 pytest==2.8.5
113 112 pytest-runner==2.7.1
114 113 pytest-catchlog==1.2.2
115 114 pytest-cov==1.8.1
116 115 pytest-profiling==1.0.1
117 116 pytest-timeout==0.4
118 117 python-dateutil==1.5
119 118 python-ldap==2.4.19
120 119 python-memcached==1.57
121 120 python-pam==1.8.2
122 121 pytz==2015.4
123 122 pyzmq==14.6.0
124 123
125 124 # TODO: This is not available in public
126 125 # rc-testdata==0.2.0
127 126
128 127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
129 128
130 129
131 130 recaptcha-client==1.0.6
132 131 repoze.lru==0.6
133 132 requests==2.9.1
134 133 serpent==1.12
135 134 setproctitle==1.1.8
136 135 setuptools==20.8.1
137 136 setuptools-scm==1.11.0
138 137 simplejson==3.7.2
139 138 six==1.9.0
140 139 subprocess32==3.2.6
141 140 supervisor==3.3.0
142 141 transifex-client==0.10
143 142 translationstring==1.3
144 143 trollius==1.0.4
145 144 uWSGI==2.0.11.2
146 145 venusian==1.0
147 146 waitress==0.8.9
148 147 wsgiref==0.1.2
149 148 zope.cachedescriptors==4.0.0
150 149 zope.deprecation==4.1.2
151 150 zope.event==4.0.3
152 151 zope.interface==4.1.3
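
The Nix expressions above and this requirements.txt pin the same versions in two places (for example six-1.9.0 and six==1.9.0), so the two files can drift apart when only one of them is regenerated. Below is a rough cross-check sketch; the file locations pkgs/python-packages.nix and requirements.txt are assumptions for illustration, not paths taken from this changeset.

    # Hypothetical consistency check between the generated Nix package set and
    # requirements.txt. The file paths are assumptions, not part of this change.
    import re
    import sys

    NIX_NAME_RE = re.compile(r'name\s*=\s*"(?P<name>[A-Za-z0-9_.\-]+)-(?P<version>[0-9][^"]*)"')
    REQ_PIN_RE = re.compile(r'^(?P<name>[A-Za-z0-9_.\-]+)==(?P<version>\S+?);?\s*$')


    def nix_pins(path):
        """Collect {package: version} from buildPythonPackage name attributes."""
        pins = {}
        with open(path) as f:
            for match in NIX_NAME_RE.finditer(f.read()):
                pins[match.group('name').lower()] = match.group('version')
        return pins


    def requirement_pins(path):
        """Collect {package: version} from pkg==version lines, skipping comments and URLs."""
        pins = {}
        with open(path) as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith('#') or line.startswith('http'):
                    continue
                match = REQ_PIN_RE.match(line)
                if match:
                    pins[match.group('name').lower()] = match.group('version')
        return pins


    if __name__ == '__main__':
        nix = nix_pins('pkgs/python-packages.nix')    # assumed location
        reqs = requirement_pins('requirements.txt')   # assumed location
        drifted = {
            name: (reqs[name], nix[name])
            for name in reqs if name in nix and reqs[name] != nix[name]
        }
        for name, (req_version, nix_version) in sorted(drifted.items()):
            print('%s: requirements.txt pins %s, nix pins %s'
                  % (name, req_version, nix_version))
        sys.exit(1 if drifted else 0)

Run from the repository root, a non-zero exit code flags any package whose pinned versions disagree between the two files.
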
@@ -1,58 +1,58 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, web-based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pylons
44 44 CONFIG = {}
45 45
46 46 # Linked module for extensions
47 47 EXTENSIONS = {}
48 48
49 49 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __dbversion__ = 54 # defines current db version for migrations
50 __dbversion__ = 55 # defines current db version for migrations
51 51 __platform__ = platform.system()
52 52 __license__ = 'AGPLv3, and Commercial License'
53 53 __author__ = 'RhodeCode GmbH'
54 54 __url__ = 'http://rhodecode.com'
55 55
56 56 is_windows = __platform__ in ['Windows']
57 57 is_unix = not is_windows
58 58 is_test = False
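
Two details of this file are easy to miss in the diff: VERSION is a tuple of version-string components read from the packaged VERSION file, and __version__ joins the first three of them back into a dotted string; the bump of __dbversion__ from 54 to 55 signals that a schema migration ships with this change, presumably for the new integration data. A minimal sketch of the version handling, using an illustrative version string instead of reading the VERSION file:

    # Illustrative only; the real value comes from the packaged VERSION file.
    version_file_contents = '4.1.0'

    VERSION = tuple(version_file_contents.split('.'))
    __version__ = '.'.join(str(each) for each in VERSION[:3])

    assert VERSION == ('4', '1', '0')
    assert __version__ == '4.1.0'
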
@@ -1,124 +1,126 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pylons import url
26 26 from zope.interface import implementer
27 27
28 28 from rhodecode.admin.interfaces import IAdminNavigationRegistry
29 29 from rhodecode.lib.utils import get_registry
30 30 from rhodecode.translation import _
31 31
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35 NavListEntry = collections.namedtuple('NavListEntry', ['key', 'name', 'url'])
36 36
37 37
38 38 class NavEntry(object):
39 39 """
40 40 Represents an entry in the admin navigation.
41 41
42 42 :param key: Unique identifier used to store reference in an OrderedDict.
43 43 :param name: Display name, usually a translation string.
44 44 :param view_name: Name of the view, used to generate the URL.
45 45 :param pyramid: Indicator to use pyramid for URL generation. This should
46 46 be removed as soon as we are fully migrated to pyramid.
47 47 """
48 48
49 49 def __init__(self, key, name, view_name, pyramid=False):
50 50 self.key = key
51 51 self.name = name
52 52 self.view_name = view_name
53 53 self.pyramid = pyramid
54 54
55 55 def generate_url(self, request):
56 56 if self.pyramid:
57 57 if hasattr(request, 'route_path'):
58 58 return request.route_path(self.view_name)
59 59 else:
60 60 # TODO: johbo: Remove this after migrating to pyramid.
61 61 # We need the pyramid request here to generate URLs to pyramid
62 62 # views from within pylons views.
63 63 from pyramid.threadlocal import get_current_request
64 64 pyramid_request = get_current_request()
65 65 return pyramid_request.route_path(self.view_name)
66 66 else:
67 67 return url(self.view_name)
68 68
69 69
70 70 @implementer(IAdminNavigationRegistry)
71 71 class NavigationRegistry(object):
72 72
73 73 _base_entries = [
74 74 NavEntry('global', _('Global'), 'admin_settings_global'),
75 75 NavEntry('vcs', _('VCS'), 'admin_settings_vcs'),
76 76 NavEntry('visual', _('Visual'), 'admin_settings_visual'),
77 77 NavEntry('mapping', _('Remap and Rescan'), 'admin_settings_mapping'),
78 78 NavEntry('issuetracker', _('Issue Tracker'),
79 79 'admin_settings_issuetracker'),
80 80 NavEntry('email', _('Email'), 'admin_settings_email'),
81 81 NavEntry('hooks', _('Hooks'), 'admin_settings_hooks'),
82 82 NavEntry('search', _('Full Text Search'), 'admin_settings_search'),
83 NavEntry('integrations', _('Integrations'),
84 'global_integrations_home', pyramid=True),
83 85 NavEntry('system', _('System Info'), 'admin_settings_system'),
84 86 NavEntry('open_source', _('Open Source Licenses'),
85 87 'admin_settings_open_source', pyramid=True),
86 88 # TODO: marcink: we disable supervisor now until the supervisor stats
87 89 # page is fixed in the nix configuration
88 90 # NavEntry('supervisor', _('Supervisor'), 'admin_settings_supervisor'),
89 91 ]
90 92
91 93 _labs_entry = NavEntry('labs', _('Labs'),
92 94 'admin_settings_labs')
93 95
94 96 def __init__(self, labs_active=False):
95 97 self._registered_entries = collections.OrderedDict([
96 98 (item.key, item) for item in self.__class__._base_entries
97 99 ])
98 100
99 101 if labs_active:
100 102 self.add_entry(self._labs_entry)
101 103
102 104 def add_entry(self, entry):
103 105 self._registered_entries[entry.key] = entry
104 106
105 107 def get_navlist(self, request):
106 108 navlist = [NavListEntry(i.key, i.name, i.generate_url(request))
107 109 for i in self._registered_entries.values()]
108 110 return navlist
109 111
110 112
111 113 def navigation_registry(request):
112 114 """
113 115 Helper that returns the admin navigation registry.
114 116 """
115 117 pyramid_registry = get_registry(request)
116 118 nav_registry = pyramid_registry.queryUtility(IAdminNavigationRegistry)
117 119 return nav_registry
118 120
119 121
120 122 def navigation_list(request):
121 123 """
122 124 Helper that returns the admin navigation as list of NavListEntry objects.
123 125 """
124 126 return navigation_registry(request).get_navlist(request)
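
The registry above is what this changeset extends with the new 'Integrations' entry. Third-party code could do the same thing at runtime through add_entry; a hedged sketch follows, in which the 'reports' key and the 'admin_settings_reports' view name are made-up placeholders rather than routes that exist in RhodeCode, and the import path assumes this module lives at rhodecode.admin.navigation.

    # Hedged sketch: adding a custom entry to the admin navigation registry,
    # following the same pattern this changeset uses for the 'integrations'
    # entry. 'reports' and 'admin_settings_reports' are hypothetical names.
    from rhodecode.admin.navigation import (
        NavEntry, navigation_registry, navigation_list)
    from rhodecode.translation import _


    def add_reports_entry(request):
        # Look up the registered NavigationRegistry utility for this request
        # and append a new entry; it then appears in navigation_list(request).
        registry = navigation_registry(request)
        registry.add_entry(
            NavEntry('reports', _('Reports'), 'admin_settings_reports',
                     pyramid=True))
        return navigation_list(request)

Entries added this way show up alongside the built-in ones, in the insertion order of the underlying OrderedDict.
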
@@ -1,387 +1,388 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25
26 26 from paste.registry import RegistryManager
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pylons.wsgiapp import PylonsApp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.static import static_view
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
35 35 import pyramid.httpexceptions as httpexceptions
36 36 from pyramid.renderers import render_to_response, render
37 37 from routes.middleware import RoutesMiddleware
38 38 import routes.util
39 39
40 40 import rhodecode
41 41 from rhodecode.config import patches
42 42 from rhodecode.config.environment import (
43 43 load_environment, load_pyramid_environment)
44 44 from rhodecode.lib.middleware import csrf
45 45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.middleware.vcs import VCSMiddleware
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
56 56 """Create a Pylons WSGI application and return it
57 57
58 58 ``global_conf``
59 59 The inherited configuration for this application. Normally from
60 60 the [DEFAULT] section of the Paste ini file.
61 61
62 62 ``full_stack``
63 63 Whether or not this application provides a full WSGI stack (by
64 64 default, meaning it handles its own exceptions and errors).
65 65 Disable full_stack when this application is "managed" by
66 66 another WSGI middleware.
67 67
68 68 ``app_conf``
69 69 The application's local configuration. Normally specified in
70 70 the [app:<name>] section of the Paste ini file (where <name>
71 71 defaults to main).
72 72
73 73 """
74 74 # Apply compatibility patches
75 75 patches.kombu_1_5_1_python_2_7_11()
76 76 patches.inspect_getargspec()
77 77
78 78 # Configure the Pylons environment
79 79 config = load_environment(global_conf, app_conf)
80 80
81 81 # The Pylons WSGI app
82 82 app = PylonsApp(config=config)
83 83 if rhodecode.is_test:
84 84 app = csrf.CSRFDetector(app)
85 85
86 86 expected_origin = config.get('expected_origin')
87 87 if expected_origin:
88 88 # The API can be accessed from other Origins.
89 89 app = csrf.OriginChecker(app, expected_origin,
90 90 skip_urls=[routes.util.url_for('api')])
91 91
92 92
93 93 if asbool(full_stack):
94 94
95 95 # Appenlight monitoring and error handler
96 96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
97 97
98 98 # we want our low level middleware to get to the request ASAP. We don't
99 99 # need any pylons stack middleware in them
100 100 app = VCSMiddleware(app, config, appenlight_client)
101 101
102 102 # Establish the Registry for this application
103 103 app = RegistryManager(app)
104 104
105 105 app.config = config
106 106
107 107 return app
108 108
109 109
110 110 def make_pyramid_app(global_config, **settings):
111 111 """
112 112 Constructs the WSGI application based on Pyramid and wraps the Pylons based
113 113 application.
114 114
115 115 Specials:
116 116
117 117 * We migrate from Pylons to Pyramid. While doing this, we keep both
118 118 frameworks functional. This involves moving some WSGI middlewares around
119 119 and providing access to some data internals, so that the old code is
120 120 still functional.
121 121
122 122 * The application can also be integrated like a plugin via the call to
123 123 `includeme`, together with the other utility functions which
124 124 are called. Changing this should be done with great care to not break
125 125 cases when these fragments are assembled from another place.
126 126
127 127 """
128 128 # The edition string should be available in pylons too, so we add it here
129 129 # before copying the settings.
130 130 settings.setdefault('rhodecode.edition', 'Community Edition')
131 131
132 132 # As long as our Pylons application does expect "unprepared" settings, make
133 133 # sure that we keep an unmodified copy. This avoids unintentional change of
134 134 # behavior in the old application.
135 135 settings_pylons = settings.copy()
136 136
137 137 sanitize_settings_and_apply_defaults(settings)
138 138 config = Configurator(settings=settings)
139 139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
140 140
141 141 load_pyramid_environment(global_config, settings)
142 142
143 143 includeme(config)
144 144 includeme_last(config)
145 145 pyramid_app = config.make_wsgi_app()
146 146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
147 147 return pyramid_app
148 148
149 149
150 150 def add_pylons_compat_data(registry, global_config, settings):
151 151 """
152 152 Attach data to the registry to support the Pylons integration.
153 153 """
154 154 registry._pylons_compat_global_config = global_config
155 155 registry._pylons_compat_settings = settings
156 156
157 157
158 158 def webob_to_pyramid_http_response(webob_response):
159 159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
160 160 pyramid_response = ResponseClass(webob_response.status)
161 161 pyramid_response.status = webob_response.status
162 162 pyramid_response.headers.update(webob_response.headers)
163 163 if pyramid_response.headers['content-type'] == 'text/html':
164 164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
165 165 return pyramid_response
166 166
167 167
168 168 def error_handler(exception, request):
169 169 # TODO: dan: replace the old pylons error controller with this
170 170 from rhodecode.model.settings import SettingsModel
171 171 from rhodecode.lib.utils2 import AttributeDict
172 172
173 173 try:
174 174 rc_config = SettingsModel().get_all_settings()
175 175 except Exception:
176 176 log.exception('failed to fetch settings')
177 177 rc_config = {}
178 178
179 179 base_response = HTTPInternalServerError()
180 180 # prefer original exception for the response since it may have headers set
181 181 if isinstance(exception, HTTPError):
182 182 base_response = exception
183 183
184 184 c = AttributeDict()
185 185 c.error_message = base_response.status
186 186 c.error_explanation = base_response.explanation or str(base_response)
187 187 c.visual = AttributeDict()
188 188
189 189 c.visual.rhodecode_support_url = (
190 190 request.registry.settings.get('rhodecode_support_url') or
191 191 request.route_url('rhodecode_support')
192 192 )
193 193 c.redirect_time = 0
194 194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
195 195 if not c.rhodecode_name:
196 196 c.rhodecode_name = 'Rhodecode'
197 197
198 198 response = render_to_response(
199 199 '/errors/error_document.html', {'c': c}, request=request,
200 200 response=base_response)
201 201
202 202 return response
203 203
204 204
205 205 def includeme(config):
206 206 settings = config.registry.settings
207 207
208 208 if asbool(settings.get('appenlight', 'false')):
209 209 config.include('appenlight_client.ext.pyramid_tween')
210 210
211 211 # Includes which are required. The application would fail without them.
212 212 config.include('pyramid_mako')
213 213 config.include('pyramid_beaker')
214 214 config.include('rhodecode.admin')
215 215 config.include('rhodecode.authentication')
216 config.include('rhodecode.integrations')
216 217 config.include('rhodecode.login')
217 218 config.include('rhodecode.tweens')
218 219 config.include('rhodecode.api')
219 220 config.add_route(
220 221 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221 222
222 223 # Set the authorization policy.
223 224 authz_policy = ACLAuthorizationPolicy()
224 225 config.set_authorization_policy(authz_policy)
225 226
226 227 # Set the default renderer for HTML templates to mako.
227 228 config.add_mako_renderer('.html')
228 229
229 230 # plugin information
230 231 config.registry.rhodecode_plugins = {}
231 232
232 233 config.add_directive(
233 234 'register_rhodecode_plugin', register_rhodecode_plugin)
234 235 # include RhodeCode plugins
235 236 includes = aslist(settings.get('rhodecode.includes', []))
236 237 for inc in includes:
237 238 config.include(inc)
238 239
239 240 pylons_app = make_app(
240 241 config.registry._pylons_compat_global_config,
241 242 **config.registry._pylons_compat_settings)
242 243 config.registry._pylons_compat_config = pylons_app.config
243 244
244 245 pylons_app_as_view = wsgiapp(pylons_app)
245 246
246 247 # Protect from VCS Server error related pages when server is not available
247 248 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 249 if not vcs_server_enabled:
249 250 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250 251
251 252
252 253 def pylons_app_with_error_handler(context, request):
253 254 """
254 255 Handle exceptions from rc pylons app:
255 256
256 257 - old webob type exceptions get converted to pyramid exceptions
257 258 - pyramid exceptions are passed to the error handler view
258 259 """
259 260 try:
260 261 response = pylons_app_as_view(context, request)
261 262 if 400 <= response.status_int <= 599: # webob type error responses
262 263 return error_handler(
263 264 webob_to_pyramid_http_response(response), request)
264 265 except HTTPError as e: # pyramid type exceptions
265 266 return error_handler(e, request)
266 267 except Exception:
267 268 if settings.get('debugtoolbar.enabled', False):
268 269 raise
269 270 return error_handler(HTTPInternalServerError(), request)
270 271 return response
271 272
272 273 # This is the glue which allows us to migrate in chunks. By registering the
273 274 # pylons based application as the "Not Found" view in Pyramid, we will
274 275 # fallback to the old application each time the new one does not yet know
275 276 # how to handle a request.
276 277 config.add_notfound_view(pylons_app_with_error_handler)
277 278
278 279 if settings.get('debugtoolbar.enabled', False):
279 280 # if toolbar, then only http type exceptions get caught and rendered
280 281 ExcClass = HTTPError
281 282 else:
282 283 # if no toolbar, then any exception gets caught and rendered
283 284 ExcClass = Exception
284 285 config.add_view(error_handler, context=ExcClass)
285 286
286 287
287 288 def includeme_last(config):
288 289 """
289 290 The static file catchall needs to be last in the view configuration.
290 291 """
291 292 settings = config.registry.settings
292 293
293 294 # Note: johbo: I would prefer to register a prefix for static files at some
294 295 # point, e.g. move them under '_static/'. This would fully avoid name
295 296 # clashes with a repository name. Imagine someone calling his
296 297 # repo "css" ;-) Also having an external web server to serve out the static
297 298 # files seems to be easier to set up if they have a common prefix.
298 299 #
299 300 # Example: config.add_static_view('_static', path='rhodecode:public')
300 301 #
301 302 # It might be an option to register both paths for a while and then migrate
302 303 # over to the new location.
303 304
304 305 # Serving static files with a catchall.
305 306 if settings['static_files']:
306 307 config.add_route('catchall_static', '/*subpath')
307 308 config.add_view(
308 309 static_view('rhodecode:public'), route_name='catchall_static')
309 310
310 311
311 312 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
312 313 """
313 314 Apply outer WSGI middlewares around the application.
314 315
315 316 Part of this has been moved up from the Pylons layer, so that the
316 317 data is also available if old Pylons code is hit through an already ported
317 318 view.
318 319 """
319 320 settings = config.registry.settings
320 321
321 322 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
322 323 pyramid_app = HttpsFixup(pyramid_app, settings)
323 324
324 325 # Add RoutesMiddleware to support the pylons compatibility tween during
325 326 # migration to pyramid.
326 327
327 328 pyramid_app = RoutesMiddleware(
328 329 pyramid_app, config.registry._pylons_compat_config['routes.map'])
329 330
330 331 if asbool(settings.get('appenlight', 'false')):
331 332 pyramid_app, _ = wrap_in_appenlight_if_enabled(
332 333 pyramid_app, config.registry._pylons_compat_config)
333 334
334 335 # TODO: johbo: Don't really see why we enable the gzip middleware when
335 336 # serving static files, might be something that should have its own setting
336 337 # as well?
337 338 if settings['static_files']:
338 339 pyramid_app = make_gzip_middleware(
339 340 pyramid_app, settings, compress_level=1)
340 341
341 342 return pyramid_app
342 343
343 344
344 345 def sanitize_settings_and_apply_defaults(settings):
345 346 """
346 347 Applies settings defaults and does all type conversion.
347 348
348 349 We would move all settings parsing and preparation into this place, so that
349 350 we have only one place left which deals with this part. The remaining parts
350 351 of the application would start to rely fully on well prepared settings.
351 352
352 353 This piece would later be split up per topic to avoid a big fat monster
353 354 function.
354 355 """
355 356
356 357 # Pyramid's mako renderer has to search in the templates folder so that the
357 358 # old templates still work. Ported and new templates are expected to use
358 359 # real asset specifications for the includes.
359 360 mako_directories = settings.setdefault('mako.directories', [
360 361 # Base templates of the original Pylons application
361 362 'rhodecode:templates',
362 363 ])
363 364 log.debug(
364 365 "Using the following Mako template directories: %s",
365 366 mako_directories)
366 367
367 368 # Default includes, possible to change as a user
368 369 pyramid_includes = settings.setdefault('pyramid.includes', [
369 370 'rhodecode.lib.middleware.request_wrapper',
370 371 ])
371 372 log.debug(
372 373 "Using the following pyramid.includes: %s",
373 374 pyramid_includes)
374 375
375 376 # TODO: johbo: Re-think this, usually the call to config.include
376 377 # should allow to pass in a prefix.
377 378 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 379
379 380 _bool_setting(settings, 'vcs.server.enable', 'true')
380 381 _bool_setting(settings, 'static_files', 'true')
381 382 _bool_setting(settings, 'is_test', 'false')
382 383
383 384 return settings
384 385
385 386
386 387 def _bool_setting(settings, name, default):
387 388 settings[name] = asbool(settings.get(name, default))
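
The central trick in this module is the "Not Found" fallback described in includeme: the Pylons application, wrapped with wsgiapp(), is registered as Pyramid's not-found view, so every request the ported Pyramid views cannot answer is handed to the old stack. Below is a self-contained sketch of that pattern with a dummy legacy application; it illustrates the mechanism only and is not RhodeCode's actual wiring.

    # Generic illustration of the Pylons-to-Pyramid migration glue:
    # a legacy WSGI app registered as Pyramid's "Not Found" view.
    from pyramid.config import Configurator
    from pyramid.response import Response
    from pyramid.wsgi import wsgiapp


    def legacy_wsgi_app(environ, start_response):
        # Stand-in for the old Pylons application.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'handled by the legacy app\n']


    def new_view(request):
        return Response('handled by the new Pyramid view\n')


    def make_app():
        config = Configurator()
        # Routes that the new application already knows about.
        config.add_route('new_page', '/new')
        config.add_view(new_view, route_name='new_page')
        # Everything else falls back to the wrapped legacy application.
        config.add_notfound_view(wsgiapp(legacy_wsgi_app))
        return config.make_wsgi_app()


    if __name__ == '__main__':
        from wsgiref.simple_server import make_server
        make_server('localhost', 8080, make_app()).serve_forever()

Requests to /new are answered by the Pyramid view, anything else by legacy_wsgi_app, which is the same shape as registering pylons_app_with_error_handler as the notfound view above.
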
@@ -1,1141 +1,1154 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Routes configuration
23 23
24 24 The more specific and detailed routes should be defined first so they
25 25 may take precedence over the more generic routes. For more information
26 26 refer to the routes manual at http://routes.groovie.org/docs/
27 27
28 28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 29 and the _route_name variable, which uses some of the route names stored here to do redirects.
30 30 """
31 31 import os
32 32 import re
33 33 from routes import Mapper
34 34
35 35 from rhodecode.config import routing_links
36 36
37 37 # prefix for non-repository related links; it needs to be prefixed with `/`
38 38 ADMIN_PREFIX = '/_admin'
39 39
40 40 # Default requirements for URL parts
41 41 URL_NAME_REQUIREMENTS = {
42 42 # group names can have a slash in them, but they must not end with a slash
43 43 'group_name': r'.*?[^/]',
44 44 # repo names can have a slash in them, but they must not end with a slash
45 45 'repo_name': r'.*?[^/]',
46 46 # file path eats up everything at the end
47 47 'f_path': r'.*',
48 48 # reference types
49 49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 51 }
52 52
53 53
54 def add_route_requirements(route_path, requirements):
55 """
56 Adds regex requirements to pyramid routes using a mapping dict
57
58 >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
59 '/{action}/{id:\d+}'
60
61 """
62 for key, regex in requirements.items():
63 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
64 return route_path
65
66
54 67 class JSRoutesMapper(Mapper):
55 68 """
56 69 Wrapper for routes.Mapper to make pyroutes compatible url definitions
57 70 """
58 71 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
59 72 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
60 73 def __init__(self, *args, **kw):
61 74 super(JSRoutesMapper, self).__init__(*args, **kw)
62 75 self._jsroutes = []
63 76
64 77 def connect(self, *args, **kw):
65 78 """
66 79 Wrapper for connect to take an extra argument jsroute=True
67 80
68 81 :param jsroute: boolean, if True will add the route to the pyroutes list
69 82 """
70 83 if kw.pop('jsroute', False):
71 84 if not self._named_route_regex.match(args[0]):
72 85 raise Exception('only named routes can be added to pyroutes')
73 86 self._jsroutes.append(args[0])
74 87
75 88 super(JSRoutesMapper, self).connect(*args, **kw)
76 89
77 90 def _extract_route_information(self, route):
78 91 """
79 92 Convert a route into tuple(name, path, args), eg:
80 93 ('user_profile', '/profile/%(username)s', ['username'])
81 94 """
82 95 routepath = route.routepath
83 96 def replace(matchobj):
84 97 if matchobj.group(1):
85 98 return "%%(%s)s" % matchobj.group(1).split(':')[0]
86 99 else:
87 100 return "%%(%s)s" % matchobj.group(2)
88 101
89 102 routepath = self._argument_prog.sub(replace, routepath)
90 103 return (
91 104 route.name,
92 105 routepath,
93 106 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
94 107 for arg in self._argument_prog.findall(route.routepath)]
95 108 )
96 109
97 110 def jsroutes(self):
98 111 """
99 112 Return a list of pyroutes.js compatible routes
100 113 """
101 114 for route_name in self._jsroutes:
102 115 yield self._extract_route_information(self._routenames[route_name])
103 116
104 117
105 118 def make_map(config):
106 119 """Create, configure and return the routes Mapper"""
107 120 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
108 121 always_scan=config['debug'])
109 122 rmap.minimization = False
110 123 rmap.explicit = False
111 124
112 125 from rhodecode.lib.utils2 import str2bool
113 126 from rhodecode.model import repo, repo_group
114 127
115 128 def check_repo(environ, match_dict):
116 129 """
117 130 check for valid repository for proper 404 handling
118 131
119 132 :param environ:
120 133 :param match_dict:
121 134 """
122 135 repo_name = match_dict.get('repo_name')
123 136
124 137 if match_dict.get('f_path'):
125 138 # fix for multiple initial slashes that cause errors
126 139 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
127 140 repo_model = repo.RepoModel()
128 141 by_name_match = repo_model.get_by_repo_name(repo_name)
129 142 # if we match quickly from database, short circuit the operation,
130 143 # and validate repo based on the type.
131 144 if by_name_match:
132 145 return True
133 146
134 147 by_id_match = repo_model.get_repo_by_id(repo_name)
135 148 if by_id_match:
136 149 repo_name = by_id_match.repo_name
137 150 match_dict['repo_name'] = repo_name
138 151 return True
139 152
140 153 return False
141 154
142 155 def check_group(environ, match_dict):
143 156 """
144 157 check for valid repository group path for proper 404 handling
145 158
146 159 :param environ:
147 160 :param match_dict:
148 161 """
149 162 repo_group_name = match_dict.get('group_name')
150 163 repo_group_model = repo_group.RepoGroupModel()
151 164 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
152 165 if by_name_match:
153 166 return True
154 167
155 168 return False
156 169
157 170 def check_user_group(environ, match_dict):
158 171 """
159 172 check for valid user group for proper 404 handling
160 173
161 174 :param environ:
162 175 :param match_dict:
163 176 """
164 177 return True
165 178
166 179 def check_int(environ, match_dict):
167 180 return match_dict.get('id').isdigit()
168 181
169 182
170 183 #==========================================================================
171 184 # CUSTOM ROUTES HERE
172 185 #==========================================================================
173 186
174 187 # MAIN PAGE
175 188 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
176 189 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
177 190 action='goto_switcher_data')
178 191 rmap.connect('repo_list_data', '/_repos', controller='home',
179 192 action='repo_list_data')
180 193
181 194 rmap.connect('user_autocomplete_data', '/_users', controller='home',
182 195 action='user_autocomplete_data', jsroute=True)
183 196 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
184 197 action='user_group_autocomplete_data')
185 198
186 199 rmap.connect(
187 200 'user_profile', '/_profiles/{username}', controller='users',
188 201 action='user_profile')
189 202
190 203 # TODO: johbo: Static links, to be replaced by our redirection mechanism
191 204 rmap.connect('rst_help',
192 205 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
193 206 _static=True)
194 207 rmap.connect('markdown_help',
195 208 'http://daringfireball.net/projects/markdown/syntax',
196 209 _static=True)
197 210 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
198 211 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
199 212 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
200 213 # TODO: anderson - making this a static link since redirect won't play
201 214 # nice with POST requests
202 215 rmap.connect('enterprise_license_convert_from_old',
203 216 'https://rhodecode.com/u/license-upgrade',
204 217 _static=True)
205 218
206 219 routing_links.connect_redirection_links(rmap)
207 220
208 221 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
209 222 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
210 223
211 224 # ADMIN REPOSITORY ROUTES
212 225 with rmap.submapper(path_prefix=ADMIN_PREFIX,
213 226 controller='admin/repos') as m:
214 227 m.connect('repos', '/repos',
215 228 action='create', conditions={'method': ['POST']})
216 229 m.connect('repos', '/repos',
217 230 action='index', conditions={'method': ['GET']})
218 231 m.connect('new_repo', '/create_repository', jsroute=True,
219 232 action='create_repository', conditions={'method': ['GET']})
220 233 m.connect('/repos/{repo_name}',
221 234 action='update', conditions={'method': ['PUT'],
222 235 'function': check_repo},
223 236 requirements=URL_NAME_REQUIREMENTS)
224 237 m.connect('delete_repo', '/repos/{repo_name}',
225 238 action='delete', conditions={'method': ['DELETE']},
226 239 requirements=URL_NAME_REQUIREMENTS)
227 240 m.connect('repo', '/repos/{repo_name}',
228 241 action='show', conditions={'method': ['GET'],
229 242 'function': check_repo},
230 243 requirements=URL_NAME_REQUIREMENTS)
231 244
232 245 # ADMIN REPOSITORY GROUPS ROUTES
233 246 with rmap.submapper(path_prefix=ADMIN_PREFIX,
234 247 controller='admin/repo_groups') as m:
235 248 m.connect('repo_groups', '/repo_groups',
236 249 action='create', conditions={'method': ['POST']})
237 250 m.connect('repo_groups', '/repo_groups',
238 251 action='index', conditions={'method': ['GET']})
239 252 m.connect('new_repo_group', '/repo_groups/new',
240 253 action='new', conditions={'method': ['GET']})
241 254 m.connect('update_repo_group', '/repo_groups/{group_name}',
242 255 action='update', conditions={'method': ['PUT'],
243 256 'function': check_group},
244 257 requirements=URL_NAME_REQUIREMENTS)
245 258
246 259 # EXTRAS REPO GROUP ROUTES
247 260 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
248 261 action='edit',
249 262 conditions={'method': ['GET'], 'function': check_group},
250 263 requirements=URL_NAME_REQUIREMENTS)
251 264 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
252 265 action='edit',
253 266 conditions={'method': ['PUT'], 'function': check_group},
254 267 requirements=URL_NAME_REQUIREMENTS)
255 268
256 269 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
257 270 action='edit_repo_group_advanced',
258 271 conditions={'method': ['GET'], 'function': check_group},
259 272 requirements=URL_NAME_REQUIREMENTS)
260 273 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
261 274 action='edit_repo_group_advanced',
262 275 conditions={'method': ['PUT'], 'function': check_group},
263 276 requirements=URL_NAME_REQUIREMENTS)
264 277
265 278 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
266 279 action='edit_repo_group_perms',
267 280 conditions={'method': ['GET'], 'function': check_group},
268 281 requirements=URL_NAME_REQUIREMENTS)
269 282 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
270 283 action='update_perms',
271 284 conditions={'method': ['PUT'], 'function': check_group},
272 285 requirements=URL_NAME_REQUIREMENTS)
273 286
274 287 m.connect('delete_repo_group', '/repo_groups/{group_name}',
275 288 action='delete', conditions={'method': ['DELETE'],
276 289 'function': check_group},
277 290 requirements=URL_NAME_REQUIREMENTS)
278 291
279 292 # ADMIN USER ROUTES
280 293 with rmap.submapper(path_prefix=ADMIN_PREFIX,
281 294 controller='admin/users') as m:
282 295 m.connect('users', '/users',
283 296 action='create', conditions={'method': ['POST']})
284 297 m.connect('users', '/users',
285 298 action='index', conditions={'method': ['GET']})
286 299 m.connect('new_user', '/users/new',
287 300 action='new', conditions={'method': ['GET']})
288 301 m.connect('update_user', '/users/{user_id}',
289 302 action='update', conditions={'method': ['PUT']})
290 303 m.connect('delete_user', '/users/{user_id}',
291 304 action='delete', conditions={'method': ['DELETE']})
292 305 m.connect('edit_user', '/users/{user_id}/edit',
293 306 action='edit', conditions={'method': ['GET']})
294 307 m.connect('user', '/users/{user_id}',
295 308 action='show', conditions={'method': ['GET']})
296 309 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
297 310 action='reset_password', conditions={'method': ['POST']})
298 311 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
299 312 action='create_personal_repo_group', conditions={'method': ['POST']})
300 313
301 314 # EXTRAS USER ROUTES
302 315 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
303 316 action='edit_advanced', conditions={'method': ['GET']})
304 317 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
305 318 action='update_advanced', conditions={'method': ['PUT']})
306 319
307 320 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
308 321 action='edit_auth_tokens', conditions={'method': ['GET']})
309 322 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
310 323 action='add_auth_token', conditions={'method': ['PUT']})
311 324 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
312 325 action='delete_auth_token', conditions={'method': ['DELETE']})
313 326
314 327 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
315 328 action='edit_global_perms', conditions={'method': ['GET']})
316 329 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
317 330 action='update_global_perms', conditions={'method': ['PUT']})
318 331
319 332 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
320 333 action='edit_perms_summary', conditions={'method': ['GET']})
321 334
322 335 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
323 336 action='edit_emails', conditions={'method': ['GET']})
324 337 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
325 338 action='add_email', conditions={'method': ['PUT']})
326 339 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
327 340 action='delete_email', conditions={'method': ['DELETE']})
328 341
329 342 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
330 343 action='edit_ips', conditions={'method': ['GET']})
331 344 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
332 345 action='add_ip', conditions={'method': ['PUT']})
333 346 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
334 347 action='delete_ip', conditions={'method': ['DELETE']})
335 348
336 349 # ADMIN USER GROUPS REST ROUTES
337 350 with rmap.submapper(path_prefix=ADMIN_PREFIX,
338 351 controller='admin/user_groups') as m:
339 352 m.connect('users_groups', '/user_groups',
340 353 action='create', conditions={'method': ['POST']})
341 354 m.connect('users_groups', '/user_groups',
342 355 action='index', conditions={'method': ['GET']})
343 356 m.connect('new_users_group', '/user_groups/new',
344 357 action='new', conditions={'method': ['GET']})
345 358 m.connect('update_users_group', '/user_groups/{user_group_id}',
346 359 action='update', conditions={'method': ['PUT']})
347 360 m.connect('delete_users_group', '/user_groups/{user_group_id}',
348 361 action='delete', conditions={'method': ['DELETE']})
349 362 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
350 363 action='edit', conditions={'method': ['GET']},
351 364 function=check_user_group)
352 365
353 366 # EXTRAS USER GROUP ROUTES
354 367 m.connect('edit_user_group_global_perms',
355 368 '/user_groups/{user_group_id}/edit/global_permissions',
356 369 action='edit_global_perms', conditions={'method': ['GET']})
357 370 m.connect('edit_user_group_global_perms',
358 371 '/user_groups/{user_group_id}/edit/global_permissions',
359 372 action='update_global_perms', conditions={'method': ['PUT']})
360 373 m.connect('edit_user_group_perms_summary',
361 374 '/user_groups/{user_group_id}/edit/permissions_summary',
362 375 action='edit_perms_summary', conditions={'method': ['GET']})
363 376
364 377 m.connect('edit_user_group_perms',
365 378 '/user_groups/{user_group_id}/edit/permissions',
366 379 action='edit_perms', conditions={'method': ['GET']})
367 380 m.connect('edit_user_group_perms',
368 381 '/user_groups/{user_group_id}/edit/permissions',
369 382 action='update_perms', conditions={'method': ['PUT']})
370 383
371 384 m.connect('edit_user_group_advanced',
372 385 '/user_groups/{user_group_id}/edit/advanced',
373 386 action='edit_advanced', conditions={'method': ['GET']})
374 387
375 388 m.connect('edit_user_group_members',
376 389 '/user_groups/{user_group_id}/edit/members', jsroute=True,
377 390 action='edit_members', conditions={'method': ['GET']})
378 391
379 392 # ADMIN PERMISSIONS ROUTES
380 393 with rmap.submapper(path_prefix=ADMIN_PREFIX,
381 394 controller='admin/permissions') as m:
382 395 m.connect('admin_permissions_application', '/permissions/application',
383 396 action='permission_application_update', conditions={'method': ['POST']})
384 397 m.connect('admin_permissions_application', '/permissions/application',
385 398 action='permission_application', conditions={'method': ['GET']})
386 399
387 400 m.connect('admin_permissions_global', '/permissions/global',
388 401 action='permission_global_update', conditions={'method': ['POST']})
389 402 m.connect('admin_permissions_global', '/permissions/global',
390 403 action='permission_global', conditions={'method': ['GET']})
391 404
392 405 m.connect('admin_permissions_object', '/permissions/object',
393 406 action='permission_objects_update', conditions={'method': ['POST']})
394 407 m.connect('admin_permissions_object', '/permissions/object',
395 408 action='permission_objects', conditions={'method': ['GET']})
396 409
397 410 m.connect('admin_permissions_ips', '/permissions/ips',
398 411 action='permission_ips', conditions={'method': ['POST']})
399 412 m.connect('admin_permissions_ips', '/permissions/ips',
400 413 action='permission_ips', conditions={'method': ['GET']})
401 414
402 415 m.connect('admin_permissions_overview', '/permissions/overview',
403 416 action='permission_perms', conditions={'method': ['GET']})
404 417
405 418 # ADMIN DEFAULTS REST ROUTES
406 419 with rmap.submapper(path_prefix=ADMIN_PREFIX,
407 420 controller='admin/defaults') as m:
408 421 m.connect('admin_defaults_repositories', '/defaults/repositories',
409 422 action='update_repository_defaults', conditions={'method': ['POST']})
410 423 m.connect('admin_defaults_repositories', '/defaults/repositories',
411 424 action='index', conditions={'method': ['GET']})
412 425
413 426 # ADMIN DEBUG STYLE ROUTES
414 427 if str2bool(config.get('debug_style')):
415 428 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
416 429 controller='debug_style') as m:
417 430 m.connect('debug_style_home', '',
418 431 action='index', conditions={'method': ['GET']})
419 432 m.connect('debug_style_template', '/t/{t_path}',
420 433 action='template', conditions={'method': ['GET']})
421 434
422 435 # ADMIN SETTINGS ROUTES
423 436 with rmap.submapper(path_prefix=ADMIN_PREFIX,
424 437 controller='admin/settings') as m:
425 438
426 439 # default
427 440 m.connect('admin_settings', '/settings',
428 441 action='settings_global_update',
429 442 conditions={'method': ['POST']})
430 443 m.connect('admin_settings', '/settings',
431 444 action='settings_global', conditions={'method': ['GET']})
432 445
433 446 m.connect('admin_settings_vcs', '/settings/vcs',
434 447 action='settings_vcs_update',
435 448 conditions={'method': ['POST']})
436 449 m.connect('admin_settings_vcs', '/settings/vcs',
437 450 action='settings_vcs',
438 451 conditions={'method': ['GET']})
439 452 m.connect('admin_settings_vcs', '/settings/vcs',
440 453 action='delete_svn_pattern',
441 454 conditions={'method': ['DELETE']})
442 455
443 456 m.connect('admin_settings_mapping', '/settings/mapping',
444 457 action='settings_mapping_update',
445 458 conditions={'method': ['POST']})
446 459 m.connect('admin_settings_mapping', '/settings/mapping',
447 460 action='settings_mapping', conditions={'method': ['GET']})
448 461
449 462 m.connect('admin_settings_global', '/settings/global',
450 463 action='settings_global_update',
451 464 conditions={'method': ['POST']})
452 465 m.connect('admin_settings_global', '/settings/global',
453 466 action='settings_global', conditions={'method': ['GET']})
454 467
455 468 m.connect('admin_settings_visual', '/settings/visual',
456 469 action='settings_visual_update',
457 470 conditions={'method': ['POST']})
458 471 m.connect('admin_settings_visual', '/settings/visual',
459 472 action='settings_visual', conditions={'method': ['GET']})
460 473
461 474 m.connect('admin_settings_issuetracker',
462 475 '/settings/issue-tracker', action='settings_issuetracker',
463 476 conditions={'method': ['GET']})
464 477 m.connect('admin_settings_issuetracker_save',
465 478 '/settings/issue-tracker/save',
466 479 action='settings_issuetracker_save',
467 480 conditions={'method': ['POST']})
468 481 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
469 482 action='settings_issuetracker_test',
470 483 conditions={'method': ['POST']})
471 484 m.connect('admin_issuetracker_delete',
472 485 '/settings/issue-tracker/delete',
473 486 action='settings_issuetracker_delete',
474 487 conditions={'method': ['DELETE']})
475 488
476 489 m.connect('admin_settings_email', '/settings/email',
477 490 action='settings_email_update',
478 491 conditions={'method': ['POST']})
479 492 m.connect('admin_settings_email', '/settings/email',
480 493 action='settings_email', conditions={'method': ['GET']})
481 494
482 495 m.connect('admin_settings_hooks', '/settings/hooks',
483 496 action='settings_hooks_update',
484 497 conditions={'method': ['POST', 'DELETE']})
485 498 m.connect('admin_settings_hooks', '/settings/hooks',
486 499 action='settings_hooks', conditions={'method': ['GET']})
487 500
488 501 m.connect('admin_settings_search', '/settings/search',
489 502 action='settings_search', conditions={'method': ['GET']})
490 503
491 504 m.connect('admin_settings_system', '/settings/system',
492 505 action='settings_system', conditions={'method': ['GET']})
493 506
494 507 m.connect('admin_settings_system_update', '/settings/system/updates',
495 508 action='settings_system_update', conditions={'method': ['GET']})
496 509
497 510 m.connect('admin_settings_supervisor', '/settings/supervisor',
498 511 action='settings_supervisor', conditions={'method': ['GET']})
499 512 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
500 513 action='settings_supervisor_log', conditions={'method': ['GET']})
501 514
502 515 m.connect('admin_settings_labs', '/settings/labs',
503 516 action='settings_labs_update',
504 517 conditions={'method': ['POST']})
505 518 m.connect('admin_settings_labs', '/settings/labs',
506 519 action='settings_labs', conditions={'method': ['GET']})
507 520
508 521 # ADMIN MY ACCOUNT
509 522 with rmap.submapper(path_prefix=ADMIN_PREFIX,
510 523 controller='admin/my_account') as m:
511 524
512 525 m.connect('my_account', '/my_account',
513 526 action='my_account', conditions={'method': ['GET']})
514 527 m.connect('my_account_edit', '/my_account/edit',
515 528 action='my_account_edit', conditions={'method': ['GET']})
516 529 m.connect('my_account', '/my_account',
517 530 action='my_account_update', conditions={'method': ['POST']})
518 531
519 532 m.connect('my_account_password', '/my_account/password',
520 533 action='my_account_password', conditions={'method': ['GET']})
521 534 m.connect('my_account_password', '/my_account/password',
522 535 action='my_account_password_update', conditions={'method': ['POST']})
523 536
524 537 m.connect('my_account_repos', '/my_account/repos',
525 538 action='my_account_repos', conditions={'method': ['GET']})
526 539
527 540 m.connect('my_account_watched', '/my_account/watched',
528 541 action='my_account_watched', conditions={'method': ['GET']})
529 542
530 543 m.connect('my_account_pullrequests', '/my_account/pull_requests',
531 544 action='my_account_pullrequests', conditions={'method': ['GET']})
532 545
533 546 m.connect('my_account_perms', '/my_account/perms',
534 547 action='my_account_perms', conditions={'method': ['GET']})
535 548
536 549 m.connect('my_account_emails', '/my_account/emails',
537 550 action='my_account_emails', conditions={'method': ['GET']})
538 551 m.connect('my_account_emails', '/my_account/emails',
539 552 action='my_account_emails_add', conditions={'method': ['POST']})
540 553 m.connect('my_account_emails', '/my_account/emails',
541 554 action='my_account_emails_delete', conditions={'method': ['DELETE']})
542 555
543 556 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
544 557 action='my_account_auth_tokens', conditions={'method': ['GET']})
545 558 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
546 559 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
547 560 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
548 561 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
549 562
550 563 # NOTIFICATION REST ROUTES
551 564 with rmap.submapper(path_prefix=ADMIN_PREFIX,
552 565 controller='admin/notifications') as m:
553 566 m.connect('notifications', '/notifications',
554 567 action='index', conditions={'method': ['GET']})
555 568 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
556 569 action='mark_all_read', conditions={'method': ['POST']})
557 570
558 571 m.connect('/notifications/{notification_id}',
559 572 action='update', conditions={'method': ['PUT']})
560 573 m.connect('/notifications/{notification_id}',
561 574 action='delete', conditions={'method': ['DELETE']})
562 575 m.connect('notification', '/notifications/{notification_id}',
563 576 action='show', conditions={'method': ['GET']})
564 577
565 578 # ADMIN GIST
566 579 with rmap.submapper(path_prefix=ADMIN_PREFIX,
567 580 controller='admin/gists') as m:
568 581 m.connect('gists', '/gists',
569 582 action='create', conditions={'method': ['POST']})
570 583 m.connect('gists', '/gists', jsroute=True,
571 584 action='index', conditions={'method': ['GET']})
572 585 m.connect('new_gist', '/gists/new', jsroute=True,
573 586 action='new', conditions={'method': ['GET']})
574 587
575 588 m.connect('/gists/{gist_id}',
576 589 action='delete', conditions={'method': ['DELETE']})
577 590 m.connect('edit_gist', '/gists/{gist_id}/edit',
578 591 action='edit_form', conditions={'method': ['GET']})
579 592 m.connect('edit_gist', '/gists/{gist_id}/edit',
580 593 action='edit', conditions={'method': ['POST']})
581 594 m.connect(
582 595 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
583 596 action='check_revision', conditions={'method': ['GET']})
584 597
585 598 m.connect('gist', '/gists/{gist_id}',
586 599 action='show', conditions={'method': ['GET']})
587 600 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
588 601 revision='tip',
589 602 action='show', conditions={'method': ['GET']})
590 603 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
591 604 revision='tip',
592 605 action='show', conditions={'method': ['GET']})
593 606 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
594 607 revision='tip',
595 608 action='show', conditions={'method': ['GET']},
596 609 requirements=URL_NAME_REQUIREMENTS)
597 610
598 611 # ADMIN MAIN PAGES
599 612 with rmap.submapper(path_prefix=ADMIN_PREFIX,
600 613 controller='admin/admin') as m:
601 614 m.connect('admin_home', '', action='index')
602 615 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
603 616 action='add_repo')
604 617 m.connect(
605 618 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
606 619 action='pull_requests')
607 620 m.connect(
608 621 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
609 622 action='pull_requests')
610 623
611 624
612 625 # USER JOURNAL
613 626 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
614 627 controller='journal', action='index')
615 628 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
616 629 controller='journal', action='journal_rss')
617 630 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
618 631 controller='journal', action='journal_atom')
619 632
620 633 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
621 634 controller='journal', action='public_journal')
622 635
623 636 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
624 637 controller='journal', action='public_journal_rss')
625 638
626 639 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
627 640 controller='journal', action='public_journal_rss')
628 641
629 642 rmap.connect('public_journal_atom',
630 643 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
631 644 action='public_journal_atom')
632 645
633 646 rmap.connect('public_journal_atom_old',
634 647 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
635 648 action='public_journal_atom')
636 649
637 650 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
638 651 controller='journal', action='toggle_following', jsroute=True,
639 652 conditions={'method': ['POST']})
640 653
641 654 # FULL TEXT SEARCH
642 655 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
643 656 controller='search')
644 657 rmap.connect('search_repo_home', '/{repo_name}/search',
645 658 controller='search',
646 659 action='index',
647 660 conditions={'function': check_repo},
648 661 requirements=URL_NAME_REQUIREMENTS)
649 662
650 663 # FEEDS
651 664 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
652 665 controller='feed', action='rss',
653 666 conditions={'function': check_repo},
654 667 requirements=URL_NAME_REQUIREMENTS)
655 668
656 669 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
657 670 controller='feed', action='atom',
658 671 conditions={'function': check_repo},
659 672 requirements=URL_NAME_REQUIREMENTS)
660 673
661 674 #==========================================================================
662 675 # REPOSITORY ROUTES
663 676 #==========================================================================
664 677
665 678 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
666 679 controller='admin/repos', action='repo_creating',
667 680 requirements=URL_NAME_REQUIREMENTS)
668 681 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
669 682 controller='admin/repos', action='repo_check',
670 683 requirements=URL_NAME_REQUIREMENTS)
671 684
672 685 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
673 686 controller='summary', action='repo_stats',
674 687 conditions={'function': check_repo},
675 688 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
676 689
677 690 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
678 691 controller='summary', action='repo_refs_data', jsroute=True,
679 692 requirements=URL_NAME_REQUIREMENTS)
680 693 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
681 694 controller='summary', action='repo_refs_changelog_data',
682 695 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
683 696
684 697 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
685 698 controller='changeset', revision='tip', jsroute=True,
686 699 conditions={'function': check_repo},
687 700 requirements=URL_NAME_REQUIREMENTS)
688 701 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
689 702 controller='changeset', revision='tip', action='changeset_children',
690 703 conditions={'function': check_repo},
691 704 requirements=URL_NAME_REQUIREMENTS)
692 705 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
693 706 controller='changeset', revision='tip', action='changeset_parents',
694 707 conditions={'function': check_repo},
695 708 requirements=URL_NAME_REQUIREMENTS)
696 709
697 710 # repo edit options
698 711 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
699 712 controller='admin/repos', action='edit',
700 713 conditions={'method': ['GET'], 'function': check_repo},
701 714 requirements=URL_NAME_REQUIREMENTS)
702 715
703 716 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
704 717 jsroute=True,
705 718 controller='admin/repos', action='edit_permissions',
706 719 conditions={'method': ['GET'], 'function': check_repo},
707 720 requirements=URL_NAME_REQUIREMENTS)
708 721 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
709 722 controller='admin/repos', action='edit_permissions_update',
710 723 conditions={'method': ['PUT'], 'function': check_repo},
711 724 requirements=URL_NAME_REQUIREMENTS)
712 725
713 726 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
714 727 controller='admin/repos', action='edit_fields',
715 728 conditions={'method': ['GET'], 'function': check_repo},
716 729 requirements=URL_NAME_REQUIREMENTS)
717 730 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
718 731 controller='admin/repos', action='create_repo_field',
719 732 conditions={'method': ['PUT'], 'function': check_repo},
720 733 requirements=URL_NAME_REQUIREMENTS)
721 734 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
722 735 controller='admin/repos', action='delete_repo_field',
723 736 conditions={'method': ['DELETE'], 'function': check_repo},
724 737 requirements=URL_NAME_REQUIREMENTS)
725 738
726 739 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
727 740 controller='admin/repos', action='edit_advanced',
728 741 conditions={'method': ['GET'], 'function': check_repo},
729 742 requirements=URL_NAME_REQUIREMENTS)
730 743
731 744 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
732 745 controller='admin/repos', action='edit_advanced_locking',
733 746 conditions={'method': ['PUT'], 'function': check_repo},
734 747 requirements=URL_NAME_REQUIREMENTS)
735 748 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
736 749 controller='admin/repos', action='toggle_locking',
737 750 conditions={'method': ['GET'], 'function': check_repo},
738 751 requirements=URL_NAME_REQUIREMENTS)
739 752
740 753 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
741 754 controller='admin/repos', action='edit_advanced_journal',
742 755 conditions={'method': ['PUT'], 'function': check_repo},
743 756 requirements=URL_NAME_REQUIREMENTS)
744 757
745 758 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
746 759 controller='admin/repos', action='edit_advanced_fork',
747 760 conditions={'method': ['PUT'], 'function': check_repo},
748 761 requirements=URL_NAME_REQUIREMENTS)
749 762
750 763 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
751 764 controller='admin/repos', action='edit_caches_form',
752 765 conditions={'method': ['GET'], 'function': check_repo},
753 766 requirements=URL_NAME_REQUIREMENTS)
754 767 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
755 768 controller='admin/repos', action='edit_caches',
756 769 conditions={'method': ['PUT'], 'function': check_repo},
757 770 requirements=URL_NAME_REQUIREMENTS)
758 771
759 772 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
760 773 controller='admin/repos', action='edit_remote_form',
761 774 conditions={'method': ['GET'], 'function': check_repo},
762 775 requirements=URL_NAME_REQUIREMENTS)
763 776 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
764 777 controller='admin/repos', action='edit_remote',
765 778 conditions={'method': ['PUT'], 'function': check_repo},
766 779 requirements=URL_NAME_REQUIREMENTS)
767 780
768 781 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
769 782 controller='admin/repos', action='edit_statistics_form',
770 783 conditions={'method': ['GET'], 'function': check_repo},
771 784 requirements=URL_NAME_REQUIREMENTS)
772 785 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
773 786 controller='admin/repos', action='edit_statistics',
774 787 conditions={'method': ['PUT'], 'function': check_repo},
775 788 requirements=URL_NAME_REQUIREMENTS)
776 789 rmap.connect('repo_settings_issuetracker',
777 790 '/{repo_name}/settings/issue-tracker',
778 791 controller='admin/repos', action='repo_issuetracker',
779 792 conditions={'method': ['GET'], 'function': check_repo},
780 793 requirements=URL_NAME_REQUIREMENTS)
781 794 rmap.connect('repo_issuetracker_test',
782 795 '/{repo_name}/settings/issue-tracker/test',
783 796 controller='admin/repos', action='repo_issuetracker_test',
784 797 conditions={'method': ['POST'], 'function': check_repo},
785 798 requirements=URL_NAME_REQUIREMENTS)
786 799 rmap.connect('repo_issuetracker_delete',
787 800 '/{repo_name}/settings/issue-tracker/delete',
788 801 controller='admin/repos', action='repo_issuetracker_delete',
789 802 conditions={'method': ['DELETE'], 'function': check_repo},
790 803 requirements=URL_NAME_REQUIREMENTS)
791 804 rmap.connect('repo_issuetracker_save',
792 805 '/{repo_name}/settings/issue-tracker/save',
793 806 controller='admin/repos', action='repo_issuetracker_save',
794 807 conditions={'method': ['POST'], 'function': check_repo},
795 808 requirements=URL_NAME_REQUIREMENTS)
796 809 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
797 810 controller='admin/repos', action='repo_settings_vcs_update',
798 811 conditions={'method': ['POST'], 'function': check_repo},
799 812 requirements=URL_NAME_REQUIREMENTS)
800 813 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
801 814 controller='admin/repos', action='repo_settings_vcs',
802 815 conditions={'method': ['GET'], 'function': check_repo},
803 816 requirements=URL_NAME_REQUIREMENTS)
804 817 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
805 818 controller='admin/repos', action='repo_delete_svn_pattern',
806 819 conditions={'method': ['DELETE'], 'function': check_repo},
807 820 requirements=URL_NAME_REQUIREMENTS)
808 821
809 822 # still-working URL kept for backward compatibility
810 823 rmap.connect('raw_changeset_home_depraced',
811 824 '/{repo_name}/raw-changeset/{revision}',
812 825 controller='changeset', action='changeset_raw',
813 826 revision='tip', conditions={'function': check_repo},
814 827 requirements=URL_NAME_REQUIREMENTS)
815 828
816 829 # new URLs
817 830 rmap.connect('changeset_raw_home',
818 831 '/{repo_name}/changeset-diff/{revision}',
819 832 controller='changeset', action='changeset_raw',
820 833 revision='tip', conditions={'function': check_repo},
821 834 requirements=URL_NAME_REQUIREMENTS)
822 835
823 836 rmap.connect('changeset_patch_home',
824 837 '/{repo_name}/changeset-patch/{revision}',
825 838 controller='changeset', action='changeset_patch',
826 839 revision='tip', conditions={'function': check_repo},
827 840 requirements=URL_NAME_REQUIREMENTS)
828 841
829 842 rmap.connect('changeset_download_home',
830 843 '/{repo_name}/changeset-download/{revision}',
831 844 controller='changeset', action='changeset_download',
832 845 revision='tip', conditions={'function': check_repo},
833 846 requirements=URL_NAME_REQUIREMENTS)
834 847
835 848 rmap.connect('changeset_comment',
836 849 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
837 850 controller='changeset', revision='tip', action='comment',
838 851 conditions={'function': check_repo},
839 852 requirements=URL_NAME_REQUIREMENTS)
840 853
841 854 rmap.connect('changeset_comment_preview',
842 855 '/{repo_name}/changeset/comment/preview', jsroute=True,
843 856 controller='changeset', action='preview_comment',
844 857 conditions={'function': check_repo, 'method': ['POST']},
845 858 requirements=URL_NAME_REQUIREMENTS)
846 859
847 860 rmap.connect('changeset_comment_delete',
848 861 '/{repo_name}/changeset/comment/{comment_id}/delete',
849 862 controller='changeset', action='delete_comment',
850 863 conditions={'function': check_repo, 'method': ['DELETE']},
851 864 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
852 865
853 866 rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
854 867 controller='changeset', action='changeset_info',
855 868 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
856 869
857 870 rmap.connect('compare_home',
858 871 '/{repo_name}/compare',
859 872 controller='compare', action='index',
860 873 conditions={'function': check_repo},
861 874 requirements=URL_NAME_REQUIREMENTS)
862 875
863 876 rmap.connect('compare_url',
864 877 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
865 878 controller='compare', action='compare',
866 879 conditions={'function': check_repo},
867 880 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
868 881
869 882 rmap.connect('pullrequest_home',
870 883 '/{repo_name}/pull-request/new', controller='pullrequests',
871 884 action='index', conditions={'function': check_repo,
872 885 'method': ['GET']},
873 886 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
874 887
875 888 rmap.connect('pullrequest',
876 889 '/{repo_name}/pull-request/new', controller='pullrequests',
877 890 action='create', conditions={'function': check_repo,
878 891 'method': ['POST']},
879 892 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
880 893
881 894 rmap.connect('pullrequest_repo_refs',
882 895 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
883 896 controller='pullrequests',
884 897 action='get_repo_refs',
885 898 conditions={'function': check_repo, 'method': ['GET']},
886 899 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
887 900
888 901 rmap.connect('pullrequest_repo_destinations',
889 902 '/{repo_name}/pull-request/repo-destinations',
890 903 controller='pullrequests',
891 904 action='get_repo_destinations',
892 905 conditions={'function': check_repo, 'method': ['GET']},
893 906 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
894 907
895 908 rmap.connect('pullrequest_show',
896 909 '/{repo_name}/pull-request/{pull_request_id}',
897 910 controller='pullrequests',
898 911 action='show', conditions={'function': check_repo,
899 912 'method': ['GET']},
900 913 requirements=URL_NAME_REQUIREMENTS)
901 914
902 915 rmap.connect('pullrequest_update',
903 916 '/{repo_name}/pull-request/{pull_request_id}',
904 917 controller='pullrequests',
905 918 action='update', conditions={'function': check_repo,
906 919 'method': ['PUT']},
907 920 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
908 921
909 922 rmap.connect('pullrequest_merge',
910 923 '/{repo_name}/pull-request/{pull_request_id}',
911 924 controller='pullrequests',
912 925 action='merge', conditions={'function': check_repo,
913 926 'method': ['POST']},
914 927 requirements=URL_NAME_REQUIREMENTS)
915 928
916 929 rmap.connect('pullrequest_delete',
917 930 '/{repo_name}/pull-request/{pull_request_id}',
918 931 controller='pullrequests',
919 932 action='delete', conditions={'function': check_repo,
920 933 'method': ['DELETE']},
921 934 requirements=URL_NAME_REQUIREMENTS)
922 935
923 936 rmap.connect('pullrequest_show_all',
924 937 '/{repo_name}/pull-request',
925 938 controller='pullrequests',
926 939 action='show_all', conditions={'function': check_repo,
927 940 'method': ['GET']},
928 941 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
929 942
930 943 rmap.connect('pullrequest_comment',
931 944 '/{repo_name}/pull-request-comment/{pull_request_id}',
932 945 controller='pullrequests',
933 946 action='comment', conditions={'function': check_repo,
934 947 'method': ['POST']},
935 948 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
936 949
937 950 rmap.connect('pullrequest_comment_delete',
938 951 '/{repo_name}/pull-request-comment/{comment_id}/delete',
939 952 controller='pullrequests', action='delete_comment',
940 953 conditions={'function': check_repo, 'method': ['DELETE']},
941 954 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
942 955
943 956 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
944 957 controller='summary', conditions={'function': check_repo},
945 958 requirements=URL_NAME_REQUIREMENTS)
946 959
947 960 rmap.connect('branches_home', '/{repo_name}/branches',
948 961 controller='branches', conditions={'function': check_repo},
949 962 requirements=URL_NAME_REQUIREMENTS)
950 963
951 964 rmap.connect('tags_home', '/{repo_name}/tags',
952 965 controller='tags', conditions={'function': check_repo},
953 966 requirements=URL_NAME_REQUIREMENTS)
954 967
955 968 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
956 969 controller='bookmarks', conditions={'function': check_repo},
957 970 requirements=URL_NAME_REQUIREMENTS)
958 971
959 972 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
960 973 controller='changelog', conditions={'function': check_repo},
961 974 requirements=URL_NAME_REQUIREMENTS)
962 975
963 976 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
964 977 controller='changelog', action='changelog_summary',
965 978 conditions={'function': check_repo},
966 979 requirements=URL_NAME_REQUIREMENTS)
967 980
968 981 rmap.connect('changelog_file_home',
969 982 '/{repo_name}/changelog/{revision}/{f_path}',
970 983 controller='changelog', f_path=None,
971 984 conditions={'function': check_repo},
972 985 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
973 986
974 987 rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
975 988 controller='changelog', action='changelog_details',
976 989 conditions={'function': check_repo},
977 990 requirements=URL_NAME_REQUIREMENTS)
978 991
979 992 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
980 993 controller='files', revision='tip', f_path='',
981 994 conditions={'function': check_repo},
982 995 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
983 996
984 997 rmap.connect('files_home_simple_catchrev',
985 998 '/{repo_name}/files/{revision}',
986 999 controller='files', revision='tip', f_path='',
987 1000 conditions={'function': check_repo},
988 1001 requirements=URL_NAME_REQUIREMENTS)
989 1002
990 1003 rmap.connect('files_home_simple_catchall',
991 1004 '/{repo_name}/files',
992 1005 controller='files', revision='tip', f_path='',
993 1006 conditions={'function': check_repo},
994 1007 requirements=URL_NAME_REQUIREMENTS)
995 1008
996 1009 rmap.connect('files_history_home',
997 1010 '/{repo_name}/history/{revision}/{f_path}',
998 1011 controller='files', action='history', revision='tip', f_path='',
999 1012 conditions={'function': check_repo},
1000 1013 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1001 1014
1002 1015 rmap.connect('files_authors_home',
1003 1016 '/{repo_name}/authors/{revision}/{f_path}',
1004 1017 controller='files', action='authors', revision='tip', f_path='',
1005 1018 conditions={'function': check_repo},
1006 1019 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1007 1020
1008 1021 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
1009 1022 controller='files', action='diff', f_path='',
1010 1023 conditions={'function': check_repo},
1011 1024 requirements=URL_NAME_REQUIREMENTS)
1012 1025
1013 1026 rmap.connect('files_diff_2way_home',
1014 1027 '/{repo_name}/diff-2way/{f_path}',
1015 1028 controller='files', action='diff_2way', f_path='',
1016 1029 conditions={'function': check_repo},
1017 1030 requirements=URL_NAME_REQUIREMENTS)
1018 1031
1019 1032 rmap.connect('files_rawfile_home',
1020 1033 '/{repo_name}/rawfile/{revision}/{f_path}',
1021 1034 controller='files', action='rawfile', revision='tip',
1022 1035 f_path='', conditions={'function': check_repo},
1023 1036 requirements=URL_NAME_REQUIREMENTS)
1024 1037
1025 1038 rmap.connect('files_raw_home',
1026 1039 '/{repo_name}/raw/{revision}/{f_path}',
1027 1040 controller='files', action='raw', revision='tip', f_path='',
1028 1041 conditions={'function': check_repo},
1029 1042 requirements=URL_NAME_REQUIREMENTS)
1030 1043
1031 1044 rmap.connect('files_render_home',
1032 1045 '/{repo_name}/render/{revision}/{f_path}',
1033 1046 controller='files', action='index', revision='tip', f_path='',
1034 1047 rendered=True, conditions={'function': check_repo},
1035 1048 requirements=URL_NAME_REQUIREMENTS)
1036 1049
1037 1050 rmap.connect('files_annotate_home',
1038 1051 '/{repo_name}/annotate/{revision}/{f_path}',
1039 1052 controller='files', action='index', revision='tip',
1040 1053 f_path='', annotate=True, conditions={'function': check_repo},
1041 1054 requirements=URL_NAME_REQUIREMENTS)
1042 1055
1043 1056 rmap.connect('files_edit',
1044 1057 '/{repo_name}/edit/{revision}/{f_path}',
1045 1058 controller='files', action='edit', revision='tip',
1046 1059 f_path='',
1047 1060 conditions={'function': check_repo, 'method': ['POST']},
1048 1061 requirements=URL_NAME_REQUIREMENTS)
1049 1062
1050 1063 rmap.connect('files_edit_home',
1051 1064 '/{repo_name}/edit/{revision}/{f_path}',
1052 1065 controller='files', action='edit_home', revision='tip',
1053 1066 f_path='', conditions={'function': check_repo},
1054 1067 requirements=URL_NAME_REQUIREMENTS)
1055 1068
1056 1069 rmap.connect('files_add',
1057 1070 '/{repo_name}/add/{revision}/{f_path}',
1058 1071 controller='files', action='add', revision='tip',
1059 1072 f_path='',
1060 1073 conditions={'function': check_repo, 'method': ['POST']},
1061 1074 requirements=URL_NAME_REQUIREMENTS)
1062 1075
1063 1076 rmap.connect('files_add_home',
1064 1077 '/{repo_name}/add/{revision}/{f_path}',
1065 1078 controller='files', action='add_home', revision='tip',
1066 1079 f_path='', conditions={'function': check_repo},
1067 1080 requirements=URL_NAME_REQUIREMENTS)
1068 1081
1069 1082 rmap.connect('files_delete',
1070 1083 '/{repo_name}/delete/{revision}/{f_path}',
1071 1084 controller='files', action='delete', revision='tip',
1072 1085 f_path='',
1073 1086 conditions={'function': check_repo, 'method': ['POST']},
1074 1087 requirements=URL_NAME_REQUIREMENTS)
1075 1088
1076 1089 rmap.connect('files_delete_home',
1077 1090 '/{repo_name}/delete/{revision}/{f_path}',
1078 1091 controller='files', action='delete_home', revision='tip',
1079 1092 f_path='', conditions={'function': check_repo},
1080 1093 requirements=URL_NAME_REQUIREMENTS)
1081 1094
1082 1095 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1083 1096 controller='files', action='archivefile',
1084 1097 conditions={'function': check_repo},
1085 1098 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1086 1099
1087 1100 rmap.connect('files_nodelist_home',
1088 1101 '/{repo_name}/nodelist/{revision}/{f_path}',
1089 1102 controller='files', action='nodelist',
1090 1103 conditions={'function': check_repo},
1091 1104 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1092 1105
1093 1106 rmap.connect('files_metadata_list_home',
1094 1107 '/{repo_name}/metadata_list/{revision}/{f_path}',
1095 1108 controller='files', action='metadata_list',
1096 1109 conditions={'function': check_repo},
1097 1110 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1098 1111
1099 1112 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1100 1113 controller='forks', action='fork_create',
1101 1114 conditions={'function': check_repo, 'method': ['POST']},
1102 1115 requirements=URL_NAME_REQUIREMENTS)
1103 1116
1104 1117 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1105 1118 controller='forks', action='fork',
1106 1119 conditions={'function': check_repo},
1107 1120 requirements=URL_NAME_REQUIREMENTS)
1108 1121
1109 1122 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1110 1123 controller='forks', action='forks',
1111 1124 conditions={'function': check_repo},
1112 1125 requirements=URL_NAME_REQUIREMENTS)
1113 1126
1114 1127 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1115 1128 controller='followers', action='followers',
1116 1129 conditions={'function': check_repo},
1117 1130 requirements=URL_NAME_REQUIREMENTS)
1118 1131
1119 1132 # must stay here so the group/repo catch-all patterns match properly
1120 1133 _connect_with_slash(
1121 1134 rmap, 'repo_group_home', '/{group_name}',
1122 1135 controller='home', action='index_repo_group',
1123 1136 conditions={'function': check_group},
1124 1137 requirements=URL_NAME_REQUIREMENTS)
1125 1138
1126 1139 # catch all, at the end
1127 1140 _connect_with_slash(
1128 1141 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1129 1142 controller='summary', action='index',
1130 1143 conditions={'function': check_repo},
1131 1144 requirements=URL_NAME_REQUIREMENTS)
1132 1145
1133 1146 return rmap
1134 1147
1135 1148
1136 1149 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1137 1150 """
1138 1151 Connect a route with an optional trailing slash in `path`.
1139 1152 """
1140 1153 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1141 1154 mapper.connect(name, path, *args, **kwargs)
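
A usage sketch only, against the plain Routes library outside of RhodeCode (the repo name below is illustrative): _connect_with_slash registers two routes so the same URL resolves both with and without a trailing slash.

    from routes import Mapper

    rmap = Mapper()
    # same effect as _connect_with_slash(rmap, 'summary_home', '/{repo_name}', ...)
    rmap.connect('summary_home_slash', '/{repo_name}/',
                 controller='summary', action='index')
    rmap.connect('summary_home', '/{repo_name}',
                 controller='summary', action='index')

    print(rmap.match('/some-repo'))   # {'controller': 'summary', 'action': 'index', ...}
    print(rmap.match('/some-repo/'))  # matches the '_slash' variant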
@@ -1,57 +1,66 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from pyramid.threadlocal import get_current_registry
20 20
21 21
22 def trigger(event):
22 def trigger(event, registry=None):
23 23 """
 24 24     Helper method to send an event. This wraps the pyramid logic needed to
 25 25     notify all registered subscribers.
26 26 """
 27 27     # For the first step we are using pyramid's thread locals here. If the
28 28 # event mechanism works out as a good solution we should think about
29 29 # passing the registry as an argument to get rid of it.
30 registry = get_current_registry()
30 registry = registry or get_current_registry()
31 31 registry.notify(event)
32 32
33 # Until we can work around the problem that VCS operations do not have a
34 # pyramid context to work with, we send the events to integrations directly
35
 36     # Later it will be possible to use regular pyramid subscribers, i.e.:
37 # config.add_subscriber(integrations_event_handler, RhodecodeEvent)
38 from rhodecode.integrations import integrations_event_handler
39 if isinstance(event, RhodecodeEvent):
40 integrations_event_handler(event)
41
33 42
34 43 from rhodecode.events.base import RhodecodeEvent
35 44
36 45 from rhodecode.events.user import (
37 46 UserPreCreate,
38 47 UserPreUpdate,
39 48 UserRegistered
40 49 )
41 50
42 51 from rhodecode.events.repo import (
43 52 RepoEvent,
44 RepoPreCreateEvent, RepoCreatedEvent,
45 RepoPreDeleteEvent, RepoDeletedEvent,
53 RepoPreCreateEvent, RepoCreateEvent,
54 RepoPreDeleteEvent, RepoDeleteEvent,
46 55 RepoPrePushEvent, RepoPushEvent,
47 56 RepoPrePullEvent, RepoPullEvent,
48 57 )
49 58
50 59 from rhodecode.events.pullrequest import (
51 60 PullRequestEvent,
52 61 PullRequestCreateEvent,
53 62 PullRequestUpdateEvent,
54 63 PullRequestReviewEvent,
55 64 PullRequestMergeEvent,
56 65 PullRequestCloseEvent,
57 ) No newline at end of file
66 )
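
A minimal sketch of exercising the new optional registry argument, assuming a dev checkout where rhodecode.events is importable; FakeEvent and the subscriber list are illustrative and not part of this change.

    from pyramid import testing
    from rhodecode.events import trigger

    class FakeEvent(object):
        """Illustrative event; real callers pass RhodecodeEvent subclasses."""

    seen = []
    config = testing.setUp()                       # autocommitting test configurator
    config.add_subscriber(seen.append, FakeEvent)  # regular pyramid subscriber

    trigger(FakeEvent(), registry=config.registry)
    assert len(seen) == 1                          # subscriber was notified
    testing.tearDown()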
@@ -1,71 +1,59 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from datetime import datetime
20 from marshmallow import Schema, fields
21 20 from pyramid.threadlocal import get_current_request
22 21 from rhodecode.lib.utils2 import AttributeDict
23 22
24 23
 25 24 # this is a user object to be used for events caused by the system (e.g. shell)
26 25 SYSTEM_USER = AttributeDict(dict(
27 26 username='__SYSTEM__'
28 27 ))
29 28
30 29
31 class UserSchema(Schema):
32 """
33 Marshmallow schema for a user
34 """
35 username = fields.Str()
36
37
38 class RhodecodeEventSchema(Schema):
39 """
40 Marshmallow schema for a rhodecode event
41 """
42 utc_timestamp = fields.DateTime()
43 actor = fields.Nested(UserSchema)
44 actor_ip = fields.Str()
45 name = fields.Str()
46
47
48 30 class RhodecodeEvent(object):
49 31 """
50 32 Base event class for all Rhodecode events
51 33 """
52 MarshmallowSchema = RhodecodeEventSchema
53
54 34 def __init__(self):
55 35 self.request = get_current_request()
56 36 self.utc_timestamp = datetime.utcnow()
57 37
58 38 @property
59 39 def actor(self):
60 40 if self.request:
61 41 return self.request.user.get_instance()
62 42 return SYSTEM_USER
63 43
64 44 @property
65 45 def actor_ip(self):
66 46 if self.request:
67 47 return self.request.user.ip_addr
68 48 return '<no ip available>'
69 49
70 50 def as_dict(self):
71 return self.MarshmallowSchema().dump(self).data
51 data = {
52 'name': self.name,
53 'utc_timestamp': self.utc_timestamp,
54 'actor_ip': self.actor_ip,
55 'actor': {
56 'username': self.actor.username
57 }
58 }
59 return data No newline at end of file
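
For reference, a hedged sketch of the dictionary the hand-rolled as_dict() now produces for a base event; the values are illustrative, and outside a request the actor falls back to SYSTEM_USER.

    from datetime import datetime

    # illustrative shape only, not literal output
    example = {
        'name': 'repo-push',                       # supplied by the concrete subclass
        'utc_timestamp': datetime(2016, 5, 1, 12, 0, 0),
        'actor_ip': '<no ip available>',           # no pyramid request in scope
        'actor': {'username': '__SYSTEM__'},       # SYSTEM_USER fallback
    }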
@@ -1,97 +1,97 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20 19
20 from rhodecode.translation import lazy_ugettext
21 21 from rhodecode.events.repo import RepoEvent
22 22
23 23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
45 24 class PullRequestEvent(RepoEvent):
46 25 """
47 26 Base class for pull request events.
48 27
49 28 :param pullrequest: a :class:`PullRequest` instance
50 29 """
51 MarshmallowSchema = PullRequestEventSchema
52 30
53 31 def __init__(self, pullrequest):
54 32 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
55 33 self.pullrequest = pullrequest
56 34
35 def as_dict(self):
36 from rhodecode.model.pull_request import PullRequestModel
37 data = super(PullRequestEvent, self).as_dict()
38
39 commits = self._commits_as_dict(self.pullrequest.revisions)
40 issues = self._issues_as_dict(commits)
41
42 data.update({
43 'pullrequest': {
44 'title': self.pullrequest.title,
45 'issues': issues,
46 'pull_request_id': self.pullrequest.pull_request_id,
47 'url': PullRequestModel().get_url(self.pullrequest)
48 }
49 })
50 return data
51
57 52
58 53 class PullRequestCreateEvent(PullRequestEvent):
59 54 """
60 55 An instance of this class is emitted as an :term:`event` after a pull
61 56 request is created.
62 57 """
63 58 name = 'pullrequest-create'
59 display_name = lazy_ugettext('pullrequest created')
64 60
65 61
66 62 class PullRequestCloseEvent(PullRequestEvent):
67 63 """
68 64 An instance of this class is emitted as an :term:`event` after a pull
69 65 request is closed.
70 66 """
71 67 name = 'pullrequest-close'
68 display_name = lazy_ugettext('pullrequest closed')
72 69
73 70
74 71 class PullRequestUpdateEvent(PullRequestEvent):
75 72 """
76 73 An instance of this class is emitted as an :term:`event` after a pull
77 74 request is updated.
78 75 """
79 76 name = 'pullrequest-update'
77 display_name = lazy_ugettext('pullrequest updated')
80 78
81 79
82 80 class PullRequestMergeEvent(PullRequestEvent):
83 81 """
84 82 An instance of this class is emitted as an :term:`event` after a pull
85 83 request is merged.
86 84 """
87 85 name = 'pullrequest-merge'
86 display_name = lazy_ugettext('pullrequest merged')
88 87
89 88
90 89 class PullRequestReviewEvent(PullRequestEvent):
91 90 """
92 91 An instance of this class is emitted as an :term:`event` after a pull
93 92 request is reviewed.
94 93 """
95 94 name = 'pullrequest-review'
95 display_name = lazy_ugettext('pullrequest reviewed')
96 96
97 97
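
A hedged sketch of consuming these events with a regular pyramid subscriber; the handler and the includeme module are illustrative and not part of this change.

    from rhodecode.events import PullRequestCreateEvent

    def log_new_pull_request(event):
        # as_dict() now carries the nested 'pullrequest' payload built above
        data = event.as_dict()
        print(data['pullrequest']['pull_request_id'], data['pullrequest']['url'])

    def includeme(config):
        # standard pyramid wiring; trigger() notifies this via registry.notify
        config.add_subscriber(log_new_pull_request, PullRequestCreateEvent)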
@@ -1,149 +1,219 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
19 import logging
20 20
21 from rhodecode.translation import lazy_ugettext
21 22 from rhodecode.model.db import User, Repository, Session
22 23 from rhodecode.events.base import RhodecodeEvent
23 24
24
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repo = fields.Nested(RepositorySchema)
25 log = logging.getLogger()
44 26
45 27
46 28 class RepoEvent(RhodecodeEvent):
47 29 """
48 30 Base class for events acting on a repository.
49 31
50 32 :param repo: a :class:`Repository` instance
51 33 """
52 MarshmallowSchema = RepoEventSchema
53 34
54 35 def __init__(self, repo):
55 36 super(RepoEvent, self).__init__()
56 37 self.repo = repo
57 38
39 def as_dict(self):
40 from rhodecode.model.repo import RepoModel
41 data = super(RepoEvent, self).as_dict()
42 data.update({
43 'repo': {
44 'repo_id': self.repo.repo_id,
45 'repo_name': self.repo.repo_name,
46 'url': RepoModel().get_url(self.repo)
47 }
48 })
49 return data
50
51 def _commits_as_dict(self, commit_ids):
52 """ Helper function to serialize commit_ids """
53
54 from rhodecode.lib.utils2 import extract_mentioned_users
55 from rhodecode.model.db import Repository
56 from rhodecode.lib import helpers as h
57 from rhodecode.lib.helpers import process_patterns
58 from rhodecode.lib.helpers import urlify_commit_message
59 if not commit_ids:
60 return []
61 commits = []
62 reviewers = []
63 vcs_repo = self.repo.scm_instance(cache=False)
64 try:
65 for commit_id in commit_ids:
66 cs = vcs_repo.get_changeset(commit_id)
67 cs_data = cs.__json__()
68 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
69 cs_data['reviewers'] = reviewers
70 cs_data['url'] = h.url('changeset_home',
71 repo_name=self.repo.repo_name,
72 revision=cs_data['raw_id'],
73 qualified=True
74 )
75 urlified_message, issues_data = process_patterns(
76 cs_data['message'], self.repo.repo_name)
77 cs_data['issues'] = issues_data
78 cs_data['message_html'] = urlify_commit_message(cs_data['message'],
79 self.repo.repo_name)
80 commits.append(cs_data)
81 except Exception as e:
82 log.exception(e)
 83             # we don't send any commits when a crash happens; only the full
 84             # list matters, so we short-circuit here.
85 return []
86 return commits
87
88 def _issues_as_dict(self, commits):
89 """ Helper function to serialize issues from commits """
90 issues = {}
91 for commit in commits:
92 for issue in commit['issues']:
93 issues[issue['id']] = issue
94 return issues
95
58 96
59 97 class RepoPreCreateEvent(RepoEvent):
60 98 """
61 99 An instance of this class is emitted as an :term:`event` before a repo is
62 100 created.
63 101 """
64 102 name = 'repo-pre-create'
103 display_name = lazy_ugettext('repository pre create')
65 104
66 105
67 class RepoCreatedEvent(RepoEvent):
106 class RepoCreateEvent(RepoEvent):
68 107 """
69 108 An instance of this class is emitted as an :term:`event` whenever a repo is
70 109 created.
71 110 """
72 name = 'repo-created'
111 name = 'repo-create'
112 display_name = lazy_ugettext('repository created')
73 113
74 114
75 115 class RepoPreDeleteEvent(RepoEvent):
76 116 """
 77 117     An instance of this class is emitted as an :term:`event` before a repo is
 78 118     deleted.
79 119 """
80 120 name = 'repo-pre-delete'
121 display_name = lazy_ugettext('repository pre delete')
81 122
82 123
83 class RepoDeletedEvent(RepoEvent):
124 class RepoDeleteEvent(RepoEvent):
84 125 """
85 126 An instance of this class is emitted as an :term:`event` whenever a repo is
 86 127     deleted.
87 128 """
88 name = 'repo-deleted'
129 name = 'repo-delete'
130 display_name = lazy_ugettext('repository deleted')
89 131
90 132
91 133 class RepoVCSEvent(RepoEvent):
92 134 """
93 135 Base class for events triggered by the VCS
94 136 """
95 137 def __init__(self, repo_name, extras):
96 138 self.repo = Repository.get_by_repo_name(repo_name)
97 139 if not self.repo:
98 140 raise Exception('repo by this name %s does not exist' % repo_name)
99 141 self.extras = extras
100 142 super(RepoVCSEvent, self).__init__(self.repo)
101 143
102 144 @property
103 145 def actor(self):
104 146 if self.extras.get('username'):
105 147 return User.get_by_username(self.extras['username'])
106 148
107 149 @property
108 150 def actor_ip(self):
109 151 if self.extras.get('ip'):
110 152 return self.extras['ip']
111 153
112 154
113 155 class RepoPrePullEvent(RepoVCSEvent):
114 156 """
115 157 An instance of this class is emitted as an :term:`event` before commits
116 158 are pulled from a repo.
117 159 """
118 160 name = 'repo-pre-pull'
161 display_name = lazy_ugettext('repository pre pull')
119 162
120 163
121 164 class RepoPullEvent(RepoVCSEvent):
122 165 """
123 166 An instance of this class is emitted as an :term:`event` after commits
124 167 are pulled from a repo.
125 168 """
126 169 name = 'repo-pull'
170 display_name = lazy_ugettext('repository pull')
127 171
128 172
129 173 class RepoPrePushEvent(RepoVCSEvent):
130 174 """
131 175 An instance of this class is emitted as an :term:`event` before commits
132 176 are pushed to a repo.
133 177 """
134 178 name = 'repo-pre-push'
179 display_name = lazy_ugettext('repository pre push')
135 180
136 181
137 182 class RepoPushEvent(RepoVCSEvent):
138 183 """
139 184 An instance of this class is emitted as an :term:`event` after commits
140 185 are pushed to a repo.
141 186
142 187 :param extras: (optional) dict of data from proxied VCS actions
143 188 """
144 189 name = 'repo-push'
190 display_name = lazy_ugettext('repository push')
145 191
146 192 def __init__(self, repo_name, pushed_commit_ids, extras):
147 193 super(RepoPushEvent, self).__init__(repo_name, extras)
148 194 self.pushed_commit_ids = pushed_commit_ids
149 195
196 def as_dict(self):
197 data = super(RepoPushEvent, self).as_dict()
198 branch_url = repo_url = data['repo']['url']
199
200 commits = self._commits_as_dict(self.pushed_commit_ids)
201 issues = self._issues_as_dict(commits)
202
203 branches = set(
204 commit['branch'] for commit in commits if commit['branch'])
205 branches = [
206 {
207 'name': branch,
208 'url': '{}/changelog?branch={}'.format(
209 data['repo']['url'], branch)
210 }
211 for branch in branches
212 ]
213
214 data['push'] = {
215 'commits': commits,
216 'issues': issues,
217 'branches': branches,
218 }
219 return data No newline at end of file
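
A hedged sketch of the extra 'push' section that RepoPushEvent.as_dict() adds on top of the base repo payload; all values are illustrative.

    # illustrative payload only
    example = {
        'name': 'repo-push',
        'repo': {'repo_id': 1,
                 'repo_name': 'some-repo',
                 'url': 'http://rhodecode.local/some-repo'},
        'push': {
            'commits': [],        # serialized commits from _commits_as_dict()
            'issues': {},         # issue id -> issue data, from _issues_as_dict()
            'branches': [{'name': 'default',
                          'url': 'http://rhodecode.local/some-repo/changelog?branch=default'}],
        },
    }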
@@ -1,55 +1,65 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from zope.interface import implementer
20 20
21 from rhodecode.translation import lazy_ugettext
21 22 from rhodecode.events.base import RhodecodeEvent
22 23 from rhodecode.events.interfaces import (
23 24 IUserRegistered, IUserPreCreate, IUserPreUpdate)
24 25
25 26
26 27 @implementer(IUserRegistered)
27 28 class UserRegistered(RhodecodeEvent):
28 29 """
29 30 An instance of this class is emitted as an :term:`event` whenever a user
30 31 account is registered.
31 32 """
33 name = 'user-register'
34 display_name = lazy_ugettext('user registered')
35
32 36 def __init__(self, user, session):
33 37 self.user = user
34 38 self.session = session
35 39
36 40
37 41 @implementer(IUserPreCreate)
38 42 class UserPreCreate(RhodecodeEvent):
39 43 """
40 44 An instance of this class is emitted as an :term:`event` before a new user
41 45 object is created.
42 46 """
47 name = 'user-pre-create'
48 display_name = lazy_ugettext('user pre create')
49
43 50 def __init__(self, user_data):
44 51 self.user_data = user_data
45 52
46 53
47 54 @implementer(IUserPreUpdate)
48 55 class UserPreUpdate(RhodecodeEvent):
49 56 """
50 57 An instance of this class is emitted as an :term:`event` before a user
51 58 object is updated.
52 59 """
60 name = 'user-pre-update'
61 display_name = lazy_ugettext('user pre update')
62
53 63 def __init__(self, user, user_data):
54 64 self.user = user
55 65 self.user_data = user_data
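
A hedged sketch of emitting one of these events, assuming a fully configured RhodeCode application context (database and registry available), since trigger() also forwards RhodecodeEvent instances to the integrations handler; the user_data values are illustrative.

    from rhodecode.events import trigger, UserPreCreate

    user_data = {'username': 'example', 'email': 'example@example.com'}
    # notifies pyramid subscribers and, because this is a RhodecodeEvent,
    # the integrations handler as well
    trigger(UserPreCreate(user_data))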
@@ -1,1900 +1,1931 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
 24 24 Consists of functions typically used within templates, but also
 25 25 available to controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 import pygments
40 40
41 41 from datetime import datetime
42 42 from functools import partial
43 43 from pygments.formatters.html import HtmlFormatter
44 44 from pygments import highlight as code_highlight
45 45 from pygments.lexers import (
46 46 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
47 from pylons import url
47 from pylons import url as pylons_url
48 48 from pylons.i18n.translation import _, ungettext
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from webhelpers.html import literal, HTML, escape
52 52 from webhelpers.html.tools import *
53 53 from webhelpers.html.builder import make_tag
54 54 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
55 55 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
56 56 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
57 57 submit, text, password, textarea, title, ul, xml_declaration, radio
58 58 from webhelpers.html.tools import auto_link, button_to, highlight, \
59 59 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
60 60 from webhelpers.pylonslib import Flash as _Flash
61 61 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
62 62 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
63 63 replace_whitespace, urlify, truncate, wrap_paragraphs
64 64 from webhelpers.date import time_ago_in_words
65 65 from webhelpers.paginate import Page as _Page
66 66 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
67 67 convert_boolean_attrs, NotGiven, _make_safe_id_component
68 68 from webhelpers2.number import format_byte_size
69 69
70 70 from rhodecode.lib.annotate import annotate_highlight
71 71 from rhodecode.lib.action_parser import action_parser
72 72 from rhodecode.lib.ext_json import json
73 73 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
74 74 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
75 75 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
76 76 AttributeDict, safe_int, md5, md5_safe
77 77 from rhodecode.lib.markup_renderer import MarkupRenderer
78 78 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
79 79 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
80 80 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
81 81 from rhodecode.model.changeset_status import ChangesetStatusModel
82 82 from rhodecode.model.db import Permission, User, Repository
83 83 from rhodecode.model.repo_group import RepoGroupModel
84 84 from rhodecode.model.settings import IssueTrackerSettingsModel
85 85
86 86 log = logging.getLogger(__name__)
87 87
88 88 DEFAULT_USER = User.DEFAULT_USER
89 89 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
90 90
91 def url(*args, **kw):
92 return pylons_url(*args, **kw)
93
94 def pylons_url_current(*args, **kw):
95 """
 96     This function overrides pylons.url.current(), which returns the current
 97     path, so that it also works from a pyramid-only context. This
 98     should be removed once the port to pyramid is complete.
99 """
100 if not args and not kw:
101 request = get_current_request()
102 return request.path
103 return pylons_url.current(*args, **kw)
104
105 url.current = pylons_url_current
106
91 107
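
A hedged sketch of what the shim preserves for existing callers; the route name and arguments are illustrative.

    from rhodecode.lib import helpers as h

    # existing template/controller code keeps calling the pylons-style helper:
    h.url('files_home', repo_name='some-repo', revision='tip', f_path='')
    # and url.current() now also works from a pyramid-only context:
    h.url.current()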
92 108 def html_escape(text, html_escape_table=None):
93 109 """Produce entities within text."""
94 110 if not html_escape_table:
95 111 html_escape_table = {
96 112 "&": "&amp;",
97 113 '"': "&quot;",
98 114 "'": "&apos;",
99 115 ">": "&gt;",
100 116 "<": "&lt;",
101 117 }
102 118 return "".join(html_escape_table.get(c, c) for c in text)
103 119
104 120
105 121 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
106 122 """
107 123 Truncate string ``s`` at the first occurrence of ``sub``.
108 124
109 125 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
110 126 """
111 127 suffix_if_chopped = suffix_if_chopped or ''
112 128 pos = s.find(sub)
113 129 if pos == -1:
114 130 return s
115 131
116 132 if inclusive:
117 133 pos += len(sub)
118 134
119 135 chopped = s[:pos]
120 136 left = s[pos:].strip()
121 137
122 138 if left and suffix_if_chopped:
123 139 chopped += suffix_if_chopped
124 140
125 141 return chopped
126 142
127 143
128 144 def shorter(text, size=20):
129 145 postfix = '...'
130 146 if len(text) > size:
131 147 return text[:size - len(postfix)] + postfix
132 148 return text
133 149
134 150
135 151 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
136 152 """
137 153 Reset button
138 154 """
139 155 _set_input_attrs(attrs, type, name, value)
140 156 _set_id_attr(attrs, id, name)
141 157 convert_boolean_attrs(attrs, ["disabled"])
142 158 return HTML.input(**attrs)
143 159
144 160 reset = _reset
145 161 safeid = _make_safe_id_component
146 162
147 163
148 164 def branding(name, length=40):
149 165 return truncate(name, length, indicator="")
150 166
151 167
152 168 def FID(raw_id, path):
153 169 """
 154 170     Creates a unique ID for a filenode based on a hash of its path and commit;
 155 171     it's safe to use in URLs.
156 172
157 173 :param raw_id:
158 174 :param path:
159 175 """
160 176
161 177 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
162 178
163 179
164 180 class _GetError(object):
165 181 """Get error from form_errors, and represent it as span wrapped error
166 182 message
167 183
168 184 :param field_name: field to fetch errors for
169 185 :param form_errors: form errors dict
170 186 """
171 187
172 188 def __call__(self, field_name, form_errors):
173 189 tmpl = """<span class="error_msg">%s</span>"""
174 190 if form_errors and field_name in form_errors:
175 191 return literal(tmpl % form_errors.get(field_name))
176 192
177 193 get_error = _GetError()
178 194
179 195
180 196 class _ToolTip(object):
181 197
182 198 def __call__(self, tooltip_title, trim_at=50):
183 199 """
184 200         Special function just to wrap our text into nicely formatted,
185 201         autowrapped text
186 202
187 203 :param tooltip_title:
188 204 """
189 205 tooltip_title = escape(tooltip_title)
190 206 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
191 207 return tooltip_title
192 208 tooltip = _ToolTip()
193 209
194 210
195 211 def files_breadcrumbs(repo_name, commit_id, file_path):
196 212 if isinstance(file_path, str):
197 213 file_path = safe_unicode(file_path)
198 214
199 215 # TODO: johbo: Is this always a url like path, or is this operating
200 216 # system dependent?
201 217 path_segments = file_path.split('/')
202 218
203 219 repo_name_html = escape(repo_name)
204 220 if len(path_segments) == 1 and path_segments[0] == '':
205 221 url_segments = [repo_name_html]
206 222 else:
207 223 url_segments = [
208 224 link_to(
209 225 repo_name_html,
210 226 url('files_home',
211 227 repo_name=repo_name,
212 228 revision=commit_id,
213 229 f_path=''),
214 230 class_='pjax-link')]
215 231
216 232 last_cnt = len(path_segments) - 1
217 233 for cnt, segment in enumerate(path_segments):
218 234 if not segment:
219 235 continue
220 236 segment_html = escape(segment)
221 237
222 238 if cnt != last_cnt:
223 239 url_segments.append(
224 240 link_to(
225 241 segment_html,
226 242 url('files_home',
227 243 repo_name=repo_name,
228 244 revision=commit_id,
229 245 f_path='/'.join(path_segments[:cnt + 1])),
230 246 class_='pjax-link'))
231 247 else:
232 248 url_segments.append(segment_html)
233 249
234 250 return literal('/'.join(url_segments))
235 251
236 252
237 253 class CodeHtmlFormatter(HtmlFormatter):
238 254 """
239 255     My code Html Formatter for source code
240 256 """
241 257
242 258 def wrap(self, source, outfile):
243 259 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
244 260
245 261 def _wrap_code(self, source):
246 262 for cnt, it in enumerate(source):
247 263 i, t = it
248 264 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
249 265 yield i, t
250 266
251 267 def _wrap_tablelinenos(self, inner):
252 268 dummyoutfile = StringIO.StringIO()
253 269 lncount = 0
254 270 for t, line in inner:
255 271 if t:
256 272 lncount += 1
257 273 dummyoutfile.write(line)
258 274
259 275 fl = self.linenostart
260 276 mw = len(str(lncount + fl - 1))
261 277 sp = self.linenospecial
262 278 st = self.linenostep
263 279 la = self.lineanchors
264 280 aln = self.anchorlinenos
265 281 nocls = self.noclasses
266 282 if sp:
267 283 lines = []
268 284
269 285 for i in range(fl, fl + lncount):
270 286 if i % st == 0:
271 287 if i % sp == 0:
272 288 if aln:
273 289 lines.append('<a href="#%s%d" class="special">%*d</a>' %
274 290 (la, i, mw, i))
275 291 else:
276 292 lines.append('<span class="special">%*d</span>' % (mw, i))
277 293 else:
278 294 if aln:
279 295 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
280 296 else:
281 297 lines.append('%*d' % (mw, i))
282 298 else:
283 299 lines.append('')
284 300 ls = '\n'.join(lines)
285 301 else:
286 302 lines = []
287 303 for i in range(fl, fl + lncount):
288 304 if i % st == 0:
289 305 if aln:
290 306 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
291 307 else:
292 308 lines.append('%*d' % (mw, i))
293 309 else:
294 310 lines.append('')
295 311 ls = '\n'.join(lines)
296 312
297 313 # in case you wonder about the seemingly redundant <div> here: since the
298 314 # content in the other cell also is wrapped in a div, some browsers in
299 315 # some configurations seem to mess up the formatting...
300 316 if nocls:
301 317 yield 0, ('<table class="%stable">' % self.cssclass +
302 318 '<tr><td><div class="linenodiv" '
303 319 'style="background-color: #f0f0f0; padding-right: 10px">'
304 320 '<pre style="line-height: 125%">' +
305 321 ls + '</pre></div></td><td id="hlcode" class="code">')
306 322 else:
307 323 yield 0, ('<table class="%stable">' % self.cssclass +
308 324 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
309 325 ls + '</pre></div></td><td id="hlcode" class="code">')
310 326 yield 0, dummyoutfile.getvalue()
311 327 yield 0, '</td></tr></table>'
312 328
313 329
314 330 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
315 331 def __init__(self, **kw):
316 332 # only show these line numbers if set
317 333 self.only_lines = kw.pop('only_line_numbers', [])
318 334 self.query_terms = kw.pop('query_terms', [])
319 335 self.max_lines = kw.pop('max_lines', 5)
320 336 self.line_context = kw.pop('line_context', 3)
321 337 self.url = kw.pop('url', None)
322 338
323 339 super(CodeHtmlFormatter, self).__init__(**kw)
324 340
325 341 def _wrap_code(self, source):
326 342 for cnt, it in enumerate(source):
327 343 i, t = it
328 344 t = '<pre>%s</pre>' % t
329 345 yield i, t
330 346
331 347 def _wrap_tablelinenos(self, inner):
332 348 yield 0, '<table class="code-highlight %stable">' % self.cssclass
333 349
334 350 last_shown_line_number = 0
335 351 current_line_number = 1
336 352
337 353 for t, line in inner:
338 354 if not t:
339 355 yield t, line
340 356 continue
341 357
342 358 if current_line_number in self.only_lines:
343 359 if last_shown_line_number + 1 != current_line_number:
344 360 yield 0, '<tr>'
345 361 yield 0, '<td class="line">...</td>'
346 362 yield 0, '<td id="hlcode" class="code"></td>'
347 363 yield 0, '</tr>'
348 364
349 365 yield 0, '<tr>'
350 366 if self.url:
351 367 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
352 368 self.url, current_line_number, current_line_number)
353 369 else:
354 370 yield 0, '<td class="line"><a href="">%i</a></td>' % (
355 371 current_line_number)
356 372 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
357 373 yield 0, '</tr>'
358 374
359 375 last_shown_line_number = current_line_number
360 376
361 377 current_line_number += 1
362 378
363 379
364 380 yield 0, '</table>'
365 381
366 382
367 383 def extract_phrases(text_query):
368 384 """
369 385     Extracts phrases from a search term string, making sure phrases
370 386     contained in double quotes are kept together, and discarding empty or
371 387     whitespace-only values, eg.
372 388
373 389 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
374 390
375 391 """
376 392
377 393 in_phrase = False
378 394 buf = ''
379 395 phrases = []
380 396 for char in text_query:
381 397 if in_phrase:
382 398 if char == '"': # end phrase
383 399 phrases.append(buf)
384 400 buf = ''
385 401 in_phrase = False
386 402 continue
387 403 else:
388 404 buf += char
389 405 continue
390 406 else:
391 407 if char == '"': # start phrase
392 408 in_phrase = True
393 409 phrases.append(buf)
394 410 buf = ''
395 411 continue
396 412 elif char == ' ':
397 413 phrases.append(buf)
398 414 buf = ''
399 415 continue
400 416 else:
401 417 buf += char
402 418
403 419 phrases.append(buf)
404 420 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
405 421 return phrases
406 422
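# Example usage (illustrative sketch): quoted phrases survive as single
# tokens, and empty/whitespace-only fragments are dropped.
#
#   >>> extract_phrases('some text "a phrase" more')
#   ['some', 'text', 'a phrase', 'more']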
407 423
408 424 def get_matching_offsets(text, phrases):
409 425 """
410 426     Returns a list of string offsets in `text` where the given `phrases` match
411 427
412 428 >>> get_matching_offsets('some text here', ['some', 'here'])
413 429 [(0, 4), (10, 14)]
414 430
415 431 """
416 432 offsets = []
417 433 for phrase in phrases:
418 434 for match in re.finditer(phrase, text):
419 435 offsets.append((match.start(), match.end()))
420 436
421 437 return offsets
422 438
423 439
424 440 def normalize_text_for_matching(x):
425 441 """
426 442     Replaces all non-alphanumeric characters with spaces and lowercases the string,
427 443 useful for comparing two text strings without punctuation
428 444 """
429 445 return re.sub(r'[^\w]', ' ', x.lower())
430 446
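# Example usage (illustrative sketch): punctuation is flattened to spaces
# and case is normalized before matching.
#
#   >>> normalize_text_for_matching('Foo-Bar.Baz')
#   'foo bar baz'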
431 447
432 448 def get_matching_line_offsets(lines, terms):
433 449     """ Return a dict of `lines` indices (starting from 1) matching a
434 450 text search query, along with `context` lines above/below matching lines
435 451
436 452 :param lines: list of strings representing lines
437 453 :param terms: search term string to match in lines eg. 'some text'
438 454 :param context: number of lines above/below a matching line to add to result
439 455 :param max_lines: cut off for lines of interest
440 456 eg.
441 457
442 458 text = '''
443 459 words words words
444 460 words words words
445 461 some text some
446 462 words words words
447 463 words words words
448 464 text here what
449 465 '''
450 466 get_matching_line_offsets(text, 'text', context=1)
451 467         {3: [(5, 9)], 6: [(0, 4)]}
452 468
453 469 """
454 470 matching_lines = {}
455 471 phrases = [normalize_text_for_matching(phrase)
456 472 for phrase in extract_phrases(terms)]
457 473
458 474 for line_index, line in enumerate(lines, start=1):
459 475 match_offsets = get_matching_offsets(
460 476 normalize_text_for_matching(line), phrases)
461 477 if match_offsets:
462 478 matching_lines[line_index] = match_offsets
463 479
464 480 return matching_lines
465 481
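# Example usage (illustrative sketch): the result maps 1-based line numbers
# to the (start, end) offsets of each match within that line.
#
#   >>> lines = ['words words', 'some text some', 'more text here']
#   >>> get_matching_line_offsets(lines, 'text')
#   {2: [(5, 9)], 3: [(5, 9)]}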
466 482
467 483 def get_lexer_safe(mimetype=None, filepath=None):
468 484 """
469 485 Tries to return a relevant pygments lexer using mimetype/filepath name,
470 486 defaulting to plain text if none could be found
471 487 """
472 488 lexer = None
473 489 try:
474 490 if mimetype:
475 491 lexer = get_lexer_for_mimetype(mimetype)
476 492 if not lexer:
477 493 lexer = get_lexer_for_filename(filepath)
478 494 except pygments.util.ClassNotFound:
479 495 pass
480 496
481 497 if not lexer:
482 498 lexer = get_lexer_by_name('text')
483 499
484 500 return lexer
485 501
486 502
487 503 def pygmentize(filenode, **kwargs):
488 504 """
489 505 pygmentize function using pygments
490 506
491 507 :param filenode:
492 508 """
493 509 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
494 510 return literal(code_highlight(filenode.content, lexer,
495 511 CodeHtmlFormatter(**kwargs)))
496 512
497 513
498 514 def pygmentize_annotation(repo_name, filenode, **kwargs):
499 515 """
500 516 pygmentize function for annotation
501 517
502 518 :param filenode:
503 519 """
504 520
505 521 color_dict = {}
506 522
507 523 def gen_color(n=10000):
508 524         """generator for getting n evenly distributed colors using
509 525         hsv color and golden ratio. It always returns the same order of colors
510 526
511 527 :returns: RGB tuple
512 528 """
513 529
514 530 def hsv_to_rgb(h, s, v):
515 531 if s == 0.0:
516 532 return v, v, v
517 533 i = int(h * 6.0) # XXX assume int() truncates!
518 534 f = (h * 6.0) - i
519 535 p = v * (1.0 - s)
520 536 q = v * (1.0 - s * f)
521 537 t = v * (1.0 - s * (1.0 - f))
522 538 i = i % 6
523 539 if i == 0:
524 540 return v, t, p
525 541 if i == 1:
526 542 return q, v, p
527 543 if i == 2:
528 544 return p, v, t
529 545 if i == 3:
530 546 return p, q, v
531 547 if i == 4:
532 548 return t, p, v
533 549 if i == 5:
534 550 return v, p, q
535 551
536 552 golden_ratio = 0.618033988749895
537 553 h = 0.22717784590367374
538 554
539 555 for _ in xrange(n):
540 556 h += golden_ratio
541 557 h %= 1
542 558 HSV_tuple = [h, 0.95, 0.95]
543 559 RGB_tuple = hsv_to_rgb(*HSV_tuple)
544 560 yield map(lambda x: str(int(x * 256)), RGB_tuple)
545 561
546 562 cgenerator = gen_color()
547 563
548 564 def get_color_string(commit_id):
549 565 if commit_id in color_dict:
550 566 col = color_dict[commit_id]
551 567 else:
552 568 col = color_dict[commit_id] = cgenerator.next()
553 569 return "color: rgb(%s)! important;" % (', '.join(col))
554 570
555 571 def url_func(repo_name):
556 572
557 573 def _url_func(commit):
558 574 author = commit.author
559 575 date = commit.date
560 576 message = tooltip(commit.message)
561 577
562 578 tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
563 579 " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
564 580 "</b> %s<br/></div>")
565 581
566 582 tooltip_html = tooltip_html % (author, date, message)
567 583 lnk_format = '%5s:%s' % ('r%s' % commit.idx, commit.short_id)
568 584 uri = link_to(
569 585 lnk_format,
570 586 url('changeset_home', repo_name=repo_name,
571 587 revision=commit.raw_id),
572 588 style=get_color_string(commit.raw_id),
573 589 class_='tooltip',
574 590 title=tooltip_html
575 591 )
576 592
577 593 uri += '\n'
578 594 return uri
579 595 return _url_func
580 596
581 597 return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
582 598
583 599
584 600 def is_following_repo(repo_name, user_id):
585 601 from rhodecode.model.scm import ScmModel
586 602 return ScmModel().is_following_repo(repo_name, user_id)
587 603
588 604
589 605 class _Message(object):
590 606 """A message returned by ``Flash.pop_messages()``.
591 607
592 608 Converting the message to a string returns the message text. Instances
593 609 also have the following attributes:
594 610
595 611 * ``message``: the message text.
596 612 * ``category``: the category specified when the message was created.
597 613 """
598 614
599 615 def __init__(self, category, message):
600 616 self.category = category
601 617 self.message = message
602 618
603 619 def __str__(self):
604 620 return self.message
605 621
606 622 __unicode__ = __str__
607 623
608 624 def __html__(self):
609 625 return escape(safe_unicode(self.message))
610 626
611 627
612 628 class Flash(_Flash):
613 629
614 630 def pop_messages(self):
615 631 """Return all accumulated messages and delete them from the session.
616 632
617 633 The return value is a list of ``Message`` objects.
618 634 """
619 635 from pylons import session
620 636
621 637 messages = []
622 638
623 639 # Pop the 'old' pylons flash messages. They are tuples of the form
624 640 # (category, message)
625 641 for cat, msg in session.pop(self.session_key, []):
626 642 messages.append(_Message(cat, msg))
627 643
628 644 # Pop the 'new' pyramid flash messages for each category as list
629 645 # of strings.
630 646 for cat in self.categories:
631 647 for msg in session.pop_flash(queue=cat):
632 648 messages.append(_Message(cat, msg))
633 649 # Map messages from the default queue to the 'notice' category.
634 650 for msg in session.pop_flash():
635 651 messages.append(_Message('notice', msg))
636 652
637 653 session.save()
638 654 return messages
639 655
640 656 flash = Flash()
641 657
642 658 #==============================================================================
643 659 # SCM FILTERS available via h.
644 660 #==============================================================================
645 661 from rhodecode.lib.vcs.utils import author_name, author_email
646 662 from rhodecode.lib.utils2 import credentials_filter, age as _age
647 663 from rhodecode.model.db import User, ChangesetStatus
648 664
649 665 age = _age
650 666 capitalize = lambda x: x.capitalize()
651 667 email = author_email
652 668 short_id = lambda x: x[:12]
653 669 hide_credentials = lambda x: ''.join(credentials_filter(x))
654 670
655 671
656 672 def age_component(datetime_iso, value=None, time_is_local=False):
657 673 title = value or format_date(datetime_iso)
658 674
659 675 # detect if we have a timezone info, otherwise, add it
660 676 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
661 677 tzinfo = '+00:00'
662 678
663 679 if time_is_local:
664 680 tzinfo = time.strftime("+%H:%M",
665 681 time.gmtime(
666 682 (datetime.now() - datetime.utcnow()).seconds + 1
667 683 )
668 684 )
669 685
670 686 return literal(
671 687 '<time class="timeago tooltip" '
672 688 'title="{1}" datetime="{0}{2}">{1}</time>'.format(
673 689 datetime_iso, title, tzinfo))
674 690
675 691
676 692 def _shorten_commit_id(commit_id):
677 693 from rhodecode import CONFIG
678 694 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
679 695 return commit_id[:def_len]
680 696
681 697
682 698 def show_id(commit):
683 699 """
684 700     Configurable function that shows the commit ID;
685 701     by default it's r123:fffeeefffeee
686 702
687 703 :param commit: commit instance
688 704 """
689 705 from rhodecode import CONFIG
690 706 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
691 707
692 708 raw_id = _shorten_commit_id(commit.raw_id)
693 709 if show_idx:
694 710 return 'r%s:%s' % (commit.idx, raw_id)
695 711 else:
696 712 return '%s' % (raw_id, )
697 713
698 714
699 715 def format_date(date):
700 716 """
701 717 use a standardized formatting for dates used in RhodeCode
702 718
703 719 :param date: date/datetime object
704 720 :return: formatted date
705 721 """
706 722
707 723 if date:
708 724 _fmt = "%a, %d %b %Y %H:%M:%S"
709 725 return safe_unicode(date.strftime(_fmt))
710 726
711 727 return u""
712 728
713 729
714 730 class _RepoChecker(object):
715 731
716 732 def __init__(self, backend_alias):
717 733 self._backend_alias = backend_alias
718 734
719 735 def __call__(self, repository):
720 736 if hasattr(repository, 'alias'):
721 737 _type = repository.alias
722 738 elif hasattr(repository, 'repo_type'):
723 739 _type = repository.repo_type
724 740 else:
725 741 _type = repository
726 742 return _type == self._backend_alias
727 743
728 744 is_git = _RepoChecker('git')
729 745 is_hg = _RepoChecker('hg')
730 746 is_svn = _RepoChecker('svn')
731 747
732 748
733 749 def get_repo_type_by_name(repo_name):
734 750 repo = Repository.get_by_repo_name(repo_name)
735 751 return repo.repo_type
736 752
737 753
738 754 def is_svn_without_proxy(repository):
739 755 from rhodecode import CONFIG
740 756 if is_svn(repository):
741 757 if not CONFIG.get('rhodecode_proxy_subversion_http_requests', False):
742 758 return True
743 759 return False
744 760
745 761
746 762 def discover_user(author):
747 763 """
748 764     Tries to discover a RhodeCode User based on the author string. The author string
749 765 is typically `FirstName LastName <email@address.com>`
750 766 """
751 767
752 768 # if author is already an instance use it for extraction
753 769 if isinstance(author, User):
754 770 return author
755 771
756 772 # Valid email in the attribute passed, see if they're in the system
757 773 _email = author_email(author)
758 774 if _email != '':
759 775 user = User.get_by_email(_email, case_insensitive=True, cache=True)
760 776 if user is not None:
761 777 return user
762 778
763 779 # Maybe it's a username, we try to extract it and fetch by username ?
764 780 _author = author_name(author)
765 781 user = User.get_by_username(_author, case_insensitive=True, cache=True)
766 782 if user is not None:
767 783 return user
768 784
769 785 return None
770 786
771 787
772 788 def email_or_none(author):
773 789 # extract email from the commit string
774 790 _email = author_email(author)
775 791
776 792 # If we have an email, use it, otherwise
777 793 # see if it contains a username we can get an email from
778 794 if _email != '':
779 795 return _email
780 796 else:
781 797 user = User.get_by_username(author_name(author), case_insensitive=True,
782 798 cache=True)
783 799
784 800 if user is not None:
785 801 return user.email
786 802
787 803 # No valid email, not a valid user in the system, none!
788 804 return None
789 805
790 806
791 807 def link_to_user(author, length=0, **kwargs):
792 808 user = discover_user(author)
793 809 # user can be None, but if we have it already it means we can re-use it
794 810 # in the person() function, so we save 1 intensive-query
795 811 if user:
796 812 author = user
797 813
798 814 display_person = person(author, 'username_or_name_or_email')
799 815 if length:
800 816 display_person = shorter(display_person, length)
801 817
802 818 if user:
803 819 return link_to(
804 820 escape(display_person),
805 821 url('user_profile', username=user.username),
806 822 **kwargs)
807 823 else:
808 824 return escape(display_person)
809 825
810 826
811 827 def person(author, show_attr="username_and_name"):
812 828 user = discover_user(author)
813 829 if user:
814 830 return getattr(user, show_attr)
815 831 else:
816 832 _author = author_name(author)
817 833 _email = email(author)
818 834 return _author or _email
819 835
820 836
821 837 def author_string(email):
822 838 if email:
823 839 user = User.get_by_email(email, case_insensitive=True, cache=True)
824 840 if user:
825 841 if user.firstname or user.lastname:
826 842 return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
827 843 else:
828 844 return email
829 845 else:
830 846 return email
831 847 else:
832 848 return None
833 849
834 850
835 851 def person_by_id(id_, show_attr="username_and_name"):
836 852 # attr to return from fetched user
837 853 person_getter = lambda usr: getattr(usr, show_attr)
838 854
839 855 #maybe it's an ID ?
840 856 if str(id_).isdigit() or isinstance(id_, int):
841 857 id_ = int(id_)
842 858 user = User.get(id_)
843 859 if user is not None:
844 860 return person_getter(user)
845 861 return id_
846 862
847 863
848 864 def gravatar_with_user(author, show_disabled=False):
849 865 from rhodecode.lib.utils import PartialRenderer
850 866 _render = PartialRenderer('base/base.html')
851 867 return _render('gravatar_with_user', author, show_disabled=show_disabled)
852 868
853 869
854 870 def desc_stylize(value):
855 871 """
856 872 converts tags from value into html equivalent
857 873
858 874 :param value:
859 875 """
860 876 if not value:
861 877 return ''
862 878
863 879 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
864 880 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
865 881 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
866 882 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
867 883 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
868 884 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
869 885 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
870 886 '<div class="metatag" tag="lang">\\2</div>', value)
871 887 value = re.sub(r'\[([a-z]+)\]',
872 888 '<div class="metatag" tag="\\1">\\1</div>', value)
873 889
874 890 return value
875 891
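# Example usage (illustrative sketch): recognized [tag] markers in a repo
# description are turned into metatag divs, other text is left untouched.
#
#   >>> desc_stylize('[stale] [lang => python]')
#   '<div class="metatag" tag="stale">stale</div> <div class="metatag" tag="lang">python</div>'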
876 892
877 893 def escaped_stylize(value):
878 894 """
879 895 converts tags from value into html equivalent, but escaping its value first
880 896 """
881 897 if not value:
882 898 return ''
883 899
884 900 # Using default webhelper escape method, but has to force it as a
885 901 # plain unicode instead of a markup tag to be used in regex expressions
886 902 value = unicode(escape(safe_unicode(value)))
887 903
888 904 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
889 905 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
890 906 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
891 907 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
892 908 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
893 909 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
894 910 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
895 911 '<div class="metatag" tag="lang">\\2</div>', value)
896 912 value = re.sub(r'\[([a-z]+)\]',
897 913 '<div class="metatag" tag="\\1">\\1</div>', value)
898 914
899 915 return value
900 916
901 917
902 918 def bool2icon(value):
903 919 """
904 920     Returns the boolean value of a given value, represented as an html element
905 921     with classes that render icons
906 922
907 923 :param value: given value to convert to html node
908 924 """
909 925
910 926 if value: # does bool conversion
911 927 return HTML.tag('i', class_="icon-true")
912 928 else: # not true as bool
913 929 return HTML.tag('i', class_="icon-false")
914 930
915 931
916 932 #==============================================================================
917 933 # PERMS
918 934 #==============================================================================
919 935 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
920 936 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
921 937 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token
922 938
923 939
924 940 #==============================================================================
925 941 # GRAVATAR URL
926 942 #==============================================================================
927 943 class InitialsGravatar(object):
928 944 def __init__(self, email_address, first_name, last_name, size=30,
929 945 background=None, text_color='#fff'):
930 946 self.size = size
931 947 self.first_name = first_name
932 948 self.last_name = last_name
933 949 self.email_address = email_address
934 950 self.background = background or self.str2color(email_address)
935 951 self.text_color = text_color
936 952
937 953 def get_color_bank(self):
938 954 """
939 955 returns a predefined list of colors that gravatars can use.
940 956 Those are randomized distinct colors that guarantee readability and
941 957 uniqueness.
942 958
943 959 generated with: http://phrogz.net/css/distinct-colors.html
944 960 """
945 961 return [
946 962 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
947 963 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
948 964 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
949 965 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
950 966 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
951 967 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
952 968 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
953 969 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
954 970 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
955 971 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
956 972 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
957 973 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
958 974 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
959 975 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
960 976 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
961 977 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
962 978 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
963 979 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
964 980 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
965 981 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
966 982 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
967 983 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
968 984 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
969 985 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
970 986 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
971 987 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
972 988 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
973 989 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
974 990 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
975 991 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
976 992 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
977 993 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
978 994 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
979 995 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
980 996 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
981 997 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
982 998 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
983 999 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
984 1000 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
985 1001 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
986 1002 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
987 1003 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
988 1004 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
989 1005 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
990 1006 '#4f8c46', '#368dd9', '#5c0073'
991 1007 ]
992 1008
993 1009 def rgb_to_hex_color(self, rgb_tuple):
994 1010 """
995 1011         Converts a passed rgb_tuple to a hex color.
996 1012
997 1013 :param rgb_tuple: tuple with 3 ints represents rgb color space
998 1014 """
999 1015 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1000 1016
1001 1017 def email_to_int_list(self, email_str):
1002 1018 """
1003 1019         Get every byte of the hex digest value of the email and turn it into an integer.
1004 1020         It will always be between 0-255
1005 1021 """
1006 1022 digest = md5_safe(email_str.lower())
1007 1023 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1008 1024
1009 1025 def pick_color_bank_index(self, email_str, color_bank):
1010 1026 return self.email_to_int_list(email_str)[0] % len(color_bank)
1011 1027
1012 1028 def str2color(self, email_str):
1013 1029 """
1014 1030 Tries to map in a stable algorithm an email to color
1015 1031
1016 1032 :param email_str:
1017 1033 """
1018 1034 color_bank = self.get_color_bank()
1019 1035         # pick position (modulo its length so we always find it in the
1020 1036         # bank even if it's smaller than 256 values)
1021 1037 pos = self.pick_color_bank_index(email_str, color_bank)
1022 1038 return color_bank[pos]
1023 1039
1024 1040 def normalize_email(self, email_address):
1025 1041 import unicodedata
1026 1042 # default host used to fill in the fake/missing email
1027 1043 default_host = u'localhost'
1028 1044
1029 1045 if not email_address:
1030 1046 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1031 1047
1032 1048 email_address = safe_unicode(email_address)
1033 1049
1034 1050 if u'@' not in email_address:
1035 1051 email_address = u'%s@%s' % (email_address, default_host)
1036 1052
1037 1053 if email_address.endswith(u'@'):
1038 1054 email_address = u'%s%s' % (email_address, default_host)
1039 1055
1040 1056 email_address = unicodedata.normalize('NFKD', email_address)\
1041 1057 .encode('ascii', 'ignore')
1042 1058 return email_address
1043 1059
1044 1060 def get_initials(self):
1045 1061 """
1046 1062 Returns 2 letter initials calculated based on the input.
1047 1063         The algorithm picks the first given email address, takes the first letter
1048 1064         of the part before @, and then the first letter of the server name. In case
1049 1065         the part before @ is in the format `somestring.somestring2`, it replaces
1050 1066         the server letter with the first letter of somestring2
1051 1067
1052 1068         In case the function was initialized with both first and last name, this
1053 1069         overrides the email extraction with the first letters of the first and
1054 1070         last name. We add special logic to that functionality: in case the full name
1055 1071         is compound, like Guido Von Rossum, we use the last part of the last name
1056 1072         (Von Rossum), picking `R`.
1057 1073
1058 1074         The function also normalizes non-ascii characters to their ascii
1059 1075         representation, eg Ą => A
1060 1076 """
1061 1077 import unicodedata
1062 1078 # replace non-ascii to ascii
1063 1079 first_name = unicodedata.normalize(
1064 1080 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1065 1081 last_name = unicodedata.normalize(
1066 1082 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1067 1083
1068 1084 # do NFKD encoding, and also make sure email has proper format
1069 1085 email_address = self.normalize_email(self.email_address)
1070 1086
1071 1087 # first push the email initials
1072 1088 prefix, server = email_address.split('@', 1)
1073 1089
1074 1090 # check if prefix is maybe a 'firstname.lastname' syntax
1075 1091 _dot_split = prefix.rsplit('.', 1)
1076 1092 if len(_dot_split) == 2:
1077 1093 initials = [_dot_split[0][0], _dot_split[1][0]]
1078 1094 else:
1079 1095 initials = [prefix[0], server[0]]
1080 1096
1081 1097         # then try to replace either firstname or lastname
1082 1098 fn_letter = (first_name or " ")[0].strip()
1083 1099 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1084 1100
1085 1101 if fn_letter:
1086 1102 initials[0] = fn_letter
1087 1103
1088 1104 if ln_letter:
1089 1105 initials[1] = ln_letter
1090 1106
1091 1107 return ''.join(initials).upper()
1092 1108
1093 1109 def get_img_data_by_type(self, font_family, img_type):
1094 1110 default_user = """
1095 1111 <svg xmlns="http://www.w3.org/2000/svg"
1096 1112 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1097 1113 viewBox="-15 -10 439.165 429.164"
1098 1114
1099 1115 xml:space="preserve"
1100 1116 style="background:{background};" >
1101 1117
1102 1118 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1103 1119 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1104 1120 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1105 1121 168.596,153.916,216.671,
1106 1122 204.583,216.671z" fill="{text_color}"/>
1107 1123 <path d="M407.164,374.717L360.88,
1108 1124 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1109 1125 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1110 1126 15.366-44.203,23.488-69.076,23.488c-24.877,
1111 1127 0-48.762-8.122-69.078-23.488
1112 1128 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1113 1129 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1114 1130 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1115 1131 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1116 1132 19.402-10.527 C409.699,390.129,
1117 1133 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1118 1134 </svg>""".format(
1119 1135 size=self.size,
1120 1136 background='#979797', # @grey4
1121 1137 text_color=self.text_color,
1122 1138 font_family=font_family)
1123 1139
1124 1140 return {
1125 1141 "default_user": default_user
1126 1142 }[img_type]
1127 1143
1128 1144 def get_img_data(self, svg_type=None):
1129 1145 """
1130 1146 generates the svg metadata for image
1131 1147 """
1132 1148
1133 1149 font_family = ','.join([
1134 1150 'proximanovaregular',
1135 1151 'Proxima Nova Regular',
1136 1152 'Proxima Nova',
1137 1153 'Arial',
1138 1154 'Lucida Grande',
1139 1155 'sans-serif'
1140 1156 ])
1141 1157 if svg_type:
1142 1158 return self.get_img_data_by_type(font_family, svg_type)
1143 1159
1144 1160 initials = self.get_initials()
1145 1161 img_data = """
1146 1162 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1147 1163 width="{size}" height="{size}"
1148 1164 style="width: 100%; height: 100%; background-color: {background}"
1149 1165 viewBox="0 0 {size} {size}">
1150 1166 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1151 1167 pointer-events="auto" fill="{text_color}"
1152 1168 font-family="{font_family}"
1153 1169 style="font-weight: 400; font-size: {f_size}px;">{text}
1154 1170 </text>
1155 1171 </svg>""".format(
1156 1172 size=self.size,
1157 1173 f_size=self.size/1.85, # scale the text inside the box nicely
1158 1174 background=self.background,
1159 1175 text_color=self.text_color,
1160 1176 text=initials.upper(),
1161 1177 font_family=font_family)
1162 1178
1163 1179 return img_data
1164 1180
1165 1181 def generate_svg(self, svg_type=None):
1166 1182 img_data = self.get_img_data(svg_type)
1167 1183 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1168 1184
1169 1185
1170 1186 def initials_gravatar(email_address, first_name, last_name, size=30):
1171 1187 svg_type = None
1172 1188 if email_address == User.DEFAULT_USER_EMAIL:
1173 1189 svg_type = 'default_user'
1174 1190 klass = InitialsGravatar(email_address, first_name, last_name, size)
1175 1191 return klass.generate_svg(svg_type=svg_type)
1176 1192
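# Example usage (illustrative sketch): initials are derived from the email
# address unless explicit first/last names override them; the addresses and
# names below are hypothetical.
#
#   >>> InitialsGravatar('john.doe@example.com', '', '').get_initials()
#   'JD'
#   >>> InitialsGravatar('jane@example.com', 'Jane', 'Smith').get_initials()
#   'JS'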
1177 1193
1178 1194 def gravatar_url(email_address, size=30):
1179 1195 # doh, we need to re-import those to mock it later
1180 1196 from pylons import tmpl_context as c
1181 1197
1182 1198 _use_gravatar = c.visual.use_gravatar
1183 1199 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1184 1200
1185 1201 email_address = email_address or User.DEFAULT_USER_EMAIL
1186 1202 if isinstance(email_address, unicode):
1187 1203 # hashlib crashes on unicode items
1188 1204 email_address = safe_str(email_address)
1189 1205
1190 1206 # empty email or default user
1191 1207 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1192 1208 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1193 1209
1194 1210 if _use_gravatar:
1195 1211 # TODO: Disuse pyramid thread locals. Think about another solution to
1196 1212 # get the host and schema here.
1197 1213 request = get_current_request()
1198 1214 tmpl = safe_str(_gravatar_url)
1199 1215 tmpl = tmpl.replace('{email}', email_address)\
1200 1216 .replace('{md5email}', md5_safe(email_address.lower())) \
1201 1217 .replace('{netloc}', request.host)\
1202 1218 .replace('{scheme}', request.scheme)\
1203 1219 .replace('{size}', safe_str(size))
1204 1220 return tmpl
1205 1221 else:
1206 1222 return initials_gravatar(email_address, '', '', size=size)
1207 1223
1208 1224
1209 1225 class Page(_Page):
1210 1226 """
1211 1227 Custom pager to match rendering style with paginator
1212 1228 """
1213 1229
1214 1230 def _get_pos(self, cur_page, max_page, items):
1215 1231 edge = (items / 2) + 1
1216 1232 if (cur_page <= edge):
1217 1233 radius = max(items / 2, items - cur_page)
1218 1234 elif (max_page - cur_page) < edge:
1219 1235 radius = (items - 1) - (max_page - cur_page)
1220 1236 else:
1221 1237 radius = items / 2
1222 1238
1223 1239 left = max(1, (cur_page - (radius)))
1224 1240 right = min(max_page, cur_page + (radius))
1225 1241 return left, cur_page, right
1226 1242
1227 1243 def _range(self, regexp_match):
1228 1244 """
1229 1245 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1230 1246
1231 1247 Arguments:
1232 1248
1233 1249 regexp_match
1234 1250 A "re" (regular expressions) match object containing the
1235 1251 radius of linked pages around the current page in
1236 1252 regexp_match.group(1) as a string
1237 1253
1238 1254 This function is supposed to be called as a callable in
1239 1255 re.sub.
1240 1256
1241 1257 """
1242 1258 radius = int(regexp_match.group(1))
1243 1259
1244 1260 # Compute the first and last page number within the radius
1245 1261 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1246 1262 # -> leftmost_page = 5
1247 1263 # -> rightmost_page = 9
1248 1264 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1249 1265 self.last_page,
1250 1266 (radius * 2) + 1)
1251 1267 nav_items = []
1252 1268
1253 1269 # Create a link to the first page (unless we are on the first page
1254 1270 # or there would be no need to insert '..' spacers)
1255 1271 if self.page != self.first_page and self.first_page < leftmost_page:
1256 1272 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1257 1273
1258 1274 # Insert dots if there are pages between the first page
1259 1275 # and the currently displayed page range
1260 1276 if leftmost_page - self.first_page > 1:
1261 1277 # Wrap in a SPAN tag if nolink_attr is set
1262 1278 text = '..'
1263 1279 if self.dotdot_attr:
1264 1280 text = HTML.span(c=text, **self.dotdot_attr)
1265 1281 nav_items.append(text)
1266 1282
1267 1283 for thispage in xrange(leftmost_page, rightmost_page + 1):
1268 1284 # Hilight the current page number and do not use a link
1269 1285 if thispage == self.page:
1270 1286 text = '%s' % (thispage,)
1271 1287 # Wrap in a SPAN tag if nolink_attr is set
1272 1288 if self.curpage_attr:
1273 1289 text = HTML.span(c=text, **self.curpage_attr)
1274 1290 nav_items.append(text)
1275 1291 # Otherwise create just a link to that page
1276 1292 else:
1277 1293 text = '%s' % (thispage,)
1278 1294 nav_items.append(self._pagerlink(thispage, text))
1279 1295
1280 1296 # Insert dots if there are pages between the displayed
1281 1297 # page numbers and the end of the page range
1282 1298 if self.last_page - rightmost_page > 1:
1283 1299 text = '..'
1284 1300 # Wrap in a SPAN tag if nolink_attr is set
1285 1301 if self.dotdot_attr:
1286 1302 text = HTML.span(c=text, **self.dotdot_attr)
1287 1303 nav_items.append(text)
1288 1304
1289 1305 # Create a link to the very last page (unless we are on the last
1290 1306 # page or there would be no need to insert '..' spacers)
1291 1307 if self.page != self.last_page and rightmost_page < self.last_page:
1292 1308 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1293 1309
1294 1310 ## prerender links
1295 1311 #_page_link = url.current()
1296 1312 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1297 1313 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1298 1314 return self.separator.join(nav_items)
1299 1315
1300 1316 def pager(self, format='~2~', page_param='page', partial_param='partial',
1301 1317 show_if_single_page=False, separator=' ', onclick=None,
1302 1318 symbol_first='<<', symbol_last='>>',
1303 1319 symbol_previous='<', symbol_next='>',
1304 1320 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1305 1321 curpage_attr={'class': 'pager_curpage'},
1306 1322 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1307 1323
1308 1324 self.curpage_attr = curpage_attr
1309 1325 self.separator = separator
1310 1326 self.pager_kwargs = kwargs
1311 1327 self.page_param = page_param
1312 1328 self.partial_param = partial_param
1313 1329 self.onclick = onclick
1314 1330 self.link_attr = link_attr
1315 1331 self.dotdot_attr = dotdot_attr
1316 1332
1317 1333 # Don't show navigator if there is no more than one page
1318 1334 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1319 1335 return ''
1320 1336
1321 1337 from string import Template
1322 1338 # Replace ~...~ in token format by range of pages
1323 1339 result = re.sub(r'~(\d+)~', self._range, format)
1324 1340
1325 1341 # Interpolate '%' variables
1326 1342 result = Template(result).safe_substitute({
1327 1343 'first_page': self.first_page,
1328 1344 'last_page': self.last_page,
1329 1345 'page': self.page,
1330 1346 'page_count': self.page_count,
1331 1347 'items_per_page': self.items_per_page,
1332 1348 'first_item': self.first_item,
1333 1349 'last_item': self.last_item,
1334 1350 'item_count': self.item_count,
1335 1351 'link_first': self.page > self.first_page and \
1336 1352 self._pagerlink(self.first_page, symbol_first) or '',
1337 1353 'link_last': self.page < self.last_page and \
1338 1354 self._pagerlink(self.last_page, symbol_last) or '',
1339 1355 'link_previous': self.previous_page and \
1340 1356 self._pagerlink(self.previous_page, symbol_previous) \
1341 1357 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1342 1358 'link_next': self.next_page and \
1343 1359 self._pagerlink(self.next_page, symbol_next) \
1344 1360 or HTML.span(symbol_next, class_="pg-next disabled")
1345 1361 })
1346 1362
1347 1363 return literal(result)
1348 1364
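# Example usage (illustrative sketch): the '~N~' token expands to a radius of
# N linked pages around the current one, and '$'-variables are substituted
# from the pager state. The page object below is a hypothetical Page instance.
#
#   page_obj.pager('$link_previous ~2~ $link_next')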
1349 1365
1350 1366 #==============================================================================
1351 1367 # REPO PAGER, PAGER FOR REPOSITORY
1352 1368 #==============================================================================
1353 1369 class RepoPage(Page):
1354 1370
1355 1371 def __init__(self, collection, page=1, items_per_page=20,
1356 1372 item_count=None, url=None, **kwargs):
1357 1373
1358 1374         """Create a "RepoPage" instance, a special pager for paging a
1359 1375         repository
1360 1376 """
1361 1377 self._url_generator = url
1362 1378
1363 1379         # Save the kwargs class-wide so they can be used in the pager() method
1364 1380 self.kwargs = kwargs
1365 1381
1366 1382 # Save a reference to the collection
1367 1383 self.original_collection = collection
1368 1384
1369 1385 self.collection = collection
1370 1386
1371 1387 # The self.page is the number of the current page.
1372 1388 # The first page has the number 1!
1373 1389 try:
1374 1390 self.page = int(page) # make it int() if we get it as a string
1375 1391 except (ValueError, TypeError):
1376 1392 self.page = 1
1377 1393
1378 1394 self.items_per_page = items_per_page
1379 1395
1380 1396         # Unless the user tells us how many items the collection has
1381 1397 # we calculate that ourselves.
1382 1398 if item_count is not None:
1383 1399 self.item_count = item_count
1384 1400 else:
1385 1401 self.item_count = len(self.collection)
1386 1402
1387 1403 # Compute the number of the first and last available page
1388 1404 if self.item_count > 0:
1389 1405 self.first_page = 1
1390 1406 self.page_count = int(math.ceil(float(self.item_count) /
1391 1407 self.items_per_page))
1392 1408 self.last_page = self.first_page + self.page_count - 1
1393 1409
1394 1410             # Make sure that the requested page number is in the range of
1395 1411             # valid pages
1396 1412 if self.page > self.last_page:
1397 1413 self.page = self.last_page
1398 1414 elif self.page < self.first_page:
1399 1415 self.page = self.first_page
1400 1416
1401 1417 # Note: the number of items on this page can be less than
1402 1418 # items_per_page if the last page is not full
1403 1419 self.first_item = max(0, (self.item_count) - (self.page *
1404 1420 items_per_page))
1405 1421 self.last_item = ((self.item_count - 1) - items_per_page *
1406 1422 (self.page - 1))
1407 1423
1408 1424 self.items = list(self.collection[self.first_item:self.last_item + 1])
1409 1425
1410 1426 # Links to previous and next page
1411 1427 if self.page > self.first_page:
1412 1428 self.previous_page = self.page - 1
1413 1429 else:
1414 1430 self.previous_page = None
1415 1431
1416 1432 if self.page < self.last_page:
1417 1433 self.next_page = self.page + 1
1418 1434 else:
1419 1435 self.next_page = None
1420 1436
1421 1437 # No items available
1422 1438 else:
1423 1439 self.first_page = None
1424 1440 self.page_count = 0
1425 1441 self.last_page = None
1426 1442 self.first_item = None
1427 1443 self.last_item = None
1428 1444 self.previous_page = None
1429 1445 self.next_page = None
1430 1446 self.items = []
1431 1447
1432 1448 # This is a subclass of the 'list' type. Initialise the list now.
1433 1449 list.__init__(self, reversed(self.items))
1434 1450
1435 1451
1436 1452 def changed_tooltip(nodes):
1437 1453 """
1438 1454     Generates an html string for changed nodes in the commit page.
1439 1455     It limits the output to 30 entries
1440 1456
1441 1457 :param nodes: LazyNodesGenerator
1442 1458 """
1443 1459 if nodes:
1444 1460 pref = ': <br/> '
1445 1461 suf = ''
1446 1462 if len(nodes) > 30:
1447 1463 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1448 1464 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1449 1465 for x in nodes[:30]]) + suf)
1450 1466 else:
1451 1467 return ': ' + _('No Files')
1452 1468
1453 1469
1454 1470 def breadcrumb_repo_link(repo):
1455 1471 """
1456 1472 Makes a breadcrumbs path link to repo
1457 1473
1458 1474 ex::
1459 1475 group >> subgroup >> repo
1460 1476
1461 1477 :param repo: a Repository instance
1462 1478 """
1463 1479
1464 1480 path = [
1465 1481 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1466 1482 for group in repo.groups_with_parents
1467 1483 ] + [
1468 1484 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1469 1485 ]
1470 1486
1471 1487 return literal(' &raquo; '.join(path))
1472 1488
1473 1489
1474 1490 def format_byte_size_binary(file_size):
1475 1491 """
1476 1492     Formats file/folder sizes using binary (base-2) units.
1477 1493 """
1478 1494 formatted_size = format_byte_size(file_size, binary=True)
1479 1495 return formatted_size
1480 1496
1481 1497
1482 1498 def fancy_file_stats(stats):
1483 1499 """
1484 1500     Displays a fancy two-colored bar for the number of added/deleted
1485 1501     lines of code in a file
1486 1502
1487 1503 :param stats: two element list of added/deleted lines of code
1488 1504 """
1489 1505 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1490 1506 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1491 1507
1492 1508 def cgen(l_type, a_v, d_v):
1493 1509 mapping = {'tr': 'top-right-rounded-corner-mid',
1494 1510 'tl': 'top-left-rounded-corner-mid',
1495 1511 'br': 'bottom-right-rounded-corner-mid',
1496 1512 'bl': 'bottom-left-rounded-corner-mid'}
1497 1513 map_getter = lambda x: mapping[x]
1498 1514
1499 1515 if l_type == 'a' and d_v:
1500 1516 #case when added and deleted are present
1501 1517 return ' '.join(map(map_getter, ['tl', 'bl']))
1502 1518
1503 1519 if l_type == 'a' and not d_v:
1504 1520 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1505 1521
1506 1522 if l_type == 'd' and a_v:
1507 1523 return ' '.join(map(map_getter, ['tr', 'br']))
1508 1524
1509 1525 if l_type == 'd' and not a_v:
1510 1526 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1511 1527
1512 1528 a, d = stats['added'], stats['deleted']
1513 1529 width = 100
1514 1530
1515 1531 if stats['binary']: # binary operations like chmod/rename etc
1516 1532 lbl = []
1517 1533 bin_op = 0 # undefined
1518 1534
1519 1535 # prefix with bin for binary files
1520 1536 if BIN_FILENODE in stats['ops']:
1521 1537 lbl += ['bin']
1522 1538
1523 1539 if NEW_FILENODE in stats['ops']:
1524 1540 lbl += [_('new file')]
1525 1541 bin_op = NEW_FILENODE
1526 1542 elif MOD_FILENODE in stats['ops']:
1527 1543 lbl += [_('mod')]
1528 1544 bin_op = MOD_FILENODE
1529 1545 elif DEL_FILENODE in stats['ops']:
1530 1546 lbl += [_('del')]
1531 1547 bin_op = DEL_FILENODE
1532 1548 elif RENAMED_FILENODE in stats['ops']:
1533 1549 lbl += [_('rename')]
1534 1550 bin_op = RENAMED_FILENODE
1535 1551
1536 1552 # chmod can go with other operations, so we add a + to lbl if needed
1537 1553 if CHMOD_FILENODE in stats['ops']:
1538 1554 lbl += [_('chmod')]
1539 1555 if bin_op == 0:
1540 1556 bin_op = CHMOD_FILENODE
1541 1557
1542 1558 lbl = '+'.join(lbl)
1543 1559 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1544 1560 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1545 1561 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1546 1562 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1547 1563
1548 1564 t = stats['added'] + stats['deleted']
1549 1565 unit = float(width) / (t or 1)
1550 1566
1551 1567 # needs > 9% of width to be visible or 0 to be hidden
1552 1568 a_p = max(9, unit * a) if a > 0 else 0
1553 1569 d_p = max(9, unit * d) if d > 0 else 0
1554 1570 p_sum = a_p + d_p
1555 1571
1556 1572 if p_sum > width:
1557 1573 #adjust the percentage to be == 100% since we adjusted to 9
1558 1574 if a_p > d_p:
1559 1575 a_p = a_p - (p_sum - width)
1560 1576 else:
1561 1577 d_p = d_p - (p_sum - width)
1562 1578
1563 1579 a_v = a if a > 0 else ''
1564 1580 d_v = d if d > 0 else ''
1565 1581
1566 1582 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1567 1583 cgen('a', a_v, d_v), a_p, a_v
1568 1584 )
1569 1585 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1570 1586 cgen('d', a_v, d_v), d_p, d_v
1571 1587 )
1572 1588 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1573 1589
1574 1590
1575 1591 def urlify_text(text_, safe=True):
1576 1592 """
1577 1593     Extract urls from text and make html links out of them
1578 1594
1579 1595 :param text_:
1580 1596 """
1581 1597
1582 1598 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1583 1599 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1584 1600
1585 1601 def url_func(match_obj):
1586 1602 url_full = match_obj.groups()[0]
1587 1603 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1588 1604 _newtext = url_pat.sub(url_func, text_)
1589 1605 if safe:
1590 1606 return literal(_newtext)
1591 1607 return _newtext
1592 1608
1593 1609
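# Example usage (illustrative sketch): plain http(s) URLs are wrapped in
# anchor tags; with the default safe=True the result is returned as a literal().
#
#   >>> urlify_text('see http://example.com/docs for details', safe=False)
#   'see <a href="http://example.com/docs">http://example.com/docs</a> for details'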
1594 1610 def urlify_commits(text_, repository):
1595 1611 """
1596 1612     Extract commit ids from text and make links out of them
1597 1613
1598 1614 :param text_:
1599 1615 :param repository: repo name to build the URL with
1600 1616 """
1601 1617 from pylons import url # doh, we need to re-import url to mock it later
1602 1618 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1603 1619
1604 1620 def url_func(match_obj):
1605 1621 commit_id = match_obj.groups()[1]
1606 1622 pref = match_obj.groups()[0]
1607 1623 suf = match_obj.groups()[2]
1608 1624
1609 1625 tmpl = (
1610 1626 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1611 1627 '%(commit_id)s</a>%(suf)s'
1612 1628 )
1613 1629 return tmpl % {
1614 1630 'pref': pref,
1615 1631 'cls': 'revision-link',
1616 1632 'url': url('changeset_home', repo_name=repository,
1617 revision=commit_id),
1633 revision=commit_id, qualified=True),
1618 1634 'commit_id': commit_id,
1619 1635 'suf': suf
1620 1636 }
1621 1637
1622 1638 newtext = URL_PAT.sub(url_func, text_)
1623 1639
1624 1640 return newtext
1625 1641
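# Example usage (illustrative sketch): standalone hashes of 12-40 hex
# characters become 'changeset_home' links; the exact href depends on the
# routing configuration, so it is elided below.
#
#   >>> urlify_commits('fixed in deadbeefcafe', 'some-repo')  # doctest: +SKIP
#   'fixed in <a class="revision-link" href="...">deadbeefcafe</a>'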
1626 1642
1627 def _process_url_func(match_obj, repo_name, uid, entry):
1643 def _process_url_func(match_obj, repo_name, uid, entry,
1644 return_raw_data=False):
1628 1645 pref = ''
1629 1646 if match_obj.group().startswith(' '):
1630 1647 pref = ' '
1631 1648
1632 1649 issue_id = ''.join(match_obj.groups())
1633 1650 tmpl = (
1634 1651 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 1652 '%(issue-prefix)s%(id-repr)s'
1636 1653 '</a>')
1637 1654
1638 1655 (repo_name_cleaned,
1639 1656 parent_group_name) = RepoGroupModel().\
1640 1657 _get_group_name_and_parent(repo_name)
1641 1658
1642 1659 # variables replacement
1643 1660 named_vars = {
1644 1661 'id': issue_id,
1645 1662 'repo': repo_name,
1646 1663 'repo_name': repo_name_cleaned,
1647 1664 'group_name': parent_group_name
1648 1665 }
1649 1666 # named regex variables
1650 1667 named_vars.update(match_obj.groupdict())
1651 1668 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1652 1669
1653 return tmpl % {
1670 data = {
1654 1671 'pref': pref,
1655 1672 'cls': 'issue-tracker-link',
1656 1673 'url': _url,
1657 1674 'id-repr': issue_id,
1658 1675 'issue-prefix': entry['pref'],
1659 1676 'serv': entry['url'],
1660 1677 }
1678 if return_raw_data:
1679 return {
1680 'id': issue_id,
1681 'url': _url
1682 }
1683 return tmpl % data
1661 1684
1662 1685
1663 def process_patterns(text_string, repo_name, config):
1686 def process_patterns(text_string, repo_name, config=None):
1664 1687 repo = None
1665 1688 if repo_name:
1666 1689         # Retrieve the repo to avoid an invalid repo_name exploding in
1667 1690         # IssueTrackerSettingsModel, while still passing the invalid name further down
1668 1691 repo = Repository.get_by_repo_name(repo_name, cache=True)
1669 1692
1670 1693 settings_model = IssueTrackerSettingsModel(repo=repo)
1671 1694 active_entries = settings_model.get_settings(cache=True)
1672 1695
1696 issues_data = []
1673 1697 newtext = text_string
1674 1698 for uid, entry in active_entries.items():
1675 url_func = partial(
1676 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1677
1678 1699 log.debug('found issue tracker entry with uid %s' % (uid,))
1679 1700
1680 1701 if not (entry['pat'] and entry['url']):
1681 1702 log.debug('skipping due to missing data')
1682 1703 continue
1683 1704
1684 1705 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1685 1706 % (uid, entry['pat'], entry['url'], entry['pref']))
1686 1707
1687 1708 try:
1688 1709 pattern = re.compile(r'%s' % entry['pat'])
1689 1710 except re.error:
1690 1711 log.exception(
1691 1712 'issue tracker pattern: `%s` failed to compile',
1692 1713 entry['pat'])
1693 1714 continue
1694 1715
1716 data_func = partial(
1717 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1718 return_raw_data=True)
1719
1720 for match_obj in pattern.finditer(text_string):
1721 issues_data.append(data_func(match_obj))
1722
1723 url_func = partial(
1724 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1725
1695 1726 newtext = pattern.sub(url_func, newtext)
1696 1727 log.debug('processed prefix:uid `%s`' % (uid,))
1697 1728
1698 return newtext
1729 return newtext, issues_data
1699 1730
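# Example usage (illustrative sketch): besides the rewritten text, the
# function now also returns raw issue data for every match of the issue
# tracker patterns configured for the repository. The matched id format and
# tracker URL below are hypothetical and depend on that configuration.
#
#   >>> text, issues = process_patterns('fixes #123', 'some-repo')  # doctest: +SKIP
#   >>> issues
#   [{'id': '123', 'url': 'https://tracker.example.com/issue/123'}]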
1700 1731
1701 1732 def urlify_commit_message(commit_text, repository=None):
1702 1733 """
1703 1734     Parses the given text message and makes proper links.
1704 1735     Issues are linked to the given issue server, and the rest become commit links
1705 1736
1706 1737 :param commit_text:
1707 1738 :param repository:
1708 1739 """
1709 1740 from pylons import url # doh, we need to re-import url to mock it later
1710 from rhodecode import CONFIG
1711 1741
1712 1742 def escaper(string):
1713 1743 return string.replace('<', '&lt;').replace('>', '&gt;')
1714 1744
1715 1745 newtext = escaper(commit_text)
1746
1747 # extract http/https links and make them real urls
1748 newtext = urlify_text(newtext, safe=False)
1749
1716 1750 # urlify commits - extract commit ids and make link out of them, if we have
1717 1751 # the scope of repository present.
1718 1752 if repository:
1719 1753 newtext = urlify_commits(newtext, repository)
1720 1754
1721 # extract http/https links and make them real urls
1722 newtext = urlify_text(newtext, safe=False)
1723
1724 1755 # process issue tracker patterns
1725 newtext = process_patterns(newtext, repository or '', CONFIG)
1756 newtext, issues = process_patterns(newtext, repository or '')
1726 1757
1727 1758 return literal(newtext)
1728 1759
1729 1760
1730 1761 def rst(source, mentions=False):
1731 1762 return literal('<div class="rst-block">%s</div>' %
1732 1763 MarkupRenderer.rst(source, mentions=mentions))
1733 1764
1734 1765
1735 1766 def markdown(source, mentions=False):
1736 1767 return literal('<div class="markdown-block">%s</div>' %
1737 1768 MarkupRenderer.markdown(source, flavored=True,
1738 1769 mentions=mentions))
1739 1770
1740 1771 def renderer_from_filename(filename, exclude=None):
1741 1772 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1742 1773
1743 1774
1744 1775 def render(source, renderer='rst', mentions=False):
1745 1776 if renderer == 'rst':
1746 1777 return rst(source, mentions=mentions)
1747 1778 if renderer == 'markdown':
1748 1779 return markdown(source, mentions=mentions)
1749 1780
1750 1781
1751 1782 def commit_status(repo, commit_id):
1752 1783 return ChangesetStatusModel().get_status(repo, commit_id)
1753 1784
1754 1785
1755 1786 def commit_status_lbl(commit_status):
1756 1787 return dict(ChangesetStatus.STATUSES).get(commit_status)
1757 1788
1758 1789
1759 1790 def commit_time(repo_name, commit_id):
1760 1791 repo = Repository.get_by_repo_name(repo_name)
1761 1792 commit = repo.get_commit(commit_id=commit_id)
1762 1793 return commit.date
1763 1794
1764 1795
1765 1796 def get_permission_name(key):
1766 1797 return dict(Permission.PERMS).get(key)
1767 1798
1768 1799
1769 1800 def journal_filter_help():
1770 1801 return _(
1771 1802 'Example filter terms:\n' +
1772 1803 ' repository:vcs\n' +
1773 1804 ' username:marcin\n' +
1774 1805 ' action:*push*\n' +
1775 1806 ' ip:127.0.0.1\n' +
1776 1807 ' date:20120101\n' +
1777 1808 ' date:[20120101100000 TO 20120102]\n' +
1778 1809 '\n' +
1779 1810 'Generate wildcards using \'*\' character:\n' +
1780 1811 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1781 1812 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1782 1813 '\n' +
1783 1814 'Optional AND / OR operators in queries\n' +
1784 1815 ' "repository:vcs OR repository:test"\n' +
1785 1816 ' "username:test AND repository:test*"\n'
1786 1817 )
1787 1818
1788 1819
1789 1820 def not_mapped_error(repo_name):
1790 1821 flash(_('%s repository is not mapped to db; perhaps'
1791 1822 ' it was created or renamed from the filesystem.'
1792 1823 ' Please run the application again'
1793 1824 ' in order to rescan repositories') % repo_name, category='error')
1794 1825
1795 1826
1796 1827 def ip_range(ip_addr):
1797 1828 from rhodecode.model.db import UserIpMap
1798 1829 s, e = UserIpMap._get_ip_range(ip_addr)
1799 1830 return '%s - %s' % (s, e)
1800 1831
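# Example (illustrative values): ip_range('192.168.1.0/24') returns
# '192.168.1.0 - 192.168.1.255', i.e. the network and broadcast addresses
# computed by UserIpMap._get_ip_range.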
1801 1832
1802 1833 def form(url, method='post', needs_csrf_token=True, **attrs):
1803 1834 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1804 1835 if method.lower() != 'get' and needs_csrf_token:
1805 1836 raise Exception(
1806 1837 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1807 1838 'CSRF token. If the endpoint does not require such a token you can ' +
1808 1839 'explicitly set the parameter needs_csrf_token to false.')
1809 1840
1810 1841 return wh_form(url, method=method, **attrs)
1811 1842
1812 1843
1813 1844 def secure_form(url, method="POST", multipart=False, **attrs):
1814 1845 """Start a form tag that points the action to an url. This
1815 1846 form tag will also include the hidden field containing
1816 1847 the auth token.
1817 1848
1818 1849 The url options should be given either as a string, or as a
1819 1850 ``url()`` function. The method for the form defaults to POST.
1820 1851
1821 1852 Options:
1822 1853
1823 1854 ``multipart``
1824 1855 If set to True, the enctype is set to "multipart/form-data".
1825 1856 ``method``
1826 1857 The method to use when submitting the form, usually either
1827 1858 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1828 1859 hidden input with name _method is added to simulate the verb
1829 1860 over POST.
1830 1861
1831 1862 """
1832 1863 from webhelpers.pylonslib.secure_form import insecure_form
1833 1864 from rhodecode.lib.auth import get_csrf_token, csrf_token_key
1834 1865 form = insecure_form(url, method, multipart, **attrs)
1835 1866 token = HTML.div(hidden(csrf_token_key, get_csrf_token()), style="display: none;")
1836 1867 return literal("%s\n%s" % (form, token))
1837 1868
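# Usage sketch (assumption): called from a template through the helpers module,
# e.g. h.secure_form(h.url('my_route'), method='POST'); the returned literal is
# the opening <form> tag followed by a hidden div carrying the CSRF token input.
# 'my_route' is a hypothetical route name.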
1838 1869 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1839 1870 select_html = select(name, selected, options, **attrs)
1840 1871 select2 = """
1841 1872 <script>
1842 1873 $(document).ready(function() {
1843 1874 $('#%s').select2({
1844 1875 containerCssClass: 'drop-menu',
1845 1876 dropdownCssClass: 'drop-menu-dropdown',
1846 1877 dropdownAutoWidth: true%s
1847 1878 });
1848 1879 });
1849 1880 </script>
1850 1881 """
1851 1882 filter_option = """,
1852 1883 minimumResultsForSearch: -1
1853 1884 """
1854 1885 input_id = attrs.get('id') or name
1855 1886 filter_enabled = "" if enable_filter else filter_option
1856 1887 select_script = literal(select2 % (input_id, filter_enabled))
1857 1888
1858 1889 return literal(select_html+select_script)
1859 1890
1860 1891
1861 1892 def get_visual_attr(tmpl_context_var, attr_name):
1862 1893 """
1863 1894 A safe way to get an attribute from the `visual` variable of the template context
1864 1895
1865 1896 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1866 1897 :param attr_name: name of the attribute we fetch from c.visual
1867 1898 """
1868 1899 visual = getattr(tmpl_context_var, 'visual', None)
1869 1900 if not visual:
1870 1901 return
1871 1902 else:
1872 1903 return getattr(visual, attr_name, None)
1873 1904
1874 1905
1875 1906 def get_last_path_part(file_node):
1876 1907 if not file_node.path:
1877 1908 return u''
1878 1909
1879 1910 path = safe_unicode(file_node.path.split('/')[-1])
1880 1911 return u'../' + path
1881 1912
1882 1913
1883 1914 def route_path(*args, **kwds):
1884 1915 """
1885 1916 Wrapper around pyramid's `route_path` function. It is used to generate
1886 1917 URLs from within pylons views or templates. This will be removed when
1887 1918 the pyramid migration is finished.
1888 1919 """
1889 1920 req = get_current_request()
1890 1921 return req.route_path(*args, **kwds)
1891 1922
1892 1923
1893 1924 def resource_path(*args, **kwds):
1894 1925 """
1895 1926 Wrapper around pyramid's `resource_path` function. It is used to generate
1896 1927 URLs from within pylons views or templates. This will be removed when
1897 1928 the pyramid migration is finished.
1898 1929 """
1899 1930 req = get_current_request()
1900 1931 return req.resource_path(*args, **kwds)
@@ -1,260 +1,278 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import logging
23 import urlparse
23 24 import threading
24 25 from BaseHTTPServer import BaseHTTPRequestHandler
25 26 from SocketServer import TCPServer
27 from routes.util import URLGenerator
26 28
27 29 import Pyro4
30 import pylons
31 import rhodecode
28 32
29 33 from rhodecode.lib import hooks_base
30 34 from rhodecode.lib.utils2 import AttributeDict
31 35
32 36
33 37 log = logging.getLogger(__name__)
34 38
35 39
36 40 class HooksHttpHandler(BaseHTTPRequestHandler):
37 41 def do_POST(self):
38 42 method, extras = self._read_request()
39 43 try:
40 44 result = self._call_hook(method, extras)
41 45 except Exception as e:
42 46 result = {
43 47 'exception': e.__class__.__name__,
44 48 'exception_args': e.args
45 49 }
46 50 self._write_response(result)
47 51
48 52 def _read_request(self):
49 53 length = int(self.headers['Content-Length'])
50 54 body = self.rfile.read(length).decode('utf-8')
51 55 data = json.loads(body)
52 56 return data['method'], data['extras']
53 57
54 58 def _write_response(self, result):
55 59 self.send_response(200)
56 60 self.send_header("Content-type", "text/json")
57 61 self.end_headers()
58 62 self.wfile.write(json.dumps(result))
59 63
60 64 def _call_hook(self, method, extras):
61 65 hooks = Hooks()
62 66 result = getattr(hooks, method)(extras)
63 67 return result
64 68
65 69 def log_message(self, format, *args):
66 70 """
67 71 This is an overridden method of BaseHTTPRequestHandler which logs using
68 72 the logging library instead of writing directly to stderr.
69 73 """
70 74
71 75 message = format % args
72 76
73 77 # TODO: mikhail: add different log levels support
74 78 log.debug(
75 79 "%s - - [%s] %s", self.client_address[0],
76 80 self.log_date_time_string(), message)
77 81
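# Protocol sketch (assumption): a client of HooksHttpHandler POSTs a JSON body
# naming the hook method and its extras; the response body is the JSON-encoded
# result (or exception info). The extras content below is a placeholder.
import json
payload = json.dumps({'method': 'repo_size', 'extras': {'repository': 'my-repo'}})
# POST `payload` to http://<hooks_uri>/ with a Content-Length header;
# do_POST dispatches to Hooks().repo_size(extras) and writes the result back.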
78 82
79 83 class DummyHooksCallbackDaemon(object):
80 84 def __init__(self):
81 85 self.hooks_module = Hooks.__module__
82 86
83 87 def __enter__(self):
84 88 log.debug('Running dummy hooks callback daemon')
85 89 return self
86 90
87 91 def __exit__(self, exc_type, exc_val, exc_tb):
88 92 log.debug('Exiting dummy hooks callback daemon')
89 93
90 94
91 95 class ThreadedHookCallbackDaemon(object):
92 96
93 97 _callback_thread = None
94 98 _daemon = None
95 99 _done = False
96 100
97 101 def __init__(self):
98 102 self._prepare()
99 103
100 104 def __enter__(self):
101 105 self._run()
102 106 return self
103 107
104 108 def __exit__(self, exc_type, exc_val, exc_tb):
105 109 self._stop()
106 110
107 111 def _prepare(self):
108 112 raise NotImplementedError()
109 113
110 114 def _run(self):
111 115 raise NotImplementedError()
112 116
113 117 def _stop(self):
114 118 raise NotImplementedError()
115 119
116 120
117 121 class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
118 122 """
119 123 Context manager which will run a callback daemon in a background thread.
120 124 """
121 125
122 126 hooks_uri = None
123 127
124 128 def _prepare(self):
125 129 log.debug("Preparing callback daemon and registering hook object")
126 130 self._daemon = Pyro4.Daemon()
127 131 hooks_interface = Hooks()
128 132 self.hooks_uri = str(self._daemon.register(hooks_interface))
129 133 log.debug("Hooks uri is: %s", self.hooks_uri)
130 134
131 135 def _run(self):
132 136 log.debug("Running event loop of callback daemon in background thread")
133 137 callback_thread = threading.Thread(
134 138 target=self._daemon.requestLoop,
135 139 kwargs={'loopCondition': lambda: not self._done})
136 140 callback_thread.daemon = True
137 141 callback_thread.start()
138 142 self._callback_thread = callback_thread
139 143
140 144 def _stop(self):
141 145 log.debug("Waiting for background thread to finish.")
142 146 self._done = True
143 147 self._callback_thread.join()
144 148 self._daemon.close()
145 149 self._daemon = None
146 150 self._callback_thread = None
147 151
148 152
149 153 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
150 154 """
151 155 Context manager which will run a callback daemon in a background thread.
152 156 """
153 157
154 158 hooks_uri = None
155 159
156 160 IP_ADDRESS = '127.0.0.1'
157 161
158 162 # From Python docs: Polling reduces our responsiveness to a shutdown
159 163 # request and wastes cpu at all other times.
160 164 POLL_INTERVAL = 0.1
161 165
162 166 def _prepare(self):
163 167 log.debug("Preparing callback daemon and registering hook object")
164 168
165 169 self._done = False
166 170 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
167 171 _, port = self._daemon.server_address
168 172 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
169 173
170 174 log.debug("Hooks uri is: %s", self.hooks_uri)
171 175
172 176 def _run(self):
173 177 log.debug("Running event loop of callback daemon in background thread")
174 178 callback_thread = threading.Thread(
175 179 target=self._daemon.serve_forever,
176 180 kwargs={'poll_interval': self.POLL_INTERVAL})
177 181 callback_thread.daemon = True
178 182 callback_thread.start()
179 183 self._callback_thread = callback_thread
180 184
181 185 def _stop(self):
182 186 log.debug("Waiting for background thread to finish.")
183 187 self._daemon.shutdown()
184 188 self._callback_thread.join()
185 189 self._daemon = None
186 190 self._callback_thread = None
187 191
188 192
189 193 def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
190 194 callback_daemon = None
191 195 protocol = protocol.lower() if protocol else None
192 196
193 197 if use_direct_calls:
194 198 callback_daemon = DummyHooksCallbackDaemon()
195 199 extras['hooks_module'] = callback_daemon.hooks_module
196 200 else:
197 201 callback_daemon = (
198 202 Pyro4HooksCallbackDaemon()
199 203 if protocol == 'pyro4'
200 204 else HttpHooksCallbackDaemon())
201 205 extras['hooks_uri'] = callback_daemon.hooks_uri
202 206 extras['hooks_protocol'] = protocol
203 207
204 208 return callback_daemon, extras
205 209
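# Usage sketch (assumption; the extras content is a placeholder):
hook_extras = {'action': 'push', 'repository': 'my-repo'}
daemon, hook_extras = prepare_callback_daemon(
    hook_extras, protocol='http', use_direct_calls=False)
with daemon:
    # hook_extras now carries 'hooks_uri' and 'hooks_protocol' for the
    # subprocess that will call back into these hooks
    pass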
206 210
207 211 class Hooks(object):
208 212 """
209 213 Exposes the hooks for remote call backs
210 214 """
211 215
212 216 @Pyro4.callback
213 217 def repo_size(self, extras):
214 218 log.debug("Called repo_size of Hooks object")
215 219 return self._call_hook(hooks_base.repo_size, extras)
216 220
217 221 @Pyro4.callback
218 222 def pre_pull(self, extras):
219 223 log.debug("Called pre_pull of Hooks object")
220 224 return self._call_hook(hooks_base.pre_pull, extras)
221 225
222 226 @Pyro4.callback
223 227 def post_pull(self, extras):
224 228 log.debug("Called post_pull of Hooks object")
225 229 return self._call_hook(hooks_base.post_pull, extras)
226 230
227 231 @Pyro4.callback
228 232 def pre_push(self, extras):
229 233 log.debug("Called pre_push of Hooks object")
230 234 return self._call_hook(hooks_base.pre_push, extras)
231 235
232 236 @Pyro4.callback
233 237 def post_push(self, extras):
234 238 log.debug("Called post_push of Hooks object")
235 239 return self._call_hook(hooks_base.post_push, extras)
236 240
237 241 def _call_hook(self, hook, extras):
238 242 extras = AttributeDict(extras)
243 netloc = urlparse.urlparse(extras.server_url).netloc
244 environ = {
245 'SERVER_NAME': netloc.split(':')[0],
246 'SERVER_PORT': ':' in netloc and netloc.split(':')[1] or '80',
247 'SCRIPT_NAME': '',
248 'PATH_INFO': '/',
249 'HTTP_HOST': 'localhost',
250 'REQUEST_METHOD': 'GET',
251 }
252 pylons_router = URLGenerator(rhodecode.CONFIG['routes.map'], environ)
253 pylons.url._push_object(pylons_router)
239 254
240 255 try:
241 256 result = hook(extras)
242 257 except Exception as error:
243 258 log.exception('Exception when handling hook %s', hook)
244 259 error_args = error.args
245 260 return {
246 261 'status': 128,
247 262 'output': '',
248 263 'exception': type(error).__name__,
249 264 'exception_args': error_args,
250 265 }
266 finally:
267 pylons.url._pop_object()
268
251 269 return {
252 270 'status': result.status,
253 271 'output': result.output,
254 272 }
255 273
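# Illustrative sketch of how the environ above is derived from extras.server_url
# (the URL value is a hypothetical example):
import urlparse
netloc = urlparse.urlparse('https://code.example.com:8080/').netloc
server_name = netloc.split(':')[0]                            # 'code.example.com'
server_port = ':' in netloc and netloc.split(':')[1] or '80'  # '8080'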
256 274 def __enter__(self):
257 275 return self
258 276
259 277 def __exit__(self, exc_type, exc_val, exc_tb):
260 278 pass
@@ -1,3477 +1,3516 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import os
26 26 import sys
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.exc import IntegrityError
40 40 from sqlalchemy.ext.declarative import declared_attr
41 41 from sqlalchemy.ext.hybrid import hybrid_property
42 42 from sqlalchemy.orm import (
43 43 relationship, joinedload, class_mapper, validates, aliased)
44 44 from sqlalchemy.sql.expression import true
45 45 from beaker.cache import cache_region, region_invalidate
46 46 from webob.exc import HTTPNotFound
47 47 from zope.cachedescriptors.property import Lazy as LazyProperty
48 48
49 49 from pylons import url
50 50 from pylons.i18n.translation import lazy_ugettext as _
51 51
52 52 from rhodecode.lib.vcs import get_backend
53 53 from rhodecode.lib.vcs.utils.helpers import get_scm
54 54 from rhodecode.lib.vcs.exceptions import VCSError
55 55 from rhodecode.lib.vcs.backends.base import (
56 56 EmptyCommit, Reference, MergeFailureReason)
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict)
60 60 from rhodecode.lib.ext_json import json
61 61 from rhodecode.lib.caching_query import FromCache
62 62 from rhodecode.lib.encrypt import AESCipher
63 63
64 64 from rhodecode.model.meta import Base, Session
65 65
66 66 URL_SEP = '/'
67 67 log = logging.getLogger(__name__)
68 68
69 69 # =============================================================================
70 70 # BASE CLASSES
71 71 # =============================================================================
72 72
73 73 # this is propagated from the .ini file rhodecode.encrypted_values.secret or
74 74 # beaker.session.secret if the first is not set,
75 75 # and initialized at environment.py
76 76 ENCRYPTION_KEY = None
77 77
78 78 # used to sort permissions by type; the '#' used here is not allowed in
79 79 # usernames, and it sorts very early in the string.printable table.
80 80 PERMISSION_TYPE_SORT = {
81 81 'admin': '####',
82 82 'write': '###',
83 83 'read': '##',
84 84 'none': '#',
85 85 }
86 86
87 87
88 88 def display_sort(obj):
89 89 """
90 90 Sort function used to sort permissions in .permissions() function of
91 91 Repository, RepoGroup, UserGroup. Also it puts the default user in front
92 92 of all other resources.
93 93 """
94 94
95 95 if obj.username == User.DEFAULT_USER:
96 96 return '#####'
97 97 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
98 98 return prefix + obj.username
99 99
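# Example sort keys (illustrative): the default user maps to '#####', an admin
# 'alice' to '####alice', a writer 'bob' to '###bob', so sorted() yields the
# default user first, then admins, writers, readers, and 'none' entries, each
# group alphabetical internally.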
100 100
101 101 def _hash_key(k):
102 102 return md5_safe(k)
103 103
104 104
105 105 class EncryptedTextValue(TypeDecorator):
106 106 """
107 107 Special column for encrypted long text data, use like::
108 108
109 109 value = Column("encrypted_value", EncryptedValue(), nullable=False)
110 110
111 111 This column is intelligent so if the value is in unencrypted form it returns
112 112 the unencrypted form, but on save it always encrypts
113 113 """
114 114 impl = Text
115 115
116 116 def process_bind_param(self, value, dialect):
117 117 if not value:
118 118 return value
119 119 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
120 120 # protect against double encryption if someone manually starts
121 121 # doing it
122 122 raise ValueError('value needs to be in unencrypted format, ie. '
123 123 'not starting with enc$aes')
124 124 return 'enc$aes_hmac$%s' % AESCipher(
125 125 ENCRYPTION_KEY, hmac=True).encrypt(value)
126 126
127 127 def process_result_value(self, value, dialect):
128 128 import rhodecode
129 129
130 130 if not value:
131 131 return value
132 132
133 133 parts = value.split('$', 3)
134 134 if not len(parts) == 3:
135 135 # probably not encrypted values
136 136 return value
137 137 else:
138 138 if parts[0] != 'enc':
139 139 # parts ok but without our header ?
140 140 return value
141 141 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
142 142 'rhodecode.encrypted_values.strict') or True)
143 143 # at that stage we know it's our encryption
144 144 if parts[1] == 'aes':
145 145 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
146 146 elif parts[1] == 'aes_hmac':
147 147 decrypted_data = AESCipher(
148 148 ENCRYPTION_KEY, hmac=True,
149 149 strict_verification=enc_strict_mode).decrypt(parts[2])
150 150 else:
151 151 raise ValueError(
152 152 'Encryption type part is wrong, must be `aes` '
153 153 'or `aes_hmac`, got `%s` instead' % (parts[1]))
154 154 return decrypted_data
155 155
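# Illustrative stored form (ciphertext shortened): 'enc$aes_hmac$xxxx...'
# process_result_value splits on '$' into ('enc', 'aes_hmac', ciphertext) and
# decrypts; any value not matching this three-part header is returned unchanged.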
156 156
157 157 class BaseModel(object):
158 158 """
159 159 Base Model for all classes
160 160 """
161 161
162 162 @classmethod
163 163 def _get_keys(cls):
164 164 """return column names for this model """
165 165 return class_mapper(cls).c.keys()
166 166
167 167 def get_dict(self):
168 168 """
169 169 return dict with keys and values corresponding
170 170 to this model data """
171 171
172 172 d = {}
173 173 for k in self._get_keys():
174 174 d[k] = getattr(self, k)
175 175
176 176 # also use __json__() if present to get additional fields
177 177 _json_attr = getattr(self, '__json__', None)
178 178 if _json_attr:
179 179 # update with attributes from __json__
180 180 if callable(_json_attr):
181 181 _json_attr = _json_attr()
182 182 for k, val in _json_attr.iteritems():
183 183 d[k] = val
184 184 return d
185 185
186 186 def get_appstruct(self):
187 187 """return list with keys and values tuples corresponding
188 188 to this model data """
189 189
190 190 l = []
191 191 for k in self._get_keys():
192 192 l.append((k, getattr(self, k),))
193 193 return l
194 194
195 195 def populate_obj(self, populate_dict):
196 196 """populate model with data from given populate_dict"""
197 197
198 198 for k in self._get_keys():
199 199 if k in populate_dict:
200 200 setattr(self, k, populate_dict[k])
201 201
202 202 @classmethod
203 203 def query(cls):
204 204 return Session().query(cls)
205 205
206 206 @classmethod
207 207 def get(cls, id_):
208 208 if id_:
209 209 return cls.query().get(id_)
210 210
211 211 @classmethod
212 212 def get_or_404(cls, id_):
213 213 try:
214 214 id_ = int(id_)
215 215 except (TypeError, ValueError):
216 216 raise HTTPNotFound
217 217
218 218 res = cls.query().get(id_)
219 219 if not res:
220 220 raise HTTPNotFound
221 221 return res
222 222
223 223 @classmethod
224 224 def getAll(cls):
225 225 # deprecated and left for backward compatibility
226 226 return cls.get_all()
227 227
228 228 @classmethod
229 229 def get_all(cls):
230 230 return cls.query().all()
231 231
232 232 @classmethod
233 233 def delete(cls, id_):
234 234 obj = cls.query().get(id_)
235 235 Session().delete(obj)
236 236
237 237 @classmethod
238 238 def identity_cache(cls, session, attr_name, value):
239 239 exist_in_session = []
240 240 for (item_cls, pkey), instance in session.identity_map.items():
241 241 if cls == item_cls and getattr(instance, attr_name) == value:
242 242 exist_in_session.append(instance)
243 243 if exist_in_session:
244 244 if len(exist_in_session) == 1:
245 245 return exist_in_session[0]
246 246 log.exception(
247 247 'multiple objects with attr %s and '
248 248 'value %s found with same name: %r',
249 249 attr_name, value, exist_in_session)
250 250
251 251 def __repr__(self):
252 252 if hasattr(self, '__unicode__'):
253 253 # python repr needs to return str
254 254 try:
255 255 return safe_str(self.__unicode__())
256 256 except UnicodeDecodeError:
257 257 pass
258 258 return '<DB:%s>' % (self.__class__.__name__)
259 259
260 260
261 261 class RhodeCodeSetting(Base, BaseModel):
262 262 __tablename__ = 'rhodecode_settings'
263 263 __table_args__ = (
264 264 UniqueConstraint('app_settings_name'),
265 265 {'extend_existing': True, 'mysql_engine': 'InnoDB',
266 266 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
267 267 )
268 268
269 269 SETTINGS_TYPES = {
270 270 'str': safe_str,
271 271 'int': safe_int,
272 272 'unicode': safe_unicode,
273 273 'bool': str2bool,
274 274 'list': functools.partial(aslist, sep=',')
275 275 }
276 276 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
277 277 GLOBAL_CONF_KEY = 'app_settings'
278 278
279 279 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
280 280 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
281 281 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
282 282 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
283 283
284 284 def __init__(self, key='', val='', type='unicode'):
285 285 self.app_settings_name = key
286 286 self.app_settings_type = type
287 287 self.app_settings_value = val
288 288
289 289 @validates('_app_settings_value')
290 290 def validate_settings_value(self, key, val):
291 291 assert type(val) == unicode
292 292 return val
293 293
294 294 @hybrid_property
295 295 def app_settings_value(self):
296 296 v = self._app_settings_value
297 297 _type = self.app_settings_type
298 298 if _type:
299 299 _type = self.app_settings_type.split('.')[0]
300 300 # decode the encrypted value
301 301 if 'encrypted' in self.app_settings_type:
302 302 cipher = EncryptedTextValue()
303 303 v = safe_unicode(cipher.process_result_value(v, None))
304 304
305 305 converter = self.SETTINGS_TYPES.get(_type) or \
306 306 self.SETTINGS_TYPES['unicode']
307 307 return converter(v)
308 308
309 309 @app_settings_value.setter
310 310 def app_settings_value(self, val):
311 311 """
312 312 Setter that will always make sure we use unicode in app_settings_value
313 313
314 314 :param val:
315 315 """
316 316 val = safe_unicode(val)
317 317 # encode the encrypted value
318 318 if 'encrypted' in self.app_settings_type:
319 319 cipher = EncryptedTextValue()
320 320 val = safe_unicode(cipher.process_bind_param(val, None))
321 321 self._app_settings_value = val
322 322
323 323 @hybrid_property
324 324 def app_settings_type(self):
325 325 return self._app_settings_type
326 326
327 327 @app_settings_type.setter
328 328 def app_settings_type(self, val):
329 329 if val.split('.')[0] not in self.SETTINGS_TYPES:
330 330 raise Exception('type must be one of %s got %s'
331 331 % (self.SETTINGS_TYPES.keys(), val))
332 332 self._app_settings_type = val
333 333
334 334 def __unicode__(self):
335 335 return u"<%s('%s:%s[%s]')>" % (
336 336 self.__class__.__name__,
337 337 self.app_settings_name, self.app_settings_value,
338 338 self.app_settings_type
339 339 )
340 340
341 341
342 342 class RhodeCodeUi(Base, BaseModel):
343 343 __tablename__ = 'rhodecode_ui'
344 344 __table_args__ = (
345 345 UniqueConstraint('ui_key'),
346 346 {'extend_existing': True, 'mysql_engine': 'InnoDB',
347 347 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
348 348 )
349 349
350 350 HOOK_REPO_SIZE = 'changegroup.repo_size'
351 351 # HG
352 352 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
353 353 HOOK_PULL = 'outgoing.pull_logger'
354 354 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
355 355 HOOK_PUSH = 'changegroup.push_logger'
356 356
357 357 # TODO: johbo: Unify way how hooks are configured for git and hg,
358 358 # git part is currently hardcoded.
359 359
360 360 # SVN PATTERNS
361 361 SVN_BRANCH_ID = 'vcs_svn_branch'
362 362 SVN_TAG_ID = 'vcs_svn_tag'
363 363
364 364 ui_id = Column(
365 365 "ui_id", Integer(), nullable=False, unique=True, default=None,
366 366 primary_key=True)
367 367 ui_section = Column(
368 368 "ui_section", String(255), nullable=True, unique=None, default=None)
369 369 ui_key = Column(
370 370 "ui_key", String(255), nullable=True, unique=None, default=None)
371 371 ui_value = Column(
372 372 "ui_value", String(255), nullable=True, unique=None, default=None)
373 373 ui_active = Column(
374 374 "ui_active", Boolean(), nullable=True, unique=None, default=True)
375 375
376 376 def __repr__(self):
377 377 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
378 378 self.ui_key, self.ui_value)
379 379
380 380
381 381 class RepoRhodeCodeSetting(Base, BaseModel):
382 382 __tablename__ = 'repo_rhodecode_settings'
383 383 __table_args__ = (
384 384 UniqueConstraint(
385 385 'app_settings_name', 'repository_id',
386 386 name='uq_repo_rhodecode_setting_name_repo_id'),
387 387 {'extend_existing': True, 'mysql_engine': 'InnoDB',
388 388 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
389 389 )
390 390
391 391 repository_id = Column(
392 392 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
393 393 nullable=False)
394 394 app_settings_id = Column(
395 395 "app_settings_id", Integer(), nullable=False, unique=True,
396 396 default=None, primary_key=True)
397 397 app_settings_name = Column(
398 398 "app_settings_name", String(255), nullable=True, unique=None,
399 399 default=None)
400 400 _app_settings_value = Column(
401 401 "app_settings_value", String(4096), nullable=True, unique=None,
402 402 default=None)
403 403 _app_settings_type = Column(
404 404 "app_settings_type", String(255), nullable=True, unique=None,
405 405 default=None)
406 406
407 407 repository = relationship('Repository')
408 408
409 409 def __init__(self, repository_id, key='', val='', type='unicode'):
410 410 self.repository_id = repository_id
411 411 self.app_settings_name = key
412 412 self.app_settings_type = type
413 413 self.app_settings_value = val
414 414
415 415 @validates('_app_settings_value')
416 416 def validate_settings_value(self, key, val):
417 417 assert type(val) == unicode
418 418 return val
419 419
420 420 @hybrid_property
421 421 def app_settings_value(self):
422 422 v = self._app_settings_value
423 423 type_ = self.app_settings_type
424 424 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
425 425 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
426 426 return converter(v)
427 427
428 428 @app_settings_value.setter
429 429 def app_settings_value(self, val):
430 430 """
431 431 Setter that will always make sure we use unicode in app_settings_value
432 432
433 433 :param val:
434 434 """
435 435 self._app_settings_value = safe_unicode(val)
436 436
437 437 @hybrid_property
438 438 def app_settings_type(self):
439 439 return self._app_settings_type
440 440
441 441 @app_settings_type.setter
442 442 def app_settings_type(self, val):
443 443 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
444 444 if val not in SETTINGS_TYPES:
445 445 raise Exception('type must be one of %s got %s'
446 446 % (SETTINGS_TYPES.keys(), val))
447 447 self._app_settings_type = val
448 448
449 449 def __unicode__(self):
450 450 return u"<%s('%s:%s:%s[%s]')>" % (
451 451 self.__class__.__name__, self.repository.repo_name,
452 452 self.app_settings_name, self.app_settings_value,
453 453 self.app_settings_type
454 454 )
455 455
456 456
457 457 class RepoRhodeCodeUi(Base, BaseModel):
458 458 __tablename__ = 'repo_rhodecode_ui'
459 459 __table_args__ = (
460 460 UniqueConstraint(
461 461 'repository_id', 'ui_section', 'ui_key',
462 462 name='uq_repo_rhodecode_ui_repository_id_section_key'),
463 463 {'extend_existing': True, 'mysql_engine': 'InnoDB',
464 464 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
465 465 )
466 466
467 467 repository_id = Column(
468 468 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
469 469 nullable=False)
470 470 ui_id = Column(
471 471 "ui_id", Integer(), nullable=False, unique=True, default=None,
472 472 primary_key=True)
473 473 ui_section = Column(
474 474 "ui_section", String(255), nullable=True, unique=None, default=None)
475 475 ui_key = Column(
476 476 "ui_key", String(255), nullable=True, unique=None, default=None)
477 477 ui_value = Column(
478 478 "ui_value", String(255), nullable=True, unique=None, default=None)
479 479 ui_active = Column(
480 480 "ui_active", Boolean(), nullable=True, unique=None, default=True)
481 481
482 482 repository = relationship('Repository')
483 483
484 484 def __repr__(self):
485 485 return '<%s[%s:%s]%s=>%s]>' % (
486 486 self.__class__.__name__, self.repository.repo_name,
487 487 self.ui_section, self.ui_key, self.ui_value)
488 488
489 489
490 490 class User(Base, BaseModel):
491 491 __tablename__ = 'users'
492 492 __table_args__ = (
493 493 UniqueConstraint('username'), UniqueConstraint('email'),
494 494 Index('u_username_idx', 'username'),
495 495 Index('u_email_idx', 'email'),
496 496 {'extend_existing': True, 'mysql_engine': 'InnoDB',
497 497 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
498 498 )
499 499 DEFAULT_USER = 'default'
500 500 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
501 501 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
502 502
503 503 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
504 504 username = Column("username", String(255), nullable=True, unique=None, default=None)
505 505 password = Column("password", String(255), nullable=True, unique=None, default=None)
506 506 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
507 507 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
508 508 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
509 509 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
510 510 _email = Column("email", String(255), nullable=True, unique=None, default=None)
511 511 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
512 512 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
513 513 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
514 514 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
515 515 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
516 516 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
517 517 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
518 518
519 519 user_log = relationship('UserLog')
520 520 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
521 521
522 522 repositories = relationship('Repository')
523 523 repository_groups = relationship('RepoGroup')
524 524 user_groups = relationship('UserGroup')
525 525
526 526 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
527 527 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
528 528
529 529 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
530 530 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
531 531 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
532 532
533 533 group_member = relationship('UserGroupMember', cascade='all')
534 534
535 535 notifications = relationship('UserNotification', cascade='all')
536 536 # notifications assigned to this user
537 537 user_created_notifications = relationship('Notification', cascade='all')
538 538 # comments created by this user
539 539 user_comments = relationship('ChangesetComment', cascade='all')
540 540 # user profile extra info
541 541 user_emails = relationship('UserEmailMap', cascade='all')
542 542 user_ip_map = relationship('UserIpMap', cascade='all')
543 543 user_auth_tokens = relationship('UserApiKeys', cascade='all')
544 544 # gists
545 545 user_gists = relationship('Gist', cascade='all')
546 546 # user pull requests
547 547 user_pull_requests = relationship('PullRequest', cascade='all')
548 548 # external identities
549 549 extenal_identities = relationship(
550 550 'ExternalIdentity',
551 551 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
552 552 cascade='all')
553 553
554 554 def __unicode__(self):
555 555 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
556 556 self.user_id, self.username)
557 557
558 558 @hybrid_property
559 559 def email(self):
560 560 return self._email
561 561
562 562 @email.setter
563 563 def email(self, val):
564 564 self._email = val.lower() if val else None
565 565
566 566 @property
567 567 def firstname(self):
568 568 # alias for future
569 569 return self.name
570 570
571 571 @property
572 572 def emails(self):
573 573 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
574 574 return [self.email] + [x.email for x in other]
575 575
576 576 @property
577 577 def auth_tokens(self):
578 578 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
579 579
580 580 @property
581 581 def extra_auth_tokens(self):
582 582 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
583 583
584 584 @property
585 585 def feed_token(self):
586 586 feed_tokens = UserApiKeys.query()\
587 587 .filter(UserApiKeys.user == self)\
588 588 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
589 589 .all()
590 590 if feed_tokens:
591 591 return feed_tokens[0].api_key
592 592 else:
593 593 # use the main token so we don't end up with nothing...
594 594 return self.api_key
595 595
596 596 @classmethod
597 597 def extra_valid_auth_tokens(cls, user, role=None):
598 598 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
599 599 .filter(or_(UserApiKeys.expires == -1,
600 600 UserApiKeys.expires >= time.time()))
601 601 if role:
602 602 tokens = tokens.filter(or_(UserApiKeys.role == role,
603 603 UserApiKeys.role == UserApiKeys.ROLE_ALL))
604 604 return tokens.all()
605 605
606 606 @property
607 607 def ip_addresses(self):
608 608 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
609 609 return [x.ip_addr for x in ret]
610 610
611 611 @property
612 612 def username_and_name(self):
613 613 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
614 614
615 615 @property
616 616 def username_or_name_or_email(self):
617 617 full_name = self.full_name if self.full_name != ' ' else None
618 618 return self.username or full_name or self.email
619 619
620 620 @property
621 621 def full_name(self):
622 622 return '%s %s' % (self.firstname, self.lastname)
623 623
624 624 @property
625 625 def full_name_or_username(self):
626 626 return ('%s %s' % (self.firstname, self.lastname)
627 627 if (self.firstname and self.lastname) else self.username)
628 628
629 629 @property
630 630 def full_contact(self):
631 631 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
632 632
633 633 @property
634 634 def short_contact(self):
635 635 return '%s %s' % (self.firstname, self.lastname)
636 636
637 637 @property
638 638 def is_admin(self):
639 639 return self.admin
640 640
641 641 @property
642 642 def AuthUser(self):
643 643 """
644 644 Returns instance of AuthUser for this user
645 645 """
646 646 from rhodecode.lib.auth import AuthUser
647 647 return AuthUser(user_id=self.user_id, api_key=self.api_key,
648 648 username=self.username)
649 649
650 650 @hybrid_property
651 651 def user_data(self):
652 652 if not self._user_data:
653 653 return {}
654 654
655 655 try:
656 656 return json.loads(self._user_data)
657 657 except TypeError:
658 658 return {}
659 659
660 660 @user_data.setter
661 661 def user_data(self, val):
662 662 if not isinstance(val, dict):
663 663 raise Exception('user_data must be dict, got %s' % type(val))
664 664 try:
665 665 self._user_data = json.dumps(val)
666 666 except Exception:
667 667 log.error(traceback.format_exc())
668 668
669 669 @classmethod
670 670 def get_by_username(cls, username, case_insensitive=False,
671 671 cache=False, identity_cache=False):
672 672 session = Session()
673 673
674 674 if case_insensitive:
675 675 q = cls.query().filter(
676 676 func.lower(cls.username) == func.lower(username))
677 677 else:
678 678 q = cls.query().filter(cls.username == username)
679 679
680 680 if cache:
681 681 if identity_cache:
682 682 val = cls.identity_cache(session, 'username', username)
683 683 if val:
684 684 return val
685 685 else:
686 686 q = q.options(
687 687 FromCache("sql_cache_short",
688 688 "get_user_by_name_%s" % _hash_key(username)))
689 689
690 690 return q.scalar()
691 691
692 692 @classmethod
693 693 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
694 694 q = cls.query().filter(cls.api_key == auth_token)
695 695
696 696 if cache:
697 697 q = q.options(FromCache("sql_cache_short",
698 698 "get_auth_token_%s" % auth_token))
699 699 res = q.scalar()
700 700
701 701 if fallback and not res:
702 702 #fallback to additional keys
703 703 _res = UserApiKeys.query()\
704 704 .filter(UserApiKeys.api_key == auth_token)\
705 705 .filter(or_(UserApiKeys.expires == -1,
706 706 UserApiKeys.expires >= time.time()))\
707 707 .first()
708 708 if _res:
709 709 res = _res.user
710 710 return res
711 711
712 712 @classmethod
713 713 def get_by_email(cls, email, case_insensitive=False, cache=False):
714 714
715 715 if case_insensitive:
716 716 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
717 717
718 718 else:
719 719 q = cls.query().filter(cls.email == email)
720 720
721 721 if cache:
722 722 q = q.options(FromCache("sql_cache_short",
723 723 "get_email_key_%s" % email))
724 724
725 725 ret = q.scalar()
726 726 if ret is None:
727 727 q = UserEmailMap.query()
728 728 # try fetching in alternate email map
729 729 if case_insensitive:
730 730 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
731 731 else:
732 732 q = q.filter(UserEmailMap.email == email)
733 733 q = q.options(joinedload(UserEmailMap.user))
734 734 if cache:
735 735 q = q.options(FromCache("sql_cache_short",
736 736 "get_email_map_key_%s" % email))
737 737 ret = getattr(q.scalar(), 'user', None)
738 738
739 739 return ret
740 740
741 741 @classmethod
742 742 def get_from_cs_author(cls, author):
743 743 """
744 744 Tries to get User objects out of commit author string
745 745
746 746 :param author:
747 747 """
748 748 from rhodecode.lib.helpers import email, author_name
749 749 # Valid email in the attribute passed, see if they're in the system
750 750 _email = email(author)
751 751 if _email:
752 752 user = cls.get_by_email(_email, case_insensitive=True)
753 753 if user:
754 754 return user
755 755 # Maybe we can match by username?
756 756 _author = author_name(author)
757 757 user = cls.get_by_username(_author, case_insensitive=True)
758 758 if user:
759 759 return user
760 760
761 761 def update_userdata(self, **kwargs):
762 762 usr = self
763 763 old = usr.user_data
764 764 old.update(**kwargs)
765 765 usr.user_data = old
766 766 Session().add(usr)
767 767 log.debug('updated userdata with %s', kwargs)
768 768
769 769 def update_lastlogin(self):
770 770 """Update user lastlogin"""
771 771 self.last_login = datetime.datetime.now()
772 772 Session().add(self)
773 773 log.debug('updated user %s lastlogin', self.username)
774 774
775 775 def update_lastactivity(self):
776 776 """Update user lastactivity"""
777 777 usr = self
778 778 old = usr.user_data
779 779 old.update({'last_activity': time.time()})
780 780 usr.user_data = old
781 781 Session().add(usr)
782 782 log.debug('updated user %s lastactivity', usr.username)
783 783
784 784 def update_password(self, new_password, change_api_key=False):
785 785 from rhodecode.lib.auth import get_crypt_password,generate_auth_token
786 786
787 787 self.password = get_crypt_password(new_password)
788 788 if change_api_key:
789 789 self.api_key = generate_auth_token(self.username)
790 790 Session().add(self)
791 791
792 792 @classmethod
793 793 def get_first_super_admin(cls):
794 794 user = User.query().filter(User.admin == true()).first()
795 795 if user is None:
796 796 raise Exception('FATAL: Missing administrative account!')
797 797 return user
798 798
799 799 @classmethod
800 800 def get_all_super_admins(cls):
801 801 """
802 802 Returns all admin accounts sorted by username
803 803 """
804 804 return User.query().filter(User.admin == true())\
805 805 .order_by(User.username.asc()).all()
806 806
807 807 @classmethod
808 808 def get_default_user(cls, cache=False):
809 809 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
810 810 if user is None:
811 811 raise Exception('FATAL: Missing default account!')
812 812 return user
813 813
814 814 def _get_default_perms(self, user, suffix=''):
815 815 from rhodecode.model.permission import PermissionModel
816 816 return PermissionModel().get_default_perms(user.user_perms, suffix)
817 817
818 818 def get_default_perms(self, suffix=''):
819 819 return self._get_default_perms(self, suffix)
820 820
821 821 def get_api_data(self, include_secrets=False, details='full'):
822 822 """
823 823 Common function for generating user related data for API
824 824
825 825 :param include_secrets: By default secrets in the API data will be replaced
826 826 by a placeholder value to prevent exposing this data by accident. In case
827 827 this data shall be exposed, set this flag to ``True``.
828 828
829 829 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
830 830 the available user information, including user_id, name and emails.
831 831 """
832 832 user = self
833 833 user_data = self.user_data
834 834 data = {
835 835 'user_id': user.user_id,
836 836 'username': user.username,
837 837 'firstname': user.name,
838 838 'lastname': user.lastname,
839 839 'email': user.email,
840 840 'emails': user.emails,
841 841 }
842 842 if details == 'basic':
843 843 return data
844 844
845 845 api_key_length = 40
846 846 api_key_replacement = '*' * api_key_length
847 847
848 848 extras = {
849 849 'api_key': api_key_replacement,
850 850 'api_keys': [api_key_replacement],
851 851 'active': user.active,
852 852 'admin': user.admin,
853 853 'extern_type': user.extern_type,
854 854 'extern_name': user.extern_name,
855 855 'last_login': user.last_login,
856 856 'ip_addresses': user.ip_addresses,
857 857 'language': user_data.get('language')
858 858 }
859 859 data.update(extras)
860 860
861 861 if include_secrets:
862 862 data['api_key'] = user.api_key
863 863 data['api_keys'] = user.auth_tokens
864 864 return data
865 865
866 866 def __json__(self):
867 867 data = {
868 868 'full_name': self.full_name,
869 869 'full_name_or_username': self.full_name_or_username,
870 870 'short_contact': self.short_contact,
871 871 'full_contact': self.full_contact,
872 872 }
873 873 data.update(self.get_api_data())
874 874 return data
875 875
876 876
877 877 class UserApiKeys(Base, BaseModel):
878 878 __tablename__ = 'user_api_keys'
879 879 __table_args__ = (
880 880 Index('uak_api_key_idx', 'api_key'),
881 881 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
882 882 UniqueConstraint('api_key'),
883 883 {'extend_existing': True, 'mysql_engine': 'InnoDB',
884 884 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
885 885 )
886 886 __mapper_args__ = {}
887 887
888 888 # ApiKey role
889 889 ROLE_ALL = 'token_role_all'
890 890 ROLE_HTTP = 'token_role_http'
891 891 ROLE_VCS = 'token_role_vcs'
892 892 ROLE_API = 'token_role_api'
893 893 ROLE_FEED = 'token_role_feed'
894 894 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
895 895
896 896 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
897 897 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
898 898 api_key = Column("api_key", String(255), nullable=False, unique=True)
899 899 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
900 900 expires = Column('expires', Float(53), nullable=False)
901 901 role = Column('role', String(255), nullable=True)
902 902 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
903 903
904 904 user = relationship('User', lazy='joined')
905 905
906 906 @classmethod
907 907 def _get_role_name(cls, role):
908 908 return {
909 909 cls.ROLE_ALL: _('all'),
910 910 cls.ROLE_HTTP: _('http/web interface'),
911 911 cls.ROLE_VCS: _('vcs (git/hg protocol)'),
912 912 cls.ROLE_API: _('api calls'),
913 913 cls.ROLE_FEED: _('feed access'),
914 914 }.get(role, role)
915 915
916 916 @property
917 917 def expired(self):
918 918 if self.expires == -1:
919 919 return False
920 920 return time.time() > self.expires
921 921
922 922 @property
923 923 def role_humanized(self):
924 924 return self._get_role_name(self.role)
925 925
926 926
927 927 class UserEmailMap(Base, BaseModel):
928 928 __tablename__ = 'user_email_map'
929 929 __table_args__ = (
930 930 Index('uem_email_idx', 'email'),
931 931 UniqueConstraint('email'),
932 932 {'extend_existing': True, 'mysql_engine': 'InnoDB',
933 933 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
934 934 )
935 935 __mapper_args__ = {}
936 936
937 937 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
938 938 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
939 939 _email = Column("email", String(255), nullable=True, unique=False, default=None)
940 940 user = relationship('User', lazy='joined')
941 941
942 942 @validates('_email')
943 943 def validate_email(self, key, email):
944 944 # check if this email is not main one
945 945 main_email = Session().query(User).filter(User.email == email).scalar()
946 946 if main_email is not None:
947 947 raise AttributeError('email %s is present in user table' % email)
948 948 return email
949 949
950 950 @hybrid_property
951 951 def email(self):
952 952 return self._email
953 953
954 954 @email.setter
955 955 def email(self, val):
956 956 self._email = val.lower() if val else None
957 957
958 958
959 959 class UserIpMap(Base, BaseModel):
960 960 __tablename__ = 'user_ip_map'
961 961 __table_args__ = (
962 962 UniqueConstraint('user_id', 'ip_addr'),
963 963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
964 964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
965 965 )
966 966 __mapper_args__ = {}
967 967
968 968 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
969 969 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
970 970 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
971 971 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
972 972 description = Column("description", String(10000), nullable=True, unique=None, default=None)
973 973 user = relationship('User', lazy='joined')
974 974
975 975 @classmethod
976 976 def _get_ip_range(cls, ip_addr):
977 977 net = ipaddress.ip_network(ip_addr, strict=False)
978 978 return [str(net.network_address), str(net.broadcast_address)]
979 979
980 980 def __json__(self):
981 981 return {
982 982 'ip_addr': self.ip_addr,
983 983 'ip_range': self._get_ip_range(self.ip_addr),
984 984 }
985 985
986 986 def __unicode__(self):
987 987 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
988 988 self.user_id, self.ip_addr)
989 989
990 990 class UserLog(Base, BaseModel):
991 991 __tablename__ = 'user_logs'
992 992 __table_args__ = (
993 993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
994 994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
995 995 )
996 996 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
997 997 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
998 998 username = Column("username", String(255), nullable=True, unique=None, default=None)
999 999 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1000 1000 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1001 1001 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1002 1002 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1003 1003 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1004 1004
1005 1005 def __unicode__(self):
1006 1006 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1007 1007 self.repository_name,
1008 1008 self.action)
1009 1009
1010 1010 @property
1011 1011 def action_as_day(self):
1012 1012 return datetime.date(*self.action_date.timetuple()[:3])
1013 1013
1014 1014 user = relationship('User')
1015 1015 repository = relationship('Repository', cascade='')
1016 1016
1017 1017
1018 1018 class UserGroup(Base, BaseModel):
1019 1019 __tablename__ = 'users_groups'
1020 1020 __table_args__ = (
1021 1021 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1022 1022 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1023 1023 )
1024 1024
1025 1025 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1026 1026 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1027 1027 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1028 1028 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1029 1029 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1030 1030 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1031 1031 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1032 1032 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1033 1033
1034 1034 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1035 1035 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1036 1036 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1037 1037 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1038 1038 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1039 1039 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1040 1040
1041 1041 user = relationship('User')
1042 1042
1043 1043 @hybrid_property
1044 1044 def group_data(self):
1045 1045 if not self._group_data:
1046 1046 return {}
1047 1047
1048 1048 try:
1049 1049 return json.loads(self._group_data)
1050 1050 except TypeError:
1051 1051 return {}
1052 1052
1053 1053 @group_data.setter
1054 1054 def group_data(self, val):
1055 1055 try:
1056 1056 self._group_data = json.dumps(val)
1057 1057 except Exception:
1058 1058 log.error(traceback.format_exc())
1059 1059
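# A minimal usage sketch for the JSON-backed ``group_data`` hybrid property
# above; ``ug`` stands for any UserGroup instance and the dict contents are
# purely illustrative:
#
#   ug = UserGroup.get_by_group_name('developers')
#   ug.group_data = {'source': 'ldap'}           # serialized to JSON on assignment
#   assert ug.group_data['source'] == 'ldap'     # parsed back to a dict on access
#   UserGroup().group_data == {}                 # missing/unparsable JSON falls back to {}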
1060 1060 def __unicode__(self):
1061 1061 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1062 1062 self.users_group_id,
1063 1063 self.users_group_name)
1064 1064
1065 1065 @classmethod
1066 1066 def get_by_group_name(cls, group_name, cache=False,
1067 1067 case_insensitive=False):
1068 1068 if case_insensitive:
1069 1069 q = cls.query().filter(func.lower(cls.users_group_name) ==
1070 1070 func.lower(group_name))
1071 1071
1072 1072 else:
1073 1073 q = cls.query().filter(cls.users_group_name == group_name)
1074 1074 if cache:
1075 1075 q = q.options(FromCache(
1076 1076 "sql_cache_short",
1077 1077 "get_group_%s" % _hash_key(group_name)))
1078 1078 return q.scalar()
1079 1079
1080 1080 @classmethod
1081 1081 def get(cls, user_group_id, cache=False):
1082 1082 user_group = cls.query()
1083 1083 if cache:
1084 1084 user_group = user_group.options(FromCache("sql_cache_short",
1085 1085 "get_users_group_%s" % user_group_id))
1086 1086 return user_group.get(user_group_id)
1087 1087
1088 1088 def permissions(self, with_admins=True, with_owner=True):
1089 1089 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1090 1090 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1091 1091 joinedload(UserUserGroupToPerm.user),
1092 1092 joinedload(UserUserGroupToPerm.permission),)
1093 1093
1094 1094 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1095 1095 # objects into plain AttributeDict rows because the SQLAlchemy session
1096 1096 # keeps a global reference, so mutating one object would propagate to
1097 1097 # all others. Without the copy, if an admin is also the owner, setting
1098 1098 # admin_row would change both records.
1099 1099 perm_rows = []
1100 1100 for _usr in q.all():
1101 1101 usr = AttributeDict(_usr.user.get_dict())
1102 1102 usr.permission = _usr.permission.permission_name
1103 1103 perm_rows.append(usr)
1104 1104
1105 1105 # order the perm rows: the 'default' user first, then by
1106 1106 # admin/write/read/none permission, sorted alphabetically
1107 1107 # within each permission group
1108 1108 perm_rows = sorted(perm_rows, key=display_sort)
1109 1109
1110 1110 _admin_perm = 'usergroup.admin'
1111 1111 owner_row = []
1112 1112 if with_owner:
1113 1113 usr = AttributeDict(self.user.get_dict())
1114 1114 usr.owner_row = True
1115 1115 usr.permission = _admin_perm
1116 1116 owner_row.append(usr)
1117 1117
1118 1118 super_admin_rows = []
1119 1119 if with_admins:
1120 1120 for usr in User.get_all_super_admins():
1121 1121 # if this admin is also owner, don't double the record
1122 1122 if usr.user_id == owner_row[0].user_id:
1123 1123 owner_row[0].admin_row = True
1124 1124 else:
1125 1125 usr = AttributeDict(usr.get_dict())
1126 1126 usr.admin_row = True
1127 1127 usr.permission = _admin_perm
1128 1128 super_admin_rows.append(usr)
1129 1129
1130 1130 return super_admin_rows + owner_row + perm_rows
1131 1131
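# A rough sketch of what ``permissions()`` returns; the usernames here are
# invented for the example. Each row is an AttributeDict copy of the user
# dict plus a ``permission`` attribute (and ``owner_row``/``admin_row`` flags
# where they apply):
#
#   rows = user_group.permissions(with_admins=True, with_owner=True)
#   # -> [<super admin rows>...,
#   #     {'username': 'owner_user', 'owner_row': True,
#   #      'permission': 'usergroup.admin', ...},
#   #     {'username': 'default', 'permission': 'usergroup.read', ...}]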
1132 1132 def permission_user_groups(self):
1133 1133 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1134 1134 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1135 1135 joinedload(UserGroupUserGroupToPerm.target_user_group),
1136 1136 joinedload(UserGroupUserGroupToPerm.permission),)
1137 1137
1138 1138 perm_rows = []
1139 1139 for _user_group in q.all():
1140 1140 usr = AttributeDict(_user_group.user_group.get_dict())
1141 1141 usr.permission = _user_group.permission.permission_name
1142 1142 perm_rows.append(usr)
1143 1143
1144 1144 return perm_rows
1145 1145
1146 1146 def _get_default_perms(self, user_group, suffix=''):
1147 1147 from rhodecode.model.permission import PermissionModel
1148 1148 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1149 1149
1150 1150 def get_default_perms(self, suffix=''):
1151 1151 return self._get_default_perms(self, suffix)
1152 1152
1153 1153 def get_api_data(self, with_group_members=True, include_secrets=False):
1154 1154 """
1155 1155 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1156 1156 basically forwarded.
1157 1157
1158 1158 """
1159 1159 user_group = self
1160 1160
1161 1161 data = {
1162 1162 'users_group_id': user_group.users_group_id,
1163 1163 'group_name': user_group.users_group_name,
1164 1164 'group_description': user_group.user_group_description,
1165 1165 'active': user_group.users_group_active,
1166 1166 'owner': user_group.user.username,
1167 1167 }
1168 1168 if with_group_members:
1169 1169 users = []
1170 1170 for user in user_group.members:
1171 1171 user = user.user
1172 1172 users.append(user.get_api_data(include_secrets=include_secrets))
1173 1173 data['users'] = users
1174 1174
1175 1175 return data
1176 1176
1177 1177
1178 1178 class UserGroupMember(Base, BaseModel):
1179 1179 __tablename__ = 'users_groups_members'
1180 1180 __table_args__ = (
1181 1181 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1182 1182 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1183 1183 )
1184 1184
1185 1185 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1186 1186 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1187 1187 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1188 1188
1189 1189 user = relationship('User', lazy='joined')
1190 1190 users_group = relationship('UserGroup')
1191 1191
1192 1192 def __init__(self, gr_id='', u_id=''):
1193 1193 self.users_group_id = gr_id
1194 1194 self.user_id = u_id
1195 1195
1196 1196
1197 1197 class RepositoryField(Base, BaseModel):
1198 1198 __tablename__ = 'repositories_fields'
1199 1199 __table_args__ = (
1200 1200 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1201 1201 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1202 1202 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1203 1203 )
1204 1204 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1205 1205
1206 1206 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1207 1207 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1208 1208 field_key = Column("field_key", String(250))
1209 1209 field_label = Column("field_label", String(1024), nullable=False)
1210 1210 field_value = Column("field_value", String(10000), nullable=False)
1211 1211 field_desc = Column("field_desc", String(1024), nullable=False)
1212 1212 field_type = Column("field_type", String(255), nullable=False, unique=None)
1213 1213 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1214 1214
1215 1215 repository = relationship('Repository')
1216 1216
1217 1217 @property
1218 1218 def field_key_prefixed(self):
1219 1219 return 'ex_%s' % self.field_key
1220 1220
1221 1221 @classmethod
1222 1222 def un_prefix_key(cls, key):
1223 1223 if key.startswith(cls.PREFIX):
1224 1224 return key[len(cls.PREFIX):]
1225 1225 return key
1226 1226
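# A quick sketch of the 'ex_' prefixing used for extra repository fields,
# assuming ``field`` is a RepositoryField instance:
#
#   field.field_key = 'license'
#   field.field_key_prefixed                      # -> 'ex_license'
#   RepositoryField.un_prefix_key('ex_license')   # -> 'license'
#   RepositoryField.un_prefix_key('license')      # -> 'license' (no prefix, unchanged)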
1227 1227 @classmethod
1228 1228 def get_by_key_name(cls, key, repo):
1229 1229 row = cls.query()\
1230 1230 .filter(cls.repository == repo)\
1231 1231 .filter(cls.field_key == key).scalar()
1232 1232 return row
1233 1233
1234 1234
1235 1235 class Repository(Base, BaseModel):
1236 1236 __tablename__ = 'repositories'
1237 1237 __table_args__ = (
1238 1238 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1239 1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1240 1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1241 1241 )
1242 1242 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1243 1243 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1244 1244
1245 1245 STATE_CREATED = 'repo_state_created'
1246 1246 STATE_PENDING = 'repo_state_pending'
1247 1247 STATE_ERROR = 'repo_state_error'
1248 1248
1249 1249 LOCK_AUTOMATIC = 'lock_auto'
1250 1250 LOCK_API = 'lock_api'
1251 1251 LOCK_WEB = 'lock_web'
1252 1252 LOCK_PULL = 'lock_pull'
1253 1253
1254 1254 NAME_SEP = URL_SEP
1255 1255
1256 1256 repo_id = Column(
1257 1257 "repo_id", Integer(), nullable=False, unique=True, default=None,
1258 1258 primary_key=True)
1259 1259 _repo_name = Column(
1260 1260 "repo_name", Text(), nullable=False, default=None)
1261 1261 _repo_name_hash = Column(
1262 1262 "repo_name_hash", String(255), nullable=False, unique=True)
1263 1263 repo_state = Column("repo_state", String(255), nullable=True)
1264 1264
1265 1265 clone_uri = Column(
1266 1266 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1267 1267 default=None)
1268 1268 repo_type = Column(
1269 1269 "repo_type", String(255), nullable=False, unique=False, default=None)
1270 1270 user_id = Column(
1271 1271 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1272 1272 unique=False, default=None)
1273 1273 private = Column(
1274 1274 "private", Boolean(), nullable=True, unique=None, default=None)
1275 1275 enable_statistics = Column(
1276 1276 "statistics", Boolean(), nullable=True, unique=None, default=True)
1277 1277 enable_downloads = Column(
1278 1278 "downloads", Boolean(), nullable=True, unique=None, default=True)
1279 1279 description = Column(
1280 1280 "description", String(10000), nullable=True, unique=None, default=None)
1281 1281 created_on = Column(
1282 1282 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1283 1283 default=datetime.datetime.now)
1284 1284 updated_on = Column(
1285 1285 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1286 1286 default=datetime.datetime.now)
1287 1287 _landing_revision = Column(
1288 1288 "landing_revision", String(255), nullable=False, unique=False,
1289 1289 default=None)
1290 1290 enable_locking = Column(
1291 1291 "enable_locking", Boolean(), nullable=False, unique=None,
1292 1292 default=False)
1293 1293 _locked = Column(
1294 1294 "locked", String(255), nullable=True, unique=False, default=None)
1295 1295 _changeset_cache = Column(
1296 1296 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1297 1297
1298 1298 fork_id = Column(
1299 1299 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1300 1300 nullable=True, unique=False, default=None)
1301 1301 group_id = Column(
1302 1302 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1303 1303 unique=False, default=None)
1304 1304
1305 1305 user = relationship('User', lazy='joined')
1306 1306 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1307 1307 group = relationship('RepoGroup', lazy='joined')
1308 1308 repo_to_perm = relationship(
1309 1309 'UserRepoToPerm', cascade='all',
1310 1310 order_by='UserRepoToPerm.repo_to_perm_id')
1311 1311 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1312 1312 stats = relationship('Statistics', cascade='all', uselist=False)
1313 1313
1314 1314 followers = relationship(
1315 1315 'UserFollowing',
1316 1316 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1317 1317 cascade='all')
1318 1318 extra_fields = relationship(
1319 1319 'RepositoryField', cascade="all, delete, delete-orphan")
1320 1320 logs = relationship('UserLog')
1321 1321 comments = relationship(
1322 1322 'ChangesetComment', cascade="all, delete, delete-orphan")
1323 1323 pull_requests_source = relationship(
1324 1324 'PullRequest',
1325 1325 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1326 1326 cascade="all, delete, delete-orphan")
1327 1327 pull_requests_target = relationship(
1328 1328 'PullRequest',
1329 1329 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1330 1330 cascade="all, delete, delete-orphan")
1331 1331 ui = relationship('RepoRhodeCodeUi', cascade="all")
1332 1332 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1333 1333
1334 1334 def __unicode__(self):
1335 1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1336 1336 safe_unicode(self.repo_name))
1337 1337
1338 1338 @hybrid_property
1339 1339 def landing_rev(self):
1340 1340 # should always return [rev_type, rev]
1341 1341 if self._landing_revision:
1342 1342 _rev_info = self._landing_revision.split(':')
1343 1343 if len(_rev_info) < 2:
1344 1344 _rev_info.insert(0, 'rev')
1345 1345 return [_rev_info[0], _rev_info[1]]
1346 1346 return [None, None]
1347 1347
1348 1348 @landing_rev.setter
1349 1349 def landing_rev(self, val):
1350 1350 if ':' not in val:
1351 1351 raise ValueError('value must be delimited with `:` and consist '
1352 1352 'of <rev_type>:<rev>, got %s instead' % val)
1353 1353 self._landing_revision = val
1354 1354
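# A minimal sketch of the ``landing_rev`` round-trip; ``repo`` is any
# Repository instance:
#
#   repo.landing_rev = 'branch:default'   # stored verbatim in landing_revision
#   repo.landing_rev                      # -> ['branch', 'default']
#   repo.landing_rev = 'default'          # raises ValueError: no ':' delimiter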
1355 1355 @hybrid_property
1356 1356 def locked(self):
1357 1357 if self._locked:
1358 1358 user_id, timelocked, reason = self._locked.split(':')
1359 1359 lock_values = int(user_id), timelocked, reason
1360 1360 else:
1361 1361 lock_values = [None, None, None]
1362 1362 return lock_values
1363 1363
1364 1364 @locked.setter
1365 1365 def locked(self, val):
1366 1366 if val and isinstance(val, (list, tuple)):
1367 1367 self._locked = ':'.join(map(str, val))
1368 1368 else:
1369 1369 self._locked = None
1370 1370
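# A minimal sketch of the ``locked`` round-trip; the values are illustrative:
#
#   repo.locked = [2, time.time(), Repository.LOCK_API]  # stored as 'user_id:time:reason'
#   user_id, lock_time, reason = repo.locked              # user_id comes back as an int
#   repo.locked = None                                    # clears the lock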
1371 1371 @hybrid_property
1372 1372 def changeset_cache(self):
1373 1373 from rhodecode.lib.vcs.backends.base import EmptyCommit
1374 1374 dummy = EmptyCommit().__json__()
1375 1375 if not self._changeset_cache:
1376 1376 return dummy
1377 1377 try:
1378 1378 return json.loads(self._changeset_cache)
1379 1379 except TypeError:
1380 1380 return dummy
1381 1381 except Exception:
1382 1382 log.error(traceback.format_exc())
1383 1383 return dummy
1384 1384
1385 1385 @changeset_cache.setter
1386 1386 def changeset_cache(self, val):
1387 1387 try:
1388 1388 self._changeset_cache = json.dumps(val)
1389 1389 except Exception:
1390 1390 log.error(traceback.format_exc())
1391 1391
1392 1392 @hybrid_property
1393 1393 def repo_name(self):
1394 1394 return self._repo_name
1395 1395
1396 1396 @repo_name.setter
1397 1397 def repo_name(self, value):
1398 1398 self._repo_name = value
1399 1399 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1400 1400
1401 1401 @classmethod
1402 1402 def normalize_repo_name(cls, repo_name):
1403 1403 """
1404 1404 Normalizes an OS-specific repo_name to the format stored internally in the
1405 1405 database, using URL_SEP
1406 1406
1407 1407 :param cls:
1408 1408 :param repo_name:
1409 1409 """
1410 1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
1411 1411
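# Example of ``normalize_repo_name`` on a Windows-style path (os.sep == '\\'):
#
#   Repository.normalize_repo_name(r'group\sub\repo')   # -> 'group/sub/repo'
#
# On POSIX systems (os.sep == '/') the name comes back unchanged.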
1412 1412 @classmethod
1413 1413 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1414 1414 session = Session()
1415 1415 q = session.query(cls).filter(cls.repo_name == repo_name)
1416 1416
1417 1417 if cache:
1418 1418 if identity_cache:
1419 1419 val = cls.identity_cache(session, 'repo_name', repo_name)
1420 1420 if val:
1421 1421 return val
1422 1422 else:
1423 1423 q = q.options(
1424 1424 FromCache("sql_cache_short",
1425 1425 "get_repo_by_name_%s" % _hash_key(repo_name)))
1426 1426
1427 1427 return q.scalar()
1428 1428
1429 1429 @classmethod
1430 1430 def get_by_full_path(cls, repo_full_path):
1431 1431 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1432 1432 repo_name = cls.normalize_repo_name(repo_name)
1433 1433 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434 1434
1435 1435 @classmethod
1436 1436 def get_repo_forks(cls, repo_id):
1437 1437 return cls.query().filter(Repository.fork_id == repo_id)
1438 1438
1439 1439 @classmethod
1440 1440 def base_path(cls):
1441 1441 """
1442 1442 Returns the base path where all repos are stored
1443 1443
1444 1444 :param cls:
1445 1445 """
1446 1446 q = Session().query(RhodeCodeUi)\
1447 1447 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1448 1448 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1449 1449 return q.one().ui_value
1450 1450
1451 1451 @classmethod
1452 1452 def is_valid(cls, repo_name):
1453 1453 """
1454 1454 returns True if given repo name is a valid filesystem repository
1455 1455
1456 1456 :param cls:
1457 1457 :param repo_name:
1458 1458 """
1459 1459 from rhodecode.lib.utils import is_valid_repo
1460 1460
1461 1461 return is_valid_repo(repo_name, cls.base_path())
1462 1462
1463 1463 @classmethod
1464 1464 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1465 1465 case_insensitive=True):
1466 1466 q = Repository.query()
1467 1467
1468 1468 if not isinstance(user_id, Optional):
1469 1469 q = q.filter(Repository.user_id == user_id)
1470 1470
1471 1471 if not isinstance(group_id, Optional):
1472 1472 q = q.filter(Repository.group_id == group_id)
1473 1473
1474 1474 if case_insensitive:
1475 1475 q = q.order_by(func.lower(Repository.repo_name))
1476 1476 else:
1477 1477 q = q.order_by(Repository.repo_name)
1478 1478 return q.all()
1479 1479
1480 1480 @property
1481 1481 def forks(self):
1482 1482 """
1483 1483 Return forks of this repo
1484 1484 """
1485 1485 return Repository.get_repo_forks(self.repo_id)
1486 1486
1487 1487 @property
1488 1488 def parent(self):
1489 1489 """
1490 1490 Returns fork parent
1491 1491 """
1492 1492 return self.fork
1493 1493
1494 1494 @property
1495 1495 def just_name(self):
1496 1496 return self.repo_name.split(self.NAME_SEP)[-1]
1497 1497
1498 1498 @property
1499 1499 def groups_with_parents(self):
1500 1500 groups = []
1501 1501 if self.group is None:
1502 1502 return groups
1503 1503
1504 1504 cur_gr = self.group
1505 1505 groups.insert(0, cur_gr)
1506 1506 while 1:
1507 1507 gr = getattr(cur_gr, 'parent_group', None)
1508 1508 cur_gr = cur_gr.parent_group
1509 1509 if gr is None:
1510 1510 break
1511 1511 groups.insert(0, gr)
1512 1512
1513 1513 return groups
1514 1514
1515 1515 @property
1516 1516 def groups_and_repo(self):
1517 1517 return self.groups_with_parents, self
1518 1518
1519 1519 @LazyProperty
1520 1520 def repo_path(self):
1521 1521 """
1522 1522 Returns the full base path for this repository, i.e. where it actually
1523 1523 exists on the filesystem
1524 1524 """
1525 1525 q = Session().query(RhodeCodeUi).filter(
1526 1526 RhodeCodeUi.ui_key == self.NAME_SEP)
1527 1527 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1528 1528 return q.one().ui_value
1529 1529
1530 1530 @property
1531 1531 def repo_full_path(self):
1532 1532 p = [self.repo_path]
1533 1533 # we need to split the name by / since this is how we store the
1534 1534 # names in the database, but that eventually needs to be converted
1535 1535 # into a valid system path
1536 1536 p += self.repo_name.split(self.NAME_SEP)
1537 1537 return os.path.join(*map(safe_unicode, p))
1538 1538
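# A sketch of how ``repo_full_path`` is assembled; the base path comes from
# the RhodeCodeUi setting queried in ``repo_path`` above, and '/srv/repos'
# is only an example value:
#
#   repo_path       = '/srv/repos'
#   repo_name       = 'group/sub/my-repo'
#   repo_full_path  # -> '/srv/repos/group/sub/my-repo'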
1539 1539 @property
1540 1540 def cache_keys(self):
1541 1541 """
1542 1542 Returns associated cache keys for that repo
1543 1543 """
1544 1544 return CacheKey.query()\
1545 1545 .filter(CacheKey.cache_args == self.repo_name)\
1546 1546 .order_by(CacheKey.cache_key)\
1547 1547 .all()
1548 1548
1549 1549 def get_new_name(self, repo_name):
1550 1550 """
1551 1551 returns new full repository name based on assigned group and new name
1552 1552 
1553 1553 :param repo_name:
1554 1554 """
1555 1555 path_prefix = self.group.full_path_splitted if self.group else []
1556 1556 return self.NAME_SEP.join(path_prefix + [repo_name])
1557 1557
1558 1558 @property
1559 1559 def _config(self):
1560 1560 """
1561 1561 Returns db based config object.
1562 1562 """
1563 1563 from rhodecode.lib.utils import make_db_config
1564 1564 return make_db_config(clear_session=False, repo=self)
1565 1565
1566 1566 def permissions(self, with_admins=True, with_owner=True):
1567 1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 1569 joinedload(UserRepoToPerm.user),
1570 1570 joinedload(UserRepoToPerm.permission),)
1571 1571
1572 1572 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1573 1573 # objects into plain AttributeDict rows because the SQLAlchemy session
1574 1574 # keeps a global reference, so mutating one object would propagate to
1575 1575 # all others. Without the copy, if an admin is also the owner, setting
1576 1576 # admin_row would change both records.
1577 1577 perm_rows = []
1578 1578 for _usr in q.all():
1579 1579 usr = AttributeDict(_usr.user.get_dict())
1580 1580 usr.permission = _usr.permission.permission_name
1581 1581 perm_rows.append(usr)
1582 1582
1583 1583 # order the perm rows: the 'default' user first, then by
1584 1584 # admin/write/read/none permission, sorted alphabetically
1585 1585 # within each permission group
1586 1586 perm_rows = sorted(perm_rows, key=display_sort)
1587 1587
1588 1588 _admin_perm = 'repository.admin'
1589 1589 owner_row = []
1590 1590 if with_owner:
1591 1591 usr = AttributeDict(self.user.get_dict())
1592 1592 usr.owner_row = True
1593 1593 usr.permission = _admin_perm
1594 1594 owner_row.append(usr)
1595 1595
1596 1596 super_admin_rows = []
1597 1597 if with_admins:
1598 1598 for usr in User.get_all_super_admins():
1599 1599 # if this admin is also owner, don't double the record
1600 1600 if usr.user_id == owner_row[0].user_id:
1601 1601 owner_row[0].admin_row = True
1602 1602 else:
1603 1603 usr = AttributeDict(usr.get_dict())
1604 1604 usr.admin_row = True
1605 1605 usr.permission = _admin_perm
1606 1606 super_admin_rows.append(usr)
1607 1607
1608 1608 return super_admin_rows + owner_row + perm_rows
1609 1609
1610 1610 def permission_user_groups(self):
1611 1611 q = UserGroupRepoToPerm.query().filter(
1612 1612 UserGroupRepoToPerm.repository == self)
1613 1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1614 1614 joinedload(UserGroupRepoToPerm.users_group),
1615 1615 joinedload(UserGroupRepoToPerm.permission),)
1616 1616
1617 1617 perm_rows = []
1618 1618 for _user_group in q.all():
1619 1619 usr = AttributeDict(_user_group.users_group.get_dict())
1620 1620 usr.permission = _user_group.permission.permission_name
1621 1621 perm_rows.append(usr)
1622 1622
1623 1623 return perm_rows
1624 1624
1625 1625 def get_api_data(self, include_secrets=False):
1626 1626 """
1627 1627 Common function for generating repo api data
1628 1628
1629 1629 :param include_secrets: See :meth:`User.get_api_data`.
1630 1630
1631 1631 """
1632 1632 # TODO: mikhail: There is an anti-pattern here; we probably need to
1633 1633 # move these methods to the model level.
1634 1634 from rhodecode.model.settings import SettingsModel
1635 1635
1636 1636 repo = self
1637 1637 _user_id, _time, _reason = self.locked
1638 1638
1639 1639 data = {
1640 1640 'repo_id': repo.repo_id,
1641 1641 'repo_name': repo.repo_name,
1642 1642 'repo_type': repo.repo_type,
1643 1643 'clone_uri': repo.clone_uri or '',
1644 1644 'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1645 1645 'private': repo.private,
1646 1646 'created_on': repo.created_on,
1647 1647 'description': repo.description,
1648 1648 'landing_rev': repo.landing_rev,
1649 1649 'owner': repo.user.username,
1650 1650 'fork_of': repo.fork.repo_name if repo.fork else None,
1651 1651 'enable_statistics': repo.enable_statistics,
1652 1652 'enable_locking': repo.enable_locking,
1653 1653 'enable_downloads': repo.enable_downloads,
1654 1654 'last_changeset': repo.changeset_cache,
1655 1655 'locked_by': User.get(_user_id).get_api_data(
1656 1656 include_secrets=include_secrets) if _user_id else None,
1657 1657 'locked_date': time_to_datetime(_time) if _time else None,
1658 1658 'lock_reason': _reason if _reason else None,
1659 1659 }
1660 1660
1661 1661 # TODO: mikhail: should be per-repo settings here
1662 1662 rc_config = SettingsModel().get_all_settings()
1663 1663 repository_fields = str2bool(
1664 1664 rc_config.get('rhodecode_repository_fields'))
1665 1665 if repository_fields:
1666 1666 for f in self.extra_fields:
1667 1667 data[f.field_key_prefixed] = f.field_value
1668 1668
1669 1669 return data
1670 1670
1671 1671 @classmethod
1672 1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1673 1673 if not lock_time:
1674 1674 lock_time = time.time()
1675 1675 if not lock_reason:
1676 1676 lock_reason = cls.LOCK_AUTOMATIC
1677 1677 repo.locked = [user_id, lock_time, lock_reason]
1678 1678 Session().add(repo)
1679 1679 Session().commit()
1680 1680
1681 1681 @classmethod
1682 1682 def unlock(cls, repo):
1683 1683 repo.locked = None
1684 1684 Session().add(repo)
1685 1685 Session().commit()
1686 1686
1687 1687 @classmethod
1688 1688 def getlock(cls, repo):
1689 1689 return repo.locked
1690 1690
1691 1691 def is_user_lock(self, user_id):
1692 1692 if self.locked[0]:
1693 1693 lock_user_id = safe_int(self.locked[0])
1694 1694 user_id = safe_int(user_id)
1695 1695 # both are ints, and they are equal
1696 1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1697 1697
1698 1698 return False
1699 1699
1700 1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1701 1701 """
1702 1702 Checks locking on this repository. If locking is enabled and a lock is
1703 1703 present, returns a tuple of (make_lock, locked, locked_by).
1704 1704 make_lock is tri-state: None (do nothing), True (make a lock) or
1705 1705 False (release the lock). The value is later propagated to the hooks,
1706 1706 which do the actual locking; think of it as a signal telling the hooks what to do.
1707 1707
1708 1708 """
1709 1709 # TODO: johbo: This is part of the business logic and should be moved
1710 1710 # into the RepositoryModel.
1711 1711
1712 1712 if action not in ('push', 'pull'):
1713 1713 raise ValueError("Invalid action value: %s" % repr(action))
1714 1714
1715 1715 # defines if locked error should be thrown to user
1716 1716 currently_locked = False
1717 1717 # defines if new lock should be made, tri-state
1718 1718 make_lock = None
1719 1719 repo = self
1720 1720 user = User.get(user_id)
1721 1721
1722 1722 lock_info = repo.locked
1723 1723
1724 1724 if repo and (repo.enable_locking or not only_when_enabled):
1725 1725 if action == 'push':
1726 1726 # check if it's already locked; if it is, compare users
1727 1727 locked_by_user_id = lock_info[0]
1728 1728 if user.user_id == locked_by_user_id:
1729 1729 log.debug(
1730 1730 'Got `push` action from user %s, now unlocking', user)
1731 1731 # unlock if we have push from user who locked
1732 1732 make_lock = False
1733 1733 else:
1734 1734 # we're not the same user who locked, ban with
1735 1735 # code defined in settings (default is 423 HTTP Locked) !
1736 1736 log.debug('Repo %s is currently locked by %s', repo, user)
1737 1737 currently_locked = True
1738 1738 elif action == 'pull':
1739 1739 # [0] user [1] date
1740 1740 if lock_info[0] and lock_info[1]:
1741 1741 log.debug('Repo %s is currently locked by %s', repo, user)
1742 1742 currently_locked = True
1743 1743 else:
1744 1744 log.debug('Setting lock on repo %s by %s', repo, user)
1745 1745 make_lock = True
1746 1746
1747 1747 else:
1748 1748 log.debug('Repository %s does not have locking enabled', repo)
1749 1749
1750 1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1751 1751 make_lock, currently_locked, lock_info)
1752 1752
1753 1753 from rhodecode.lib.auth import HasRepoPermissionAny
1754 1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1755 1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1756 1756 # if we don't have at least write permission we cannot make a lock
1757 1757 log.debug('lock state reset back to FALSE due to lack '
1758 1758 'of at least write permission')
1759 1759 make_lock = False
1760 1760
1761 1761 return make_lock, currently_locked, lock_info
1762 1762
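# A minimal sketch of interpreting the tri-state result at a call site;
# this mirrors the docstring above and is illustrative only:
#
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
#   if make_lock is True:      # set a new lock in the hook
#       ...
#   elif make_lock is False:   # release the existing lock
#       ...
#   else:                      # None: leave the lock state untouched
#       ...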
1763 1763 @property
1764 1764 def last_db_change(self):
1765 1765 return self.updated_on
1766 1766
1767 1767 @property
1768 1768 def clone_uri_hidden(self):
1769 1769 clone_uri = self.clone_uri
1770 1770 if clone_uri:
1771 1771 import urlobject
1772 1772 url_obj = urlobject.URLObject(clone_uri)
1773 1773 if url_obj.password:
1774 1774 clone_uri = url_obj.with_password('*****')
1775 1775 return clone_uri
1776 1776
1777 1777 def clone_url(self, **override):
1778 1778 qualified_home_url = url('home', qualified=True)
1779 1779
1780 1780 uri_tmpl = None
1781 1781 if 'with_id' in override:
1782 1782 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1783 1783 del override['with_id']
1784 1784
1785 1785 if 'uri_tmpl' in override:
1786 1786 uri_tmpl = override['uri_tmpl']
1787 1787 del override['uri_tmpl']
1788 1788
1789 1789 # we didn't override our tmpl from **overrides
1790 1790 if not uri_tmpl:
1791 1791 uri_tmpl = self.DEFAULT_CLONE_URI
1792 1792 try:
1793 1793 from pylons import tmpl_context as c
1794 1794 uri_tmpl = c.clone_uri_tmpl
1795 1795 except Exception:
1796 1796 # in any case if we call this outside of request context,
1797 1797 # ie, not having tmpl_context set up
1798 1798 pass
1799 1799
1800 1800 return get_clone_url(uri_tmpl=uri_tmpl,
1801 1801 qualifed_home_url=qualified_home_url,
1802 1802 repo_name=self.repo_name,
1803 1803 repo_id=self.repo_id, **override)
1804 1804
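# A rough sketch of ``clone_url()``; the host and user below are invented and
# the real values come from the request context / settings:
#
#   repo.clone_url()
#   # DEFAULT_CLONE_URI '{scheme}://{user}@{netloc}/{repo}' rendered as e.g.
#   # 'https://user@code.example.com/group/my-repo'
#   repo.clone_url(with_id=True)
#   # uses DEFAULT_CLONE_URI_ID, e.g. 'https://user@code.example.com/_42'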
1805 1805 def set_state(self, state):
1806 1806 self.repo_state = state
1807 1807 Session().add(self)
1808 1808 #==========================================================================
1809 1809 # SCM PROPERTIES
1810 1810 #==========================================================================
1811 1811
1812 1812 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1813 1813 return get_commit_safe(
1814 1814 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1815 1815
1816 1816 def get_changeset(self, rev=None, pre_load=None):
1817 1817 warnings.warn("Use get_commit", DeprecationWarning)
1818 1818 commit_id = None
1819 1819 commit_idx = None
1820 1820 if isinstance(rev, basestring):
1821 1821 commit_id = rev
1822 1822 else:
1823 1823 commit_idx = rev
1824 1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1825 1825 pre_load=pre_load)
1826 1826
1827 1827 def get_landing_commit(self):
1828 1828 """
1829 1829 Returns landing commit, or if that doesn't exist returns the tip
1830 1830 """
1831 1831 _rev_type, _rev = self.landing_rev
1832 1832 commit = self.get_commit(_rev)
1833 1833 if isinstance(commit, EmptyCommit):
1834 1834 return self.get_commit()
1835 1835 return commit
1836 1836
1837 1837 def update_commit_cache(self, cs_cache=None, config=None):
1838 1838 """
1839 1839 Update cache of last changeset for repository, keys should be::
1840 1840
1841 1841 short_id
1842 1842 raw_id
1843 1843 revision
1844 1844 parents
1845 1845 message
1846 1846 date
1847 1847 author
1848 1848
1849 1849 :param cs_cache:
1850 1850 """
1851 1851 from rhodecode.lib.vcs.backends.base import BaseChangeset
1852 1852 if cs_cache is None:
1853 1853 # use no-cache version here
1854 1854 scm_repo = self.scm_instance(cache=False, config=config)
1855 1855 if scm_repo:
1856 1856 cs_cache = scm_repo.get_commit(
1857 1857 pre_load=["author", "date", "message", "parents"])
1858 1858 else:
1859 1859 cs_cache = EmptyCommit()
1860 1860
1861 1861 if isinstance(cs_cache, BaseChangeset):
1862 1862 cs_cache = cs_cache.__json__()
1863 1863
1864 1864 def is_outdated(new_cs_cache):
1865 1865 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1866 1866 new_cs_cache['revision'] != self.changeset_cache['revision']):
1867 1867 return True
1868 1868 return False
1869 1869
1870 1870 # check if we maybe already have the latest revision cached
1871 1871 if is_outdated(cs_cache) or not self.changeset_cache:
1872 1872 _default = datetime.datetime.fromtimestamp(0)
1873 1873 last_change = cs_cache.get('date') or _default
1874 1874 log.debug('updated repo %s with new cs cache %s',
1875 1875 self.repo_name, cs_cache)
1876 1876 self.updated_on = last_change
1877 1877 self.changeset_cache = cs_cache
1878 1878 Session().add(self)
1879 1879 Session().commit()
1880 1880 else:
1881 1881 log.debug('Skipping update_commit_cache for repo:`%s` '
1882 1882 'commit already with latest changes', self.repo_name)
1883 1883
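# A sketch of the shape of ``cs_cache`` accepted by update_commit_cache(),
# matching the keys listed in its docstring; the values are invented:
#
#   repo.update_commit_cache(cs_cache={
#       'short_id': 'abcdef123456', 'raw_id': 'abcdef123456...', 'revision': 7,
#       'parents': [], 'message': 'fix bug', 'date': datetime.datetime.now(),
#       'author': 'dev <dev@example.com>',
#   })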
1884 1884 @property
1885 1885 def tip(self):
1886 1886 return self.get_commit('tip')
1887 1887
1888 1888 @property
1889 1889 def author(self):
1890 1890 return self.tip.author
1891 1891
1892 1892 @property
1893 1893 def last_change(self):
1894 1894 return self.scm_instance().last_change
1895 1895
1896 1896 def get_comments(self, revisions=None):
1897 1897 """
1898 1898 Returns comments for this repository grouped by revisions
1899 1899
1900 1900 :param revisions: filter query by revisions only
1901 1901 """
1902 1902 cmts = ChangesetComment.query()\
1903 1903 .filter(ChangesetComment.repo == self)
1904 1904 if revisions:
1905 1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1906 1906 grouped = collections.defaultdict(list)
1907 1907 for cmt in cmts.all():
1908 1908 grouped[cmt.revision].append(cmt)
1909 1909 return grouped
1910 1910
1911 1911 def statuses(self, revisions=None):
1912 1912 """
1913 1913 Returns statuses for this repository
1914 1914
1915 1915 :param revisions: list of revisions to get statuses for
1916 1916 """
1917 1917 statuses = ChangesetStatus.query()\
1918 1918 .filter(ChangesetStatus.repo == self)\
1919 1919 .filter(ChangesetStatus.version == 0)
1920 1920
1921 1921 if revisions:
1922 1922 # Try doing the filtering in chunks to avoid hitting limits
1923 1923 size = 500
1924 1924 status_results = []
1925 1925 for chunk in xrange(0, len(revisions), size):
1926 1926 status_results += statuses.filter(
1927 1927 ChangesetStatus.revision.in_(
1928 1928 revisions[chunk: chunk+size])
1929 1929 ).all()
1930 1930 else:
1931 1931 status_results = statuses.all()
1932 1932
1933 1933 grouped = {}
1934 1934
1935 1935 # maybe we have an open pull request without a status yet?
1936 1936 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1937 1937 status_lbl = ChangesetStatus.get_status_lbl(stat)
1938 1938 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1939 1939 for rev in pr.revisions:
1940 1940 pr_id = pr.pull_request_id
1941 1941 pr_repo = pr.target_repo.repo_name
1942 1942 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1943 1943
1944 1944 for stat in status_results:
1945 1945 pr_id = pr_repo = None
1946 1946 if stat.pull_request:
1947 1947 pr_id = stat.pull_request.pull_request_id
1948 1948 pr_repo = stat.pull_request.target_repo.repo_name
1949 1949 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1950 1950 pr_id, pr_repo]
1951 1951 return grouped
1952 1952
1953 1953 # ==========================================================================
1954 1954 # SCM CACHE INSTANCE
1955 1955 # ==========================================================================
1956 1956
1957 1957 def scm_instance(self, **kwargs):
1958 1958 import rhodecode
1959 1959
1960 1960 # Passing a config will not hit the cache; currently this is only
1961 1961 # used for repo2dbmapper
1962 1962 config = kwargs.pop('config', None)
1963 1963 cache = kwargs.pop('cache', None)
1964 1964 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1965 1965 # if cache is NOT defined use the global default, otherwise we have
1966 1966 # full control over the cache behaviour
1967 1967 if cache is None and full_cache and not config:
1968 1968 return self._get_instance_cached()
1969 1969 return self._get_instance(cache=bool(cache), config=config)
1970 1970
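# A brief sketch of how the ``cache`` kwarg interacts with the global setting:
#
#   repo.scm_instance()             # honours the global vcs_full_cache setting
#   repo.scm_instance(cache=False)  # an explicit value bypasses the cached path
#   repo.scm_instance(config=cfg)   # passing a config also skips the cache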
1971 1971 def _get_instance_cached(self):
1972 1972 @cache_region('long_term')
1973 1973 def _get_repo(cache_key):
1974 1974 return self._get_instance()
1975 1975
1976 1976 invalidator_context = CacheKey.repo_context_cache(
1977 1977 _get_repo, self.repo_name, None)
1978 1978
1979 1979 with invalidator_context as context:
1980 1980 context.invalidate()
1981 1981 repo = context.compute()
1982 1982
1983 1983 return repo
1984 1984
1985 1985 def _get_instance(self, cache=True, config=None):
1986 1986 repo_full_path = self.repo_full_path
1987 1987 try:
1988 1988 vcs_alias = get_scm(repo_full_path)[0]
1989 1989 log.debug(
1990 1990 'Creating instance of %s repository from %s',
1991 1991 vcs_alias, repo_full_path)
1992 1992 backend = get_backend(vcs_alias)
1993 1993 except VCSError:
1994 1994 log.exception(
1995 1995 'Perhaps this repository is in the db but not on the '
1996 1996 'filesystem; run "rescan repositories" with the '
1997 1997 '"destroy old data" option from the admin panel')
1998 1998 return
1999 1999
2000 2000 config = config or self._config
2001 2001 custom_wire = {
2002 2002 'cache': cache # controls the vcs.remote cache
2003 2003 }
2004 2004 repo = backend(
2005 2005 safe_str(repo_full_path), config=config, create=False,
2006 2006 with_wire=custom_wire)
2007 2007
2008 2008 return repo
2009 2009
2010 2010 def __json__(self):
2011 2011 return {'landing_rev': self.landing_rev}
2012 2012
2013 2013 def get_dict(self):
2014 2014
2015 2015 # Since we transformed `repo_name` to a hybrid property, we need to
2016 2016 # keep compatibility with the code which uses `repo_name` field.
2017 2017
2018 2018 result = super(Repository, self).get_dict()
2019 2019 result['repo_name'] = result.pop('_repo_name', None)
2020 2020 return result
2021 2021
2022 2022
2023 2023 class RepoGroup(Base, BaseModel):
2024 2024 __tablename__ = 'groups'
2025 2025 __table_args__ = (
2026 2026 UniqueConstraint('group_name', 'group_parent_id'),
2027 2027 CheckConstraint('group_id != group_parent_id'),
2028 2028 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2029 2029 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2030 2030 )
2031 2031 __mapper_args__ = {'order_by': 'group_name'}
2032 2032
2033 2033 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2034 2034
2035 2035 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2036 2036 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2037 2037 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2038 2038 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2039 2039 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2040 2040 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2041 2041 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2042 2042
2043 2043 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2044 2044 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2045 2045 parent_group = relationship('RepoGroup', remote_side=group_id)
2046 2046 user = relationship('User')
2047 2047
2048 2048 def __init__(self, group_name='', parent_group=None):
2049 2049 self.group_name = group_name
2050 2050 self.parent_group = parent_group
2051 2051
2052 2052 def __unicode__(self):
2053 2053 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2054 2054 self.group_name)
2055 2055
2056 2056 @classmethod
2057 2057 def _generate_choice(cls, repo_group):
2058 2058 from webhelpers.html import literal as _literal
2059 2059 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2060 2060 return repo_group.group_id, _name(repo_group.full_path_splitted)
2061 2061
2062 2062 @classmethod
2063 2063 def groups_choices(cls, groups=None, show_empty_group=True):
2064 2064 if not groups:
2065 2065 groups = cls.query().all()
2066 2066
2067 2067 repo_groups = []
2068 2068 if show_empty_group:
2069 2069 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2070 2070
2071 2071 repo_groups.extend([cls._generate_choice(x) for x in groups])
2072 2072
2073 2073 repo_groups = sorted(
2074 2074 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2075 2075 return repo_groups
2076 2076
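# A sketch of the select2 choices produced by ``groups_choices()``; the ids
# and group names are invented:
#
#   RepoGroup.groups_choices()
#   # -> [('-1', u'-- No parent --'),
#   #     (1, u'projects'),
#   #     (2, u'projects/internal')]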
2077 2077 @classmethod
2078 2078 def url_sep(cls):
2079 2079 return URL_SEP
2080 2080
2081 2081 @classmethod
2082 2082 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2083 2083 if case_insensitive:
2084 2084 gr = cls.query().filter(func.lower(cls.group_name)
2085 2085 == func.lower(group_name))
2086 2086 else:
2087 2087 gr = cls.query().filter(cls.group_name == group_name)
2088 2088 if cache:
2089 2089 gr = gr.options(FromCache(
2090 2090 "sql_cache_short",
2091 2091 "get_group_%s" % _hash_key(group_name)))
2092 2092 return gr.scalar()
2093 2093
2094 2094 @classmethod
2095 2095 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2096 2096 case_insensitive=True):
2097 2097 q = RepoGroup.query()
2098 2098
2099 2099 if not isinstance(user_id, Optional):
2100 2100 q = q.filter(RepoGroup.user_id == user_id)
2101 2101
2102 2102 if not isinstance(group_id, Optional):
2103 2103 q = q.filter(RepoGroup.group_parent_id == group_id)
2104 2104
2105 2105 if case_insensitive:
2106 2106 q = q.order_by(func.lower(RepoGroup.group_name))
2107 2107 else:
2108 2108 q = q.order_by(RepoGroup.group_name)
2109 2109 return q.all()
2110 2110
2111 2111 @property
2112 2112 def parents(self):
2113 2113 parents_recursion_limit = 10
2114 2114 groups = []
2115 2115 if self.parent_group is None:
2116 2116 return groups
2117 2117 cur_gr = self.parent_group
2118 2118 groups.insert(0, cur_gr)
2119 2119 cnt = 0
2120 2120 while 1:
2121 2121 cnt += 1
2122 2122 gr = getattr(cur_gr, 'parent_group', None)
2123 2123 cur_gr = cur_gr.parent_group
2124 2124 if gr is None:
2125 2125 break
2126 2126 if cnt == parents_recursion_limit:
2127 2127 # this will prevent accidental infinite loops
2128 2128 log.error(('more than %s parents found for group %s, stopping '
2129 2129 'recursive parent fetching' % (parents_recursion_limit, self)))
2130 2130 break
2131 2131
2132 2132 groups.insert(0, gr)
2133 2133 return groups
2134 2134
2135 2135 @property
2136 2136 def children(self):
2137 2137 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2138 2138
2139 2139 @property
2140 2140 def name(self):
2141 2141 return self.group_name.split(RepoGroup.url_sep())[-1]
2142 2142
2143 2143 @property
2144 2144 def full_path(self):
2145 2145 return self.group_name
2146 2146
2147 2147 @property
2148 2148 def full_path_splitted(self):
2149 2149 return self.group_name.split(RepoGroup.url_sep())
2150 2150
2151 2151 @property
2152 2152 def repositories(self):
2153 2153 return Repository.query()\
2154 2154 .filter(Repository.group == self)\
2155 2155 .order_by(Repository.repo_name)
2156 2156
2157 2157 @property
2158 2158 def repositories_recursive_count(self):
2159 2159 cnt = self.repositories.count()
2160 2160
2161 2161 def children_count(group):
2162 2162 cnt = 0
2163 2163 for child in group.children:
2164 2164 cnt += child.repositories.count()
2165 2165 cnt += children_count(child)
2166 2166 return cnt
2167 2167
2168 2168 return cnt + children_count(self)
2169 2169
2170 2170 def _recursive_objects(self, include_repos=True):
2171 2171 all_ = []
2172 2172
2173 2173 def _get_members(root_gr):
2174 2174 if include_repos:
2175 2175 for r in root_gr.repositories:
2176 2176 all_.append(r)
2177 2177 childs = root_gr.children.all()
2178 2178 if childs:
2179 2179 for gr in childs:
2180 2180 all_.append(gr)
2181 2181 _get_members(gr)
2182 2182
2183 2183 _get_members(self)
2184 2184 return [self] + all_
2185 2185
2186 2186 def recursive_groups_and_repos(self):
2187 2187 """
2188 2188 Recursively returns all groups, with the repositories in those groups
2189 2189 """
2190 2190 return self._recursive_objects()
2191 2191
2192 2192 def recursive_groups(self):
2193 2193 """
2194 2194 Returns all child groups of this group, including children of children
2195 2195 """
2196 2196 return self._recursive_objects(include_repos=False)
2197 2197
2198 2198 def get_new_name(self, group_name):
2199 2199 """
2200 2200 returns new full group name based on parent and new name
2201 2201
2202 2202 :param group_name:
2203 2203 """
2204 2204 path_prefix = (self.parent_group.full_path_splitted if
2205 2205 self.parent_group else [])
2206 2206 return RepoGroup.url_sep().join(path_prefix + [group_name])
2207 2207
2208 2208 def permissions(self, with_admins=True, with_owner=True):
2209 2209 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2210 2210 q = q.options(joinedload(UserRepoGroupToPerm.group),
2211 2211 joinedload(UserRepoGroupToPerm.user),
2212 2212 joinedload(UserRepoGroupToPerm.permission),)
2213 2213
2214 2214 # get owners, admins and their permissions. We rewrite the SQLAlchemy
2215 2215 # objects into plain AttributeDict rows because the SQLAlchemy session
2216 2216 # keeps a global reference, so mutating one object would propagate to
2217 2217 # all others. Without the copy, if an admin is also the owner, setting
2218 2218 # admin_row would change both records.
2219 2219 perm_rows = []
2220 2220 for _usr in q.all():
2221 2221 usr = AttributeDict(_usr.user.get_dict())
2222 2222 usr.permission = _usr.permission.permission_name
2223 2223 perm_rows.append(usr)
2224 2224
2225 2225 # order the perm rows: the 'default' user first, then by
2226 2226 # admin/write/read/none permission, sorted alphabetically
2227 2227 # within each permission group
2228 2228 perm_rows = sorted(perm_rows, key=display_sort)
2229 2229
2230 2230 _admin_perm = 'group.admin'
2231 2231 owner_row = []
2232 2232 if with_owner:
2233 2233 usr = AttributeDict(self.user.get_dict())
2234 2234 usr.owner_row = True
2235 2235 usr.permission = _admin_perm
2236 2236 owner_row.append(usr)
2237 2237
2238 2238 super_admin_rows = []
2239 2239 if with_admins:
2240 2240 for usr in User.get_all_super_admins():
2241 2241 # if this admin is also owner, don't double the record
2242 2242 if usr.user_id == owner_row[0].user_id:
2243 2243 owner_row[0].admin_row = True
2244 2244 else:
2245 2245 usr = AttributeDict(usr.get_dict())
2246 2246 usr.admin_row = True
2247 2247 usr.permission = _admin_perm
2248 2248 super_admin_rows.append(usr)
2249 2249
2250 2250 return super_admin_rows + owner_row + perm_rows
2251 2251
2252 2252 def permission_user_groups(self):
2253 2253 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2254 2254 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2255 2255 joinedload(UserGroupRepoGroupToPerm.users_group),
2256 2256 joinedload(UserGroupRepoGroupToPerm.permission),)
2257 2257
2258 2258 perm_rows = []
2259 2259 for _user_group in q.all():
2260 2260 usr = AttributeDict(_user_group.users_group.get_dict())
2261 2261 usr.permission = _user_group.permission.permission_name
2262 2262 perm_rows.append(usr)
2263 2263
2264 2264 return perm_rows
2265 2265
2266 2266 def get_api_data(self):
2267 2267 """
2268 2268 Common function for generating api data
2269 2269
2270 2270 """
2271 2271 group = self
2272 2272 data = {
2273 2273 'group_id': group.group_id,
2274 2274 'group_name': group.group_name,
2275 2275 'group_description': group.group_description,
2276 2276 'parent_group': group.parent_group.group_name if group.parent_group else None,
2277 2277 'repositories': [x.repo_name for x in group.repositories],
2278 2278 'owner': group.user.username,
2279 2279 }
2280 2280 return data
2281 2281
2282 2282
2283 2283 class Permission(Base, BaseModel):
2284 2284 __tablename__ = 'permissions'
2285 2285 __table_args__ = (
2286 2286 Index('p_perm_name_idx', 'permission_name'),
2287 2287 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2288 2288 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2289 2289 )
2290 2290 PERMS = [
2291 2291 ('hg.admin', _('RhodeCode Super Administrator')),
2292 2292
2293 2293 ('repository.none', _('Repository no access')),
2294 2294 ('repository.read', _('Repository read access')),
2295 2295 ('repository.write', _('Repository write access')),
2296 2296 ('repository.admin', _('Repository admin access')),
2297 2297
2298 2298 ('group.none', _('Repository group no access')),
2299 2299 ('group.read', _('Repository group read access')),
2300 2300 ('group.write', _('Repository group write access')),
2301 2301 ('group.admin', _('Repository group admin access')),
2302 2302
2303 2303 ('usergroup.none', _('User group no access')),
2304 2304 ('usergroup.read', _('User group read access')),
2305 2305 ('usergroup.write', _('User group write access')),
2306 2306 ('usergroup.admin', _('User group admin access')),
2307 2307
2308 2308 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2309 2309 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2310 2310
2311 2311 ('hg.usergroup.create.false', _('User Group creation disabled')),
2312 2312 ('hg.usergroup.create.true', _('User Group creation enabled')),
2313 2313
2314 2314 ('hg.create.none', _('Repository creation disabled')),
2315 2315 ('hg.create.repository', _('Repository creation enabled')),
2316 2316 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2317 2317 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2318 2318
2319 2319 ('hg.fork.none', _('Repository forking disabled')),
2320 2320 ('hg.fork.repository', _('Repository forking enabled')),
2321 2321
2322 2322 ('hg.register.none', _('Registration disabled')),
2323 2323 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2324 2324 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2325 2325
2326 2326 ('hg.extern_activate.manual', _('Manual activation of external account')),
2327 2327 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2328 2328
2329 2329 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2330 2330 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2331 2331 ]
2332 2332
2333 2333 # definition of system default permissions for DEFAULT user
2334 2334 DEFAULT_USER_PERMISSIONS = [
2335 2335 'repository.read',
2336 2336 'group.read',
2337 2337 'usergroup.read',
2338 2338 'hg.create.repository',
2339 2339 'hg.repogroup.create.false',
2340 2340 'hg.usergroup.create.false',
2341 2341 'hg.create.write_on_repogroup.true',
2342 2342 'hg.fork.repository',
2343 2343 'hg.register.manual_activate',
2344 2344 'hg.extern_activate.auto',
2345 2345 'hg.inherit_default_perms.true',
2346 2346 ]
2347 2347
2348 2348 # Weight defines which permissions are more important:
2349 2349 # the higher the number, the more important
2350 2350 # the permission.
2351 2351 PERM_WEIGHTS = {
2352 2352 'repository.none': 0,
2353 2353 'repository.read': 1,
2354 2354 'repository.write': 3,
2355 2355 'repository.admin': 4,
2356 2356
2357 2357 'group.none': 0,
2358 2358 'group.read': 1,
2359 2359 'group.write': 3,
2360 2360 'group.admin': 4,
2361 2361
2362 2362 'usergroup.none': 0,
2363 2363 'usergroup.read': 1,
2364 2364 'usergroup.write': 3,
2365 2365 'usergroup.admin': 4,
2366 2366
2367 2367 'hg.repogroup.create.false': 0,
2368 2368 'hg.repogroup.create.true': 1,
2369 2369
2370 2370 'hg.usergroup.create.false': 0,
2371 2371 'hg.usergroup.create.true': 1,
2372 2372
2373 2373 'hg.fork.none': 0,
2374 2374 'hg.fork.repository': 1,
2375 2375 'hg.create.none': 0,
2376 2376 'hg.create.repository': 1
2377 2377 }
2378 2378
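# A minimal sketch of how such weights can be used to pick the strongest of
# several permissions for the same object (illustrative only; the real
# resolution happens in the permission calculation code):
#
#   perms = ['repository.read', 'repository.write']
#   strongest = max(perms, key=Permission.PERM_WEIGHTS.get)  # 'repository.write'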
2379 2379 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2380 2380 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2381 2381 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2382 2382
2383 2383 def __unicode__(self):
2384 2384 return u"<%s('%s:%s')>" % (
2385 2385 self.__class__.__name__, self.permission_id, self.permission_name
2386 2386 )
2387 2387
2388 2388 @classmethod
2389 2389 def get_by_key(cls, key):
2390 2390 return cls.query().filter(cls.permission_name == key).scalar()
2391 2391
2392 2392 @classmethod
2393 2393 def get_default_repo_perms(cls, user_id, repo_id=None):
2394 2394 q = Session().query(UserRepoToPerm, Repository, Permission)\
2395 2395 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2396 2396 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2397 2397 .filter(UserRepoToPerm.user_id == user_id)
2398 2398 if repo_id:
2399 2399 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2400 2400 return q.all()
2401 2401
2402 2402 @classmethod
2403 2403 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2404 2404 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2405 2405 .join(
2406 2406 Permission,
2407 2407 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2408 2408 .join(
2409 2409 Repository,
2410 2410 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2411 2411 .join(
2412 2412 UserGroup,
2413 2413 UserGroupRepoToPerm.users_group_id ==
2414 2414 UserGroup.users_group_id)\
2415 2415 .join(
2416 2416 UserGroupMember,
2417 2417 UserGroupRepoToPerm.users_group_id ==
2418 2418 UserGroupMember.users_group_id)\
2419 2419 .filter(
2420 2420 UserGroupMember.user_id == user_id,
2421 2421 UserGroup.users_group_active == true())
2422 2422 if repo_id:
2423 2423 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2424 2424 return q.all()
2425 2425
2426 2426 @classmethod
2427 2427 def get_default_group_perms(cls, user_id, repo_group_id=None):
2428 2428 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2429 2429 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2430 2430 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2431 2431 .filter(UserRepoGroupToPerm.user_id == user_id)
2432 2432 if repo_group_id:
2433 2433 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2434 2434 return q.all()
2435 2435
2436 2436 @classmethod
2437 2437 def get_default_group_perms_from_user_group(
2438 2438 cls, user_id, repo_group_id=None):
2439 2439 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2440 2440 .join(
2441 2441 Permission,
2442 2442 UserGroupRepoGroupToPerm.permission_id ==
2443 2443 Permission.permission_id)\
2444 2444 .join(
2445 2445 RepoGroup,
2446 2446 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2447 2447 .join(
2448 2448 UserGroup,
2449 2449 UserGroupRepoGroupToPerm.users_group_id ==
2450 2450 UserGroup.users_group_id)\
2451 2451 .join(
2452 2452 UserGroupMember,
2453 2453 UserGroupRepoGroupToPerm.users_group_id ==
2454 2454 UserGroupMember.users_group_id)\
2455 2455 .filter(
2456 2456 UserGroupMember.user_id == user_id,
2457 2457 UserGroup.users_group_active == true())
2458 2458 if repo_group_id:
2459 2459 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2460 2460 return q.all()
2461 2461
2462 2462 @classmethod
2463 2463 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2464 2464 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2465 2465 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2466 2466 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2467 2467 .filter(UserUserGroupToPerm.user_id == user_id)
2468 2468 if user_group_id:
2469 2469 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2470 2470 return q.all()
2471 2471
2472 2472 @classmethod
2473 2473 def get_default_user_group_perms_from_user_group(
2474 2474 cls, user_id, user_group_id=None):
2475 2475 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2476 2476 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2477 2477 .join(
2478 2478 Permission,
2479 2479 UserGroupUserGroupToPerm.permission_id ==
2480 2480 Permission.permission_id)\
2481 2481 .join(
2482 2482 TargetUserGroup,
2483 2483 UserGroupUserGroupToPerm.target_user_group_id ==
2484 2484 TargetUserGroup.users_group_id)\
2485 2485 .join(
2486 2486 UserGroup,
2487 2487 UserGroupUserGroupToPerm.user_group_id ==
2488 2488 UserGroup.users_group_id)\
2489 2489 .join(
2490 2490 UserGroupMember,
2491 2491 UserGroupUserGroupToPerm.user_group_id ==
2492 2492 UserGroupMember.users_group_id)\
2493 2493 .filter(
2494 2494 UserGroupMember.user_id == user_id,
2495 2495 UserGroup.users_group_active == true())
2496 2496 if user_group_id:
2497 2497 q = q.filter(
2498 2498 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2499 2499
2500 2500 return q.all()
2501 2501
2502 2502
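For orientation, here is a hedged sketch of how a caller might consume the classmethods above (assuming, as in the rest of this module, that they live on the Permission model); each result row is a (binding, RepoGroup, Permission) tuple, and the "direct grants win" merging rule is purely illustrative, not the exact algorithm used elsewhere:

def repo_group_perm_map(user_id):
    # illustrative only: merge repo-group permissions a user inherits via
    # user groups with those granted to the user directly; direct grants win
    perms = {}
    for _binding, repo_group, permission in \
            Permission.get_default_group_perms_from_user_group(user_id):
        perms[repo_group.group_id] = permission.permission_name
    for _binding, repo_group, permission in \
            Permission.get_default_group_perms(user_id):
        perms[repo_group.group_id] = permission.permission_name
    return perms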
2503 2503 class UserRepoToPerm(Base, BaseModel):
2504 2504 __tablename__ = 'repo_to_perm'
2505 2505 __table_args__ = (
2506 2506 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2507 2507 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2508 2508 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2509 2509 )
2510 2510 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2511 2511 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2512 2512 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2513 2513 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2514 2514
2515 2515 user = relationship('User')
2516 2516 repository = relationship('Repository')
2517 2517 permission = relationship('Permission')
2518 2518
2519 2519 @classmethod
2520 2520 def create(cls, user, repository, permission):
2521 2521 n = cls()
2522 2522 n.user = user
2523 2523 n.repository = repository
2524 2524 n.permission = permission
2525 2525 Session().add(n)
2526 2526 return n
2527 2527
2528 2528 def __unicode__(self):
2529 2529 return u'<%s => %s >' % (self.user, self.repository)
2530 2530
2531 2531
2532 2532 class UserUserGroupToPerm(Base, BaseModel):
2533 2533 __tablename__ = 'user_user_group_to_perm'
2534 2534 __table_args__ = (
2535 2535 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2536 2536 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2537 2537 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2538 2538 )
2539 2539 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2540 2540 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2541 2541 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2542 2542 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2543 2543
2544 2544 user = relationship('User')
2545 2545 user_group = relationship('UserGroup')
2546 2546 permission = relationship('Permission')
2547 2547
2548 2548 @classmethod
2549 2549 def create(cls, user, user_group, permission):
2550 2550 n = cls()
2551 2551 n.user = user
2552 2552 n.user_group = user_group
2553 2553 n.permission = permission
2554 2554 Session().add(n)
2555 2555 return n
2556 2556
2557 2557 def __unicode__(self):
2558 2558 return u'<%s => %s >' % (self.user, self.user_group)
2559 2559
2560 2560
2561 2561 class UserToPerm(Base, BaseModel):
2562 2562 __tablename__ = 'user_to_perm'
2563 2563 __table_args__ = (
2564 2564 UniqueConstraint('user_id', 'permission_id'),
2565 2565 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2566 2566 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2567 2567 )
2568 2568 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2569 2569 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2570 2570 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2571 2571
2572 2572 user = relationship('User')
2573 2573 permission = relationship('Permission', lazy='joined')
2574 2574
2575 2575 def __unicode__(self):
2576 2576 return u'<%s => %s >' % (self.user, self.permission)
2577 2577
2578 2578
2579 2579 class UserGroupRepoToPerm(Base, BaseModel):
2580 2580 __tablename__ = 'users_group_repo_to_perm'
2581 2581 __table_args__ = (
2582 2582 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2583 2583 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2584 2584 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2585 2585 )
2586 2586 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2587 2587 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2588 2588 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2589 2589 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2590 2590
2591 2591 users_group = relationship('UserGroup')
2592 2592 permission = relationship('Permission')
2593 2593 repository = relationship('Repository')
2594 2594
2595 2595 @classmethod
2596 2596 def create(cls, users_group, repository, permission):
2597 2597 n = cls()
2598 2598 n.users_group = users_group
2599 2599 n.repository = repository
2600 2600 n.permission = permission
2601 2601 Session().add(n)
2602 2602 return n
2603 2603
2604 2604 def __unicode__(self):
2605 2605 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2606 2606
2607 2607
2608 2608 class UserGroupUserGroupToPerm(Base, BaseModel):
2609 2609 __tablename__ = 'user_group_user_group_to_perm'
2610 2610 __table_args__ = (
2611 2611 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2612 2612 CheckConstraint('target_user_group_id != user_group_id'),
2613 2613 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2614 2614 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2615 2615 )
2616 2616 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2617 2617 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2618 2618 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2619 2619 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2620 2620
2621 2621 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2622 2622 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2623 2623 permission = relationship('Permission')
2624 2624
2625 2625 @classmethod
2626 2626 def create(cls, target_user_group, user_group, permission):
2627 2627 n = cls()
2628 2628 n.target_user_group = target_user_group
2629 2629 n.user_group = user_group
2630 2630 n.permission = permission
2631 2631 Session().add(n)
2632 2632 return n
2633 2633
2634 2634 def __unicode__(self):
2635 2635 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2636 2636
2637 2637
2638 2638 class UserGroupToPerm(Base, BaseModel):
2639 2639 __tablename__ = 'users_group_to_perm'
2640 2640 __table_args__ = (
2641 2641 UniqueConstraint('users_group_id', 'permission_id',),
2642 2642 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2643 2643 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2644 2644 )
2645 2645 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2646 2646 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2647 2647 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2648 2648
2649 2649 users_group = relationship('UserGroup')
2650 2650 permission = relationship('Permission')
2651 2651
2652 2652
2653 2653 class UserRepoGroupToPerm(Base, BaseModel):
2654 2654 __tablename__ = 'user_repo_group_to_perm'
2655 2655 __table_args__ = (
2656 2656 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2657 2657 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2658 2658 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2659 2659 )
2660 2660
2661 2661 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2662 2662 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2663 2663 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2664 2664 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2665 2665
2666 2666 user = relationship('User')
2667 2667 group = relationship('RepoGroup')
2668 2668 permission = relationship('Permission')
2669 2669
2670 2670 @classmethod
2671 2671 def create(cls, user, repository_group, permission):
2672 2672 n = cls()
2673 2673 n.user = user
2674 2674 n.group = repository_group
2675 2675 n.permission = permission
2676 2676 Session().add(n)
2677 2677 return n
2678 2678
2679 2679
2680 2680 class UserGroupRepoGroupToPerm(Base, BaseModel):
2681 2681 __tablename__ = 'users_group_repo_group_to_perm'
2682 2682 __table_args__ = (
2683 2683 UniqueConstraint('users_group_id', 'group_id'),
2684 2684 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2685 2685 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2686 2686 )
2687 2687
2688 2688 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2689 2689 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2690 2690 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2691 2691 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2692 2692
2693 2693 users_group = relationship('UserGroup')
2694 2694 permission = relationship('Permission')
2695 2695 group = relationship('RepoGroup')
2696 2696
2697 2697 @classmethod
2698 2698 def create(cls, user_group, repository_group, permission):
2699 2699 n = cls()
2700 2700 n.users_group = user_group
2701 2701 n.group = repository_group
2702 2702 n.permission = permission
2703 2703 Session().add(n)
2704 2704 return n
2705 2705
2706 2706 def __unicode__(self):
2707 2707 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2708 2708
2709 2709
2710 2710 class Statistics(Base, BaseModel):
2711 2711 __tablename__ = 'statistics'
2712 2712 __table_args__ = (
2713 2713 UniqueConstraint('repository_id'),
2714 2714 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2715 2715 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2716 2716 )
2717 2717 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2718 2718 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2719 2719 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2720 2720 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
2721 2721 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
2722 2722 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
2723 2723
2724 2724 repository = relationship('Repository', single_parent=True)
2725 2725
2726 2726
2727 2727 class UserFollowing(Base, BaseModel):
2728 2728 __tablename__ = 'user_followings'
2729 2729 __table_args__ = (
2730 2730 UniqueConstraint('user_id', 'follows_repository_id'),
2731 2731 UniqueConstraint('user_id', 'follows_user_id'),
2732 2732 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2733 2733 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2734 2734 )
2735 2735
2736 2736 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2737 2737 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2738 2738 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2739 2739 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2740 2740 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2741 2741
2742 2742 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2743 2743
2744 2744 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2745 2745 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2746 2746
2747 2747 @classmethod
2748 2748 def get_repo_followers(cls, repo_id):
2749 2749 return cls.query().filter(cls.follows_repo_id == repo_id)
2750 2750
2751 2751
2752 2752 class CacheKey(Base, BaseModel):
2753 2753 __tablename__ = 'cache_invalidation'
2754 2754 __table_args__ = (
2755 2755 UniqueConstraint('cache_key'),
2756 2756 Index('key_idx', 'cache_key'),
2757 2757 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2758 2758 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2759 2759 )
2760 2760 CACHE_TYPE_ATOM = 'ATOM'
2761 2761 CACHE_TYPE_RSS = 'RSS'
2762 2762 CACHE_TYPE_README = 'README'
2763 2763
2764 2764 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2765 2765 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2766 2766 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2767 2767 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2768 2768
2769 2769 def __init__(self, cache_key, cache_args=''):
2770 2770 self.cache_key = cache_key
2771 2771 self.cache_args = cache_args
2772 2772 self.cache_active = False
2773 2773
2774 2774 def __unicode__(self):
2775 2775 return u"<%s('%s:%s[%s]')>" % (
2776 2776 self.__class__.__name__,
2777 2777 self.cache_id, self.cache_key, self.cache_active)
2778 2778
2779 2779 def _cache_key_partition(self):
2780 2780 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2781 2781 return prefix, repo_name, suffix
2782 2782
2783 2783 def get_prefix(self):
2784 2784 """
2785 2785 Try to extract the prefix from an existing cache key. The key could
2786 2786 consist of prefix, repo_name and suffix
2787 2787 """
2788 2788 # this returns prefix, repo_name, suffix
2789 2789 return self._cache_key_partition()[0]
2790 2790
2791 2791 def get_suffix(self):
2792 2792 """
2793 2793 Get the suffix that might have been used in _get_cache_key to
2794 2794 generate self.cache_key. Only used for informational purposes
2795 2795 in repo_edit.html.
2796 2796 """
2797 2797 # prefix, repo_name, suffix
2798 2798 return self._cache_key_partition()[2]
2799 2799
2800 2800 @classmethod
2801 2801 def delete_all_cache(cls):
2802 2802 """
2803 2803 Delete all cache keys from the database.
2804 2804 Should only be run when all instances are down and all entries
2805 2805 thus stale.
2806 2806 """
2807 2807 cls.query().delete()
2808 2808 Session().commit()
2809 2809
2810 2810 @classmethod
2811 2811 def get_cache_key(cls, repo_name, cache_type):
2812 2812 """
2813 2813
2814 2814 Generate a cache key for this process of the RhodeCode instance.
2815 2815 The prefix will most likely be the process id, or an explicitly set
2816 2816 instance_id from the .ini file.
2817 2817 """
2818 2818 import rhodecode
2819 2819 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2820 2820
2821 2821 repo_as_unicode = safe_unicode(repo_name)
2822 2822 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2823 2823 if cache_type else repo_as_unicode
2824 2824
2825 2825 return u'{}{}'.format(prefix, key)
2826 2826
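As a plain-string illustration (no database or RhodeCode config needed), this is how a key composed by get_cache_key can later be split back into prefix and suffix by _cache_key_partition, which partitions on cache_args (typically the repo name); the concrete values below are made up:

prefix, repo_name, cache_type = u'instance1', u'some/repo', 'README'
key = u'{}_{}'.format(repo_name, cache_type) if cache_type else repo_name
cache_key = u'{}{}'.format(prefix, key)  # -> u'instance1some/repo_README'
# partitioning on the repo name recovers (prefix, repo_name, suffix) again
assert cache_key.partition(repo_name) == (u'instance1', u'some/repo', u'_README')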
2827 2827 @classmethod
2828 2828 def set_invalidate(cls, repo_name, delete=False):
2829 2829 """
2830 2830 Mark all caches of a repo as invalid in the database.
2831 2831 """
2832 2832
2833 2833 try:
2834 2834 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2835 2835 if delete:
2836 2836 log.debug('cache objects deleted for repo %s',
2837 2837 safe_str(repo_name))
2838 2838 qry.delete()
2839 2839 else:
2840 2840 log.debug('cache objects marked as invalid for repo %s',
2841 2841 safe_str(repo_name))
2842 2842 qry.update({"cache_active": False})
2843 2843
2844 2844 Session().commit()
2845 2845 except Exception:
2846 2846 log.exception(
2847 2847 'Cache key invalidation failed for repository %s',
2848 2848 safe_str(repo_name))
2849 2849 Session().rollback()
2850 2850
2851 2851 @classmethod
2852 2852 def get_active_cache(cls, cache_key):
2853 2853 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2854 2854 if inv_obj:
2855 2855 return inv_obj
2856 2856 return None
2857 2857
2858 2858 @classmethod
2859 2859 def repo_context_cache(cls, compute_func, repo_name, cache_type):
2860 2860 """
2861 2861 @cache_region('long_term')
2862 2862 def _heavy_calculation(cache_key):
2863 2863 return 'result'
2864 2864
2865 2865 cache_context = CacheKey.repo_context_cache(
2866 2866 _heavy_calculation, repo_name, cache_type)
2867 2867
2868 2868 with cache_context as context:
2869 2869 context.invalidate()
2870 2870 computed = context.compute()
2871 2871
2872 2872 assert computed == 'result'
2873 2873 """
2874 2874 from rhodecode.lib import caches
2875 2875 return caches.InvalidationContext(compute_func, repo_name, cache_type)
2876 2876
2877 2877
2878 2878 class ChangesetComment(Base, BaseModel):
2879 2879 __tablename__ = 'changeset_comments'
2880 2880 __table_args__ = (
2881 2881 Index('cc_revision_idx', 'revision'),
2882 2882 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2883 2883 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2884 2884 )
2885 2885
2886 2886 COMMENT_OUTDATED = u'comment_outdated'
2887 2887
2888 2888 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
2889 2889 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2890 2890 revision = Column('revision', String(40), nullable=True)
2891 2891 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2892 2892 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
2893 2893 line_no = Column('line_no', Unicode(10), nullable=True)
2894 2894 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
2895 2895 f_path = Column('f_path', Unicode(1000), nullable=True)
2896 2896 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2897 2897 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
2898 2898 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2899 2899 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2900 2900 renderer = Column('renderer', Unicode(64), nullable=True)
2901 2901 display_state = Column('display_state', Unicode(128), nullable=True)
2902 2902
2903 2903 author = relationship('User', lazy='joined')
2904 2904 repo = relationship('Repository')
2905 2905 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
2906 2906 pull_request = relationship('PullRequest', lazy='joined')
2907 2907 pull_request_version = relationship('PullRequestVersion')
2908 2908
2909 2909 @classmethod
2910 2910 def get_users(cls, revision=None, pull_request_id=None):
2911 2911 """
2912 2912 Returns the users associated with this ChangesetComment, i.e. those
2913 2913 who actually commented
2914 2914
2915 2915 :param revision:
2916 2916 :param pull_request_id:
2917 2917 """
2918 2918 q = Session().query(User)\
2919 2919 .join(ChangesetComment.author)
2920 2920 if revision:
2921 2921 q = q.filter(cls.revision == revision)
2922 2922 elif pull_request_id:
2923 2923 q = q.filter(cls.pull_request_id == pull_request_id)
2924 2924 return q.all()
2925 2925
2926 2926 def render(self, mentions=False):
2927 2927 from rhodecode.lib import helpers as h
2928 2928 return h.render(self.text, renderer=self.renderer, mentions=mentions)
2929 2929
2930 2930 def __repr__(self):
2931 2931 if self.comment_id:
2932 2932 return '<DB:ChangesetComment #%s>' % self.comment_id
2933 2933 else:
2934 2934 return '<DB:ChangesetComment at %#x>' % id(self)
2935 2935
2936 2936
2937 2937 class ChangesetStatus(Base, BaseModel):
2938 2938 __tablename__ = 'changeset_statuses'
2939 2939 __table_args__ = (
2940 2940 Index('cs_revision_idx', 'revision'),
2941 2941 Index('cs_version_idx', 'version'),
2942 2942 UniqueConstraint('repo_id', 'revision', 'version'),
2943 2943 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2944 2944 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2945 2945 )
2946 2946 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2947 2947 STATUS_APPROVED = 'approved'
2948 2948 STATUS_REJECTED = 'rejected'
2949 2949 STATUS_UNDER_REVIEW = 'under_review'
2950 2950
2951 2951 STATUSES = [
2952 2952 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
2953 2953 (STATUS_APPROVED, _("Approved")),
2954 2954 (STATUS_REJECTED, _("Rejected")),
2955 2955 (STATUS_UNDER_REVIEW, _("Under Review")),
2956 2956 ]
2957 2957
2958 2958 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
2959 2959 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2960 2960 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
2961 2961 revision = Column('revision', String(40), nullable=False)
2962 2962 status = Column('status', String(128), nullable=False, default=DEFAULT)
2963 2963 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
2964 2964 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
2965 2965 version = Column('version', Integer(), nullable=False, default=0)
2966 2966 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2967 2967
2968 2968 author = relationship('User', lazy='joined')
2969 2969 repo = relationship('Repository')
2970 2970 comment = relationship('ChangesetComment', lazy='joined')
2971 2971 pull_request = relationship('PullRequest', lazy='joined')
2972 2972
2973 2973 def __unicode__(self):
2974 2974 return u"<%s('%s[%s]:%s')>" % (
2975 2975 self.__class__.__name__,
2976 2976 self.status, self.version, self.author
2977 2977 )
2978 2978
2979 2979 @classmethod
2980 2980 def get_status_lbl(cls, value):
2981 2981 return dict(cls.STATUSES).get(value)
2982 2982
2983 2983 @property
2984 2984 def status_lbl(self):
2985 2985 return ChangesetStatus.get_status_lbl(self.status)
2986 2986
2987 2987
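A quick standalone illustration of the label lookup used by get_status_lbl above: STATUSES is just a list of (value, label) pairs, so unknown values fall through to None (the pairs here are simplified stand-ins for the real, translated labels):

STATUSES = [
    ('not_reviewed', 'Not Reviewed'),
    ('approved', 'Approved'),
    ('rejected', 'Rejected'),
    ('under_review', 'Under Review'),
]
assert dict(STATUSES).get('approved') == 'Approved'
assert dict(STATUSES).get('no_such_status') is None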
2988 2988 class _PullRequestBase(BaseModel):
2989 2989 """
2990 2990 Common attributes of pull request and version entries.
2991 2991 """
2992 2992
2993 2993 # .status values
2994 2994 STATUS_NEW = u'new'
2995 2995 STATUS_OPEN = u'open'
2996 2996 STATUS_CLOSED = u'closed'
2997 2997
2998 2998 title = Column('title', Unicode(255), nullable=True)
2999 2999 description = Column(
3000 3000 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3001 3001 nullable=True)
3002 3002 # new/open/closed status of pull request (not approve/reject/etc)
3003 3003 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3004 3004 created_on = Column(
3005 3005 'created_on', DateTime(timezone=False), nullable=False,
3006 3006 default=datetime.datetime.now)
3007 3007 updated_on = Column(
3008 3008 'updated_on', DateTime(timezone=False), nullable=False,
3009 3009 default=datetime.datetime.now)
3010 3010
3011 3011 @declared_attr
3012 3012 def user_id(cls):
3013 3013 return Column(
3014 3014 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3015 3015 unique=None)
3016 3016
3017 3017 # 500 revisions max
3018 3018 _revisions = Column(
3019 3019 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3020 3020
3021 3021 @declared_attr
3022 3022 def source_repo_id(cls):
3023 3023 # TODO: dan: rename column to source_repo_id
3024 3024 return Column(
3025 3025 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3026 3026 nullable=False)
3027 3027
3028 3028 source_ref = Column('org_ref', Unicode(255), nullable=False)
3029 3029
3030 3030 @declared_attr
3031 3031 def target_repo_id(cls):
3032 3032 # TODO: dan: rename column to target_repo_id
3033 3033 return Column(
3034 3034 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3035 3035 nullable=False)
3036 3036
3037 3037 target_ref = Column('other_ref', Unicode(255), nullable=False)
3038 3038
3039 3039 # TODO: dan: rename column to last_merge_source_rev
3040 3040 _last_merge_source_rev = Column(
3041 3041 'last_merge_org_rev', String(40), nullable=True)
3042 3042 # TODO: dan: rename column to last_merge_target_rev
3043 3043 _last_merge_target_rev = Column(
3044 3044 'last_merge_other_rev', String(40), nullable=True)
3045 3045 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3046 3046 merge_rev = Column('merge_rev', String(40), nullable=True)
3047 3047
3048 3048 @hybrid_property
3049 3049 def revisions(self):
3050 3050 return self._revisions.split(':') if self._revisions else []
3051 3051
3052 3052 @revisions.setter
3053 3053 def revisions(self, val):
3054 3054 self._revisions = ':'.join(val)
3055 3055
3056 3056 @declared_attr
3057 3057 def author(cls):
3058 3058 return relationship('User', lazy='joined')
3059 3059
3060 3060 @declared_attr
3061 3061 def source_repo(cls):
3062 3062 return relationship(
3063 3063 'Repository',
3064 3064 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3065 3065
3066 3066 @property
3067 3067 def source_ref_parts(self):
3068 3068 refs = self.source_ref.split(':')
3069 3069 return Reference(refs[0], refs[1], refs[2])
3070 3070
3071 3071 @declared_attr
3072 3072 def target_repo(cls):
3073 3073 return relationship(
3074 3074 'Repository',
3075 3075 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3076 3076
3077 3077 @property
3078 3078 def target_ref_parts(self):
3079 3079 refs = self.target_ref.split(':')
3080 3080 return Reference(refs[0], refs[1], refs[2])
3081 3081
3082 3082
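Before the concrete PullRequest class below, a small standalone sketch of the two string encodings used by _PullRequestBase: revisions are stored as one colon-joined text column, and each ref is a 'type:name:commit_id' string that feeds Reference(...); the values are invented for the example:

# revisions column round trip, mirroring the hybrid property above
_revisions = ':'.join(['deadbeefcafe', 'feedfacecafe'])
assert _revisions.split(':') == ['deadbeefcafe', 'feedfacecafe']
_revisions = None
assert (_revisions.split(':') if _revisions else []) == []

# a source/target ref splits into the three parts passed to Reference(...)
ref_type, ref_name, commit_id = 'branch:default:feedfacecafe'.split(':')
assert (ref_type, ref_name) == ('branch', 'default')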
3083 3083 class PullRequest(Base, _PullRequestBase):
3084 3084 __tablename__ = 'pull_requests'
3085 3085 __table_args__ = (
3086 3086 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3087 3087 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3088 3088 )
3089 3089
3090 3090 pull_request_id = Column(
3091 3091 'pull_request_id', Integer(), nullable=False, primary_key=True)
3092 3092
3093 3093 def __repr__(self):
3094 3094 if self.pull_request_id:
3095 3095 return '<DB:PullRequest #%s>' % self.pull_request_id
3096 3096 else:
3097 3097 return '<DB:PullRequest at %#x>' % id(self)
3098 3098
3099 3099 reviewers = relationship('PullRequestReviewers',
3100 3100 cascade="all, delete, delete-orphan")
3101 3101 statuses = relationship('ChangesetStatus')
3102 3102 comments = relationship('ChangesetComment',
3103 3103 cascade="all, delete, delete-orphan")
3104 3104 versions = relationship('PullRequestVersion',
3105 3105 cascade="all, delete, delete-orphan")
3106 3106
3107 3107 def is_closed(self):
3108 3108 return self.status == self.STATUS_CLOSED
3109 3109
3110 3110 def get_api_data(self):
3111 3111 from rhodecode.model.pull_request import PullRequestModel
3112 3112 pull_request = self
3113 3113 merge_status = PullRequestModel().merge_status(pull_request)
3114 3114 data = {
3115 3115 'pull_request_id': pull_request.pull_request_id,
3116 3116 'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
3117 3117 pull_request_id=self.pull_request_id,
3118 3118 qualified=True),
3119 3119 'title': pull_request.title,
3120 3120 'description': pull_request.description,
3121 3121 'status': pull_request.status,
3122 3122 'created_on': pull_request.created_on,
3123 3123 'updated_on': pull_request.updated_on,
3124 3124 'commit_ids': pull_request.revisions,
3125 3125 'review_status': pull_request.calculated_review_status(),
3126 3126 'mergeable': {
3127 3127 'status': merge_status[0],
3128 3128 'message': unicode(merge_status[1]),
3129 3129 },
3130 3130 'source': {
3131 3131 'clone_url': pull_request.source_repo.clone_url(),
3132 3132 'repository': pull_request.source_repo.repo_name,
3133 3133 'reference': {
3134 3134 'name': pull_request.source_ref_parts.name,
3135 3135 'type': pull_request.source_ref_parts.type,
3136 3136 'commit_id': pull_request.source_ref_parts.commit_id,
3137 3137 },
3138 3138 },
3139 3139 'target': {
3140 3140 'clone_url': pull_request.target_repo.clone_url(),
3141 3141 'repository': pull_request.target_repo.repo_name,
3142 3142 'reference': {
3143 3143 'name': pull_request.target_ref_parts.name,
3144 3144 'type': pull_request.target_ref_parts.type,
3145 3145 'commit_id': pull_request.target_ref_parts.commit_id,
3146 3146 },
3147 3147 },
3148 3148 'author': pull_request.author.get_api_data(include_secrets=False,
3149 3149 details='basic'),
3150 3150 'reviewers': [
3151 3151 {
3152 3152 'user': reviewer.get_api_data(include_secrets=False,
3153 3153 details='basic'),
3154 3154 'review_status': st[0][1].status if st else 'not_reviewed',
3155 3155 }
3156 3156 for reviewer, st in pull_request.reviewers_statuses()
3157 3157 ]
3158 3158 }
3159 3159
3160 3160 return data
3161 3161
3162 3162 def __json__(self):
3163 3163 return {
3164 3164 'revisions': self.revisions,
3165 3165 }
3166 3166
3167 3167 def calculated_review_status(self):
3168 3168 # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
3169 3169 # because it's tricky to use ChangesetStatusModel from there
3170 3170 warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
3171 3171 from rhodecode.model.changeset_status import ChangesetStatusModel
3172 3172 return ChangesetStatusModel().calculated_review_status(self)
3173 3173
3174 3174 def reviewers_statuses(self):
3175 3175 warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
3176 3176 from rhodecode.model.changeset_status import ChangesetStatusModel
3177 3177 return ChangesetStatusModel().reviewers_statuses(self)
3178 3178
3179 3179
3180 3180 class PullRequestVersion(Base, _PullRequestBase):
3181 3181 __tablename__ = 'pull_request_versions'
3182 3182 __table_args__ = (
3183 3183 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3184 3184 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3185 3185 )
3186 3186
3187 3187 pull_request_version_id = Column(
3188 3188 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3189 3189 pull_request_id = Column(
3190 3190 'pull_request_id', Integer(),
3191 3191 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3192 3192 pull_request = relationship('PullRequest')
3193 3193
3194 3194 def __repr__(self):
3195 3195 if self.pull_request_version_id:
3196 3196 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3197 3197 else:
3198 3198 return '<DB:PullRequestVersion at %#x>' % id(self)
3199 3199
3200 3200
3201 3201 class PullRequestReviewers(Base, BaseModel):
3202 3202 __tablename__ = 'pull_request_reviewers'
3203 3203 __table_args__ = (
3204 3204 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3205 3205 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3206 3206 )
3207 3207
3208 3208 def __init__(self, user=None, pull_request=None):
3209 3209 self.user = user
3210 3210 self.pull_request = pull_request
3211 3211
3212 3212 pull_requests_reviewers_id = Column(
3213 3213 'pull_requests_reviewers_id', Integer(), nullable=False,
3214 3214 primary_key=True)
3215 3215 pull_request_id = Column(
3216 3216 "pull_request_id", Integer(),
3217 3217 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3218 3218 user_id = Column(
3219 3219 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3220 3220
3221 3221 user = relationship('User')
3222 3222 pull_request = relationship('PullRequest')
3223 3223
3224 3224
3225 3225 class Notification(Base, BaseModel):
3226 3226 __tablename__ = 'notifications'
3227 3227 __table_args__ = (
3228 3228 Index('notification_type_idx', 'type'),
3229 3229 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3230 3230 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3231 3231 )
3232 3232
3233 3233 TYPE_CHANGESET_COMMENT = u'cs_comment'
3234 3234 TYPE_MESSAGE = u'message'
3235 3235 TYPE_MENTION = u'mention'
3236 3236 TYPE_REGISTRATION = u'registration'
3237 3237 TYPE_PULL_REQUEST = u'pull_request'
3238 3238 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3239 3239
3240 3240 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3241 3241 subject = Column('subject', Unicode(512), nullable=True)
3242 3242 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3243 3243 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3244 3244 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3245 3245 type_ = Column('type', Unicode(255))
3246 3246
3247 3247 created_by_user = relationship('User')
3248 3248 notifications_to_users = relationship('UserNotification', lazy='joined',
3249 3249 cascade="all, delete, delete-orphan")
3250 3250
3251 3251 @property
3252 3252 def recipients(self):
3253 3253 return [x.user for x in UserNotification.query()\
3254 3254 .filter(UserNotification.notification == self)\
3255 3255 .order_by(UserNotification.user_id.asc()).all()]
3256 3256
3257 3257 @classmethod
3258 3258 def create(cls, created_by, subject, body, recipients, type_=None):
3259 3259 if type_ is None:
3260 3260 type_ = Notification.TYPE_MESSAGE
3261 3261
3262 3262 notification = cls()
3263 3263 notification.created_by_user = created_by
3264 3264 notification.subject = subject
3265 3265 notification.body = body
3266 3266 notification.type_ = type_
3267 3267 notification.created_on = datetime.datetime.now()
3268 3268
3269 3269 for u in recipients:
3270 3270 assoc = UserNotification()
3271 3271 assoc.notification = notification
3272 3272
3273 3273 # if created_by is among the recipients, mark their notification
3274 3274 # as read
3275 3275 if u.user_id == created_by.user_id:
3276 3276 assoc.read = True
3277 3277
3278 3278 u.notifications.append(assoc)
3279 3279 Session().add(notification)
3280 3280
3281 3281 return notification
3282 3282
3283 3283 @property
3284 3284 def description(self):
3285 3285 from rhodecode.model.notification import NotificationModel
3286 3286 return NotificationModel().make_description(self)
3287 3287
3288 3288
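A hedged usage sketch of Notification.create; it assumes an active SQLAlchemy session and two hypothetical User instances, admin and reviewer, and nothing is committed until the caller does so:

notification = Notification.create(
    created_by=admin,                        # hypothetical User instance
    subject=u'New pull request needs review',
    body=u'Please take a look when you have a moment.',
    recipients=[admin, reviewer],            # reviewer is hypothetical too
    type_=Notification.TYPE_PULL_REQUEST)
# create() pre-marks the creator's own UserNotification copy as read
Session().commit()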
3289 3289 class UserNotification(Base, BaseModel):
3290 3290 __tablename__ = 'user_to_notification'
3291 3291 __table_args__ = (
3292 3292 UniqueConstraint('user_id', 'notification_id'),
3293 3293 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3294 3294 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3295 3295 )
3296 3296 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3297 3297 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3298 3298 read = Column('read', Boolean, default=False)
3299 3299 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3300 3300
3301 3301 user = relationship('User', lazy="joined")
3302 3302 notification = relationship('Notification', lazy="joined",
3303 3303 order_by=lambda: Notification.created_on.desc(),)
3304 3304
3305 3305 def mark_as_read(self):
3306 3306 self.read = True
3307 3307 Session().add(self)
3308 3308
3309 3309
3310 3310 class Gist(Base, BaseModel):
3311 3311 __tablename__ = 'gists'
3312 3312 __table_args__ = (
3313 3313 Index('g_gist_access_id_idx', 'gist_access_id'),
3314 3314 Index('g_created_on_idx', 'created_on'),
3315 3315 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3316 3316 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3317 3317 )
3318 3318 GIST_PUBLIC = u'public'
3319 3319 GIST_PRIVATE = u'private'
3320 3320 DEFAULT_FILENAME = u'gistfile1.txt'
3321 3321
3322 3322 ACL_LEVEL_PUBLIC = u'acl_public'
3323 3323 ACL_LEVEL_PRIVATE = u'acl_private'
3324 3324
3325 3325 gist_id = Column('gist_id', Integer(), primary_key=True)
3326 3326 gist_access_id = Column('gist_access_id', Unicode(250))
3327 3327 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3328 3328 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3329 3329 gist_expires = Column('gist_expires', Float(53), nullable=False)
3330 3330 gist_type = Column('gist_type', Unicode(128), nullable=False)
3331 3331 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3332 3332 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3333 3333 acl_level = Column('acl_level', Unicode(128), nullable=True)
3334 3334
3335 3335 owner = relationship('User')
3336 3336
3337 3337 def __repr__(self):
3338 3338 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3339 3339
3340 3340 @classmethod
3341 3341 def get_or_404(cls, id_):
3342 3342 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3343 3343 if not res:
3344 3344 raise HTTPNotFound
3345 3345 return res
3346 3346
3347 3347 @classmethod
3348 3348 def get_by_access_id(cls, gist_access_id):
3349 3349 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3350 3350
3351 3351 def gist_url(self):
3352 3352 import rhodecode
3353 3353 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3354 3354 if alias_url:
3355 3355 return alias_url.replace('{gistid}', self.gist_access_id)
3356 3356
3357 3357 return url('gist', gist_id=self.gist_access_id, qualified=True)
3358 3358
3359 3359 @classmethod
3360 3360 def base_path(cls):
3361 3361 """
3362 3362 Returns the base path where all gists are stored
3363 3363
3364 3364 :return: filesystem path of the gist store
3365 3365 """
3366 3366 from rhodecode.model.gist import GIST_STORE_LOC
3367 3367 q = Session().query(RhodeCodeUi)\
3368 3368 .filter(RhodeCodeUi.ui_key == URL_SEP)
3369 3369 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3370 3370 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3371 3371
3372 3372 def get_api_data(self):
3373 3373 """
3374 3374 Common function for generating gist related data for API
3375 3375 """
3376 3376 gist = self
3377 3377 data = {
3378 3378 'gist_id': gist.gist_id,
3379 3379 'type': gist.gist_type,
3380 3380 'access_id': gist.gist_access_id,
3381 3381 'description': gist.gist_description,
3382 3382 'url': gist.gist_url(),
3383 3383 'expires': gist.gist_expires,
3384 3384 'created_on': gist.created_on,
3385 3385 'modified_at': gist.modified_at,
3386 3386 'content': None,
3387 3387 'acl_level': gist.acl_level,
3388 3388 }
3389 3389 return data
3390 3390
3391 3391 def __json__(self):
3392 3392 data = dict(
3393 3393 )
3394 3394 data.update(self.get_api_data())
3395 3395 return data
3396 3396 # SCM functions
3397 3397
3398 3398 def scm_instance(self, **kwargs):
3399 3399 from rhodecode.lib.vcs import get_repo
3400 3400 base_path = self.base_path()
3401 3401 return get_repo(os.path.join(*map(safe_str,
3402 3402 [base_path, self.gist_access_id])))
3403 3403
3404 3404
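gist_url above prefers a configured gist_alias_url and falls back to the regular route; the substitution itself is a plain placeholder replace, as this standalone snippet with made-up values shows:

alias_url = 'https://gist.example.com/{gistid}'   # hypothetical config value
gist_access_id = 'a1b2c3d4'
assert alias_url.replace('{gistid}', gist_access_id) == \
    'https://gist.example.com/a1b2c3d4'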
3405 3405 class DbMigrateVersion(Base, BaseModel):
3406 3406 __tablename__ = 'db_migrate_version'
3407 3407 __table_args__ = (
3408 3408 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3409 3409 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3410 3410 )
3411 3411 repository_id = Column('repository_id', String(250), primary_key=True)
3412 3412 repository_path = Column('repository_path', Text)
3413 3413 version = Column('version', Integer)
3414 3414
3415 3415
3416 3416 class ExternalIdentity(Base, BaseModel):
3417 3417 __tablename__ = 'external_identities'
3418 3418 __table_args__ = (
3419 3419 Index('local_user_id_idx', 'local_user_id'),
3420 3420 Index('external_id_idx', 'external_id'),
3421 3421 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3422 3422 'mysql_charset': 'utf8'})
3423 3423
3424 3424 external_id = Column('external_id', Unicode(255), default=u'',
3425 3425 primary_key=True)
3426 3426 external_username = Column('external_username', Unicode(1024), default=u'')
3427 3427 local_user_id = Column('local_user_id', Integer(),
3428 3428 ForeignKey('users.user_id'), primary_key=True)
3429 3429 provider_name = Column('provider_name', Unicode(255), default=u'',
3430 3430 primary_key=True)
3431 3431 access_token = Column('access_token', String(1024), default=u'')
3432 3432 alt_token = Column('alt_token', String(1024), default=u'')
3433 3433 token_secret = Column('token_secret', String(1024), default=u'')
3434 3434
3435 3435 @classmethod
3436 3436 def by_external_id_and_provider(cls, external_id, provider_name,
3437 3437 local_user_id=None):
3438 3438 """
3439 3439 Returns an ExternalIdentity instance based on search params
3440 3440
3441 3441 :param external_id:
3442 3442 :param provider_name:
3443 3443 :return: ExternalIdentity
3444 3444 """
3445 3445 query = cls.query()
3446 3446 query = query.filter(cls.external_id == external_id)
3447 3447 query = query.filter(cls.provider_name == provider_name)
3448 3448 if local_user_id:
3449 3449 query = query.filter(cls.local_user_id == local_user_id)
3450 3450 return query.first()
3451 3451
3452 3452 @classmethod
3453 3453 def user_by_external_id_and_provider(cls, external_id, provider_name):
3454 3454 """
3455 3455 Returns a User instance based on search params
3456 3456
3457 3457 :param external_id:
3458 3458 :param provider_name:
3459 3459 :return: User
3460 3460 """
3461 3461 query = User.query()
3462 3462 query = query.filter(cls.external_id == external_id)
3463 3463 query = query.filter(cls.provider_name == provider_name)
3464 3464 query = query.filter(User.user_id == cls.local_user_id)
3465 3465 return query.first()
3466 3466
3467 3467 @classmethod
3468 3468 def by_local_user_id(cls, local_user_id):
3469 3469 """
3470 3470 Returns all external identities (and their tokens) for a user
3471 3471
3472 3472 :param local_user_id:
3473 3473 :return: ExternalIdentity query
3474 3474 """
3475 3475 query = cls.query()
3476 3476 query = query.filter(cls.local_user_id == local_user_id)
3477 3477 return query
3478
3479
3480 class Integration(Base, BaseModel):
3481 __tablename__ = 'integrations'
3482 __table_args__ = (
3483 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3484 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3485 )
3486
3487 integration_id = Column('integration_id', Integer(), primary_key=True)
3488 integration_type = Column('integration_type', String(255))
3489 enabled = Column("enabled", Boolean(), nullable=False)
3490 name = Column('name', String(255), nullable=False)
3491 settings_json = Column('settings_json',
3492 UnicodeText().with_variant(UnicodeText(16384), 'mysql'))
3493 repo_id = Column(
3494 "repo_id", Integer(), ForeignKey('repositories.repo_id'),
3495 nullable=True, unique=None, default=None)
3496 repo = relationship('Repository', lazy='joined')
3497
3498 @hybrid_property
3499 def settings(self):
3500 data = json.loads(self.settings_json or '{}')
3501 return data
3502
3503 @settings.setter
3504 def settings(self, dct):
3505 self.settings_json = json.dumps(dct, indent=2)
3506
3507 def __repr__(self):
3508 if self.repo:
3509 scope = 'repo=%r' % self.repo
3510 else:
3511 scope = 'global'
3512
3513 return '<Integration(%r, %r)>' % (self.integration_type, scope)
3514
3515 def settings_as_dict(self):
3516 return json.loads(self.settings_json or '{}')
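The settings hybrid property above is a thin JSON round trip over the settings_json column; a minimal sketch of the same mechanics with plain json follows (the payload is invented, and no database is involved):

import json

payload = {'service': 'https://hooks.example.com/T000/B000/XXXX'}  # made-up value
settings_json = json.dumps(payload, indent=2)          # what the setter stores
assert json.loads(settings_json or '{}') == payload    # what the getter returns
assert json.loads(None or '{}') == {}                  # an unset column yields an empty dict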
@@ -1,934 +1,934 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime
32 32
33 33 from sqlalchemy.sql import func
34 34 from sqlalchemy.sql.expression import true, or_
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h
39 39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 40 from rhodecode.lib.caching_query import FromCache
41 41 from rhodecode.lib.exceptions import AttachedForksError
42 42 from rhodecode.lib.hooks_base import log_delete_repository
43 43 from rhodecode.lib.utils import make_db_config
44 44 from rhodecode.lib.utils2 import (
45 45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 47 from rhodecode.lib.vcs.backends import get_backend
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 52 RepoGroup, RepositoryField)
53 53 from rhodecode.model.scm import UserGroupList
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoModel(BaseModel):
61 61
62 62 cls = Repository
63 63
64 64 def _get_user_group(self, users_group):
65 65 return self._get_instance(UserGroup, users_group,
66 66 callback=UserGroup.get_by_group_name)
67 67
68 68 def _get_repo_group(self, repo_group):
69 69 return self._get_instance(RepoGroup, repo_group,
70 70 callback=RepoGroup.get_by_group_name)
71 71
72 72 def _create_default_perms(self, repository, private):
73 73 # create default permission
74 74 default = 'repository.read'
75 75 def_user = User.get_default_user()
76 76 for p in def_user.user_perms:
77 77 if p.permission.permission_name.startswith('repository.'):
78 78 default = p.permission.permission_name
79 79 break
80 80
81 81 default_perm = 'repository.none' if private else default
82 82
83 83 repo_to_perm = UserRepoToPerm()
84 84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85 85
86 86 repo_to_perm.repository = repository
87 87 repo_to_perm.user_id = def_user.user_id
88 88
89 89 return repo_to_perm
90 90
91 91 @LazyProperty
92 92 def repos_path(self):
93 93 """
94 94 Gets the repositories root path from the database
95 95 """
96 96 settings_model = VcsSettingsModel(sa=self.sa)
97 97 return settings_model.get_repos_location()
98 98
99 99 def get(self, repo_id, cache=False):
100 100 repo = self.sa.query(Repository) \
101 101 .filter(Repository.repo_id == repo_id)
102 102
103 103 if cache:
104 104 repo = repo.options(FromCache("sql_cache_short",
105 105 "get_repo_%s" % repo_id))
106 106 return repo.scalar()
107 107
108 108 def get_repo(self, repository):
109 109 return self._get_repo(repository)
110 110
111 111 def get_by_repo_name(self, repo_name, cache=False):
112 112 repo = self.sa.query(Repository) \
113 113 .filter(Repository.repo_name == repo_name)
114 114
115 115 if cache:
116 116 repo = repo.options(FromCache("sql_cache_short",
117 117 "get_repo_%s" % repo_name))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts a repository by its id from special urls.
130 130 An example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135 try:
136 136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 137 if _repo_id:
138 138 return self.get(_repo_id)
139 139 except Exception:
140 140 log.exception('Failed to extract repo_name from URL')
141 141
142 142 return None
143 143
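The '_<digits>/...' url convention handled by _extract_id_from_repo_name above boils down to a single regex; a standalone check with a made-up repo name:

import re

by_id_match = re.match(r'^_(\d{1,})', '_11/some-repo')
assert by_id_match is not None
assert by_id_match.groups()[0] == '11'           # '_11/some-repo' resolves repo id 11
assert re.match(r'^_(\d{1,})', 'plain-repo') is None   # normal names don't match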
144 144 def get_url(self, repo):
145 145 return h.url('summary_home', repo_name=repo.repo_name, qualified=True)
146 146
147 147 def get_users(self, name_contains=None, limit=20, only_active=True):
148 148 # TODO: mikhail: move this method to the UserModel.
149 149 query = self.sa.query(User)
150 150 if only_active:
151 151 query = query.filter(User.active == true())
152 152
153 153 if name_contains:
154 154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
155 155 query = query.filter(
156 156 or_(
157 157 User.name.ilike(ilike_expression),
158 158 User.lastname.ilike(ilike_expression),
159 159 User.username.ilike(ilike_expression)
160 160 )
161 161 )
162 162 query = query.limit(limit)
163 163 users = query.all()
164 164
165 165 _users = [
166 166 {
167 167 'id': user.user_id,
168 168 'first_name': user.name,
169 169 'last_name': user.lastname,
170 170 'username': user.username,
171 171 'icon_link': h.gravatar_url(user.email, 14),
172 172 'value_display': h.person(user.email),
173 173 'value': user.username,
174 174 'value_type': 'user',
175 175 'active': user.active,
176 176 }
177 177 for user in users
178 178 ]
179 179 return _users
180 180
181 181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
182 182 # TODO: mikhail: move this method to the UserGroupModel.
183 183 query = self.sa.query(UserGroup)
184 184 if only_active:
185 185 query = query.filter(UserGroup.users_group_active == true())
186 186
187 187 if name_contains:
188 188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
189 189 query = query.filter(
190 190 UserGroup.users_group_name.ilike(ilike_expression))\
191 191 .order_by(func.length(UserGroup.users_group_name))\
192 192 .order_by(UserGroup.users_group_name)
193 193
194 194 query = query.limit(limit)
195 195 user_groups = query.all()
196 196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
197 197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
198 198
199 199 _groups = [
200 200 {
201 201 'id': group.users_group_id,
202 202 # TODO: marcink figure out a way to generate the url for the
203 203 # icon
204 204 'icon_link': '',
205 205 'value_display': 'Group: %s (%d members)' % (
206 206 group.users_group_name, len(group.members),),
207 207 'value': group.users_group_name,
208 208 'value_type': 'user_group',
209 209 'active': group.users_group_active,
210 210 }
211 211 for group in user_groups
212 212 ]
213 213 return _groups
214 214
215 215 @classmethod
216 216 def update_repoinfo(cls, repositories=None):
217 217 if not repositories:
218 218 repositories = Repository.getAll()
219 219 for repo in repositories:
220 220 repo.update_commit_cache()
221 221
222 222 def get_repos_as_dict(self, repo_list=None, admin=False,
223 223 super_user_actions=False):
224 224
225 225 from rhodecode.lib.utils import PartialRenderer
226 226 _render = PartialRenderer('data_table/_dt_elements.html')
227 227 c = _render.c
228 228
229 229 def quick_menu(repo_name):
230 230 return _render('quick_menu', repo_name)
231 231
232 232 def repo_lnk(name, rtype, rstate, private, fork_of):
233 233 return _render('repo_name', name, rtype, rstate, private, fork_of,
234 234 short_name=not admin, admin=False)
235 235
236 236 def last_change(last_change):
237 237 return _render("last_change", last_change)
238 238
239 239 def rss_lnk(repo_name):
240 240 return _render("rss", repo_name)
241 241
242 242 def atom_lnk(repo_name):
243 243 return _render("atom", repo_name)
244 244
245 245 def last_rev(repo_name, cs_cache):
246 246 return _render('revision', repo_name, cs_cache.get('revision'),
247 247 cs_cache.get('raw_id'), cs_cache.get('author'),
248 248 cs_cache.get('message'))
249 249
250 250 def desc(desc):
251 251 if c.visual.stylify_metatags:
252 252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
253 253 else:
254 254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
255 255
256 256 def state(repo_state):
257 257 return _render("repo_state", repo_state)
258 258
259 259 def repo_actions(repo_name):
260 260 return _render('repo_actions', repo_name, super_user_actions)
261 261
262 262 def user_profile(username):
263 263 return _render('user_profile', username)
264 264
265 265 repos_data = []
266 266 for repo in repo_list:
267 267 cs_cache = repo.changeset_cache
268 268 row = {
269 269 "menu": quick_menu(repo.repo_name),
270 270
271 271 "name": repo_lnk(repo.repo_name, repo.repo_type,
272 272 repo.repo_state, repo.private, repo.fork),
273 273 "name_raw": repo.repo_name.lower(),
274 274
275 275 "last_change": last_change(repo.last_db_change),
276 276 "last_change_raw": datetime_to_time(repo.last_db_change),
277 277
278 278 "last_changeset": last_rev(repo.repo_name, cs_cache),
279 279 "last_changeset_raw": cs_cache.get('revision'),
280 280
281 281 "desc": desc(repo.description),
282 282 "owner": user_profile(repo.user.username),
283 283
284 284 "state": state(repo.repo_state),
285 285 "rss": rss_lnk(repo.repo_name),
286 286
287 287 "atom": atom_lnk(repo.repo_name),
288 288 }
289 289 if admin:
290 290 row.update({
291 291 "action": repo_actions(repo.repo_name),
292 292 })
293 293 repos_data.append(row)
294 294
295 295 return repos_data
296 296
297 297 def _get_defaults(self, repo_name):
298 298 """
299 299 Gets information about a repository, and returns a dict for
300 300 usage in forms
301 301
302 302 :param repo_name:
303 303 """
304 304
305 305 repo_info = Repository.get_by_repo_name(repo_name)
306 306
307 307 if repo_info is None:
308 308 return None
309 309
310 310 defaults = repo_info.get_dict()
311 311 defaults['repo_name'] = repo_info.just_name
312 312
313 313 groups = repo_info.groups_with_parents
314 314 parent_group = groups[-1] if groups else None
315 315
316 316 # we use -1 as this is how we mark an empty group in HTML
317 317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
318 318
319 319 keys_to_process = (
320 320 {'k': 'repo_type', 'strip': False},
321 321 {'k': 'repo_enable_downloads', 'strip': True},
322 322 {'k': 'repo_description', 'strip': True},
323 323 {'k': 'repo_enable_locking', 'strip': True},
324 324 {'k': 'repo_landing_rev', 'strip': True},
325 325 {'k': 'clone_uri', 'strip': False},
326 326 {'k': 'repo_private', 'strip': True},
327 327 {'k': 'repo_enable_statistics', 'strip': True}
328 328 )
329 329
330 330 for item in keys_to_process:
331 331 attr = item['k']
332 332 if item['strip']:
333 333 attr = remove_prefix(item['k'], 'repo_')
334 334
335 335 val = defaults[attr]
336 336 if item['k'] == 'repo_landing_rev':
337 337 val = ':'.join(defaults[attr])
338 338 defaults[item['k']] = val
339 339 if item['k'] == 'clone_uri':
340 340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
341 341
342 342 # fill owner
343 343 if repo_info.user:
344 344 defaults.update({'user': repo_info.user.username})
345 345 else:
346 346 replacement_user = User.get_first_super_admin().username
347 347 defaults.update({'user': replacement_user})
348 348
349 349 # fill repository users
350 350 for p in repo_info.repo_to_perm:
351 351 defaults.update({'u_perm_%s' % p.user.user_id:
352 352 p.permission.permission_name})
353 353
354 354 # fill repository groups
355 355 for p in repo_info.users_group_to_perm:
356 356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
357 357 p.permission.permission_name})
358 358
359 359 return defaults
360 360
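Reviewer note: the strip/join handling in _get_defaults above is easy to misread, so here is a small standalone sketch of the two transformations it performs. remove_prefix is written out as a stand-in with the behaviour the code relies on, and the stored values are made up purely for illustration.

    # Hedged illustration of _get_defaults(): form keys prefixed with 'repo_'
    # read their value from the un-prefixed attribute, and landing_rev is
    # stored as a (type, ref) pair that the form expects joined with ':'.
    def remove_prefix(key, prefix):
        # stand-in for rhodecode's helper; assumed behaviour only
        return key[len(prefix):] if key.startswith(prefix) else key

    stored = {'description': 'demo repo', 'landing_rev': ('rev', 'tip')}

    defaults = {}
    defaults['repo_description'] = stored[remove_prefix('repo_description', 'repo_')]
    defaults['repo_landing_rev'] = ':'.join(stored['landing_rev'])

    print(defaults)
    # {'repo_description': 'demo repo', 'repo_landing_rev': 'rev:tip'}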
361 361 def update(self, repo, **kwargs):
362 362 try:
363 363 cur_repo = self._get_repo(repo)
364 364 source_repo_name = cur_repo.repo_name
365 365 if 'user' in kwargs:
366 366 cur_repo.user = User.get_by_username(kwargs['user'])
367 367
368 368 if 'repo_group' in kwargs:
369 369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
370 370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
371 371
372 372 update_keys = [
373 373 (1, 'repo_enable_downloads'),
374 374 (1, 'repo_description'),
375 375 (1, 'repo_enable_locking'),
376 376 (1, 'repo_landing_rev'),
377 377 (1, 'repo_private'),
378 378 (1, 'repo_enable_statistics'),
379 379 (0, 'clone_uri'),
380 380 (0, 'fork_id')
381 381 ]
382 382 for strip, k in update_keys:
383 383 if k in kwargs:
384 384 val = kwargs[k]
385 385 if strip:
386 386 k = remove_prefix(k, 'repo_')
387 387 if k == 'clone_uri':
388 388 from rhodecode.model.validators import Missing
389 389 _change = kwargs.get('clone_uri_change')
390 390 if _change in [Missing, 'OLD']:
391 391 # we don't change the value, so use original one
392 392 val = cur_repo.clone_uri
393 393
394 394 setattr(cur_repo, k, val)
395 395
396 396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
397 397 cur_repo.repo_name = new_name
398 398
399 399 # if private flag is set, reset default permission to NONE
400 400 if kwargs.get('repo_private'):
401 401 EMPTY_PERM = 'repository.none'
402 402 RepoModel().grant_user_permission(
403 403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
404 404 )
405 405
406 406 # handle extra fields
407 407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
408 408 kwargs):
409 409 k = RepositoryField.un_prefix_key(field)
410 410 ex_field = RepositoryField.get_by_key_name(
411 411 key=k, repo=cur_repo)
412 412 if ex_field:
413 413 ex_field.field_value = kwargs[field]
414 414 self.sa.add(ex_field)
415 415 self.sa.add(cur_repo)
416 416
417 417 if source_repo_name != new_name:
418 418 # rename repository
419 419 self._rename_filesystem_repo(
420 420 old=source_repo_name, new=new_name)
421 421
422 422 return cur_repo
423 423 except Exception:
424 424 log.error(traceback.format_exc())
425 425 raise
426 426
427 427 def _create_repo(self, repo_name, repo_type, description, owner,
428 428 private=False, clone_uri=None, repo_group=None,
429 429 landing_rev='rev:tip', fork_of=None,
430 430 copy_fork_permissions=False, enable_statistics=False,
431 431 enable_locking=False, enable_downloads=False,
432 432 copy_group_permissions=False,
433 433 state=Repository.STATE_PENDING):
434 434 """
435 435         Create repository inside database with PENDING state. This should
436 436         only be executed by create(), with the exception of importing
437 437         existing repos
438 438 """
439 439 from rhodecode.model.scm import ScmModel
440 440
441 441 owner = self._get_user(owner)
442 442 fork_of = self._get_repo(fork_of)
443 443 repo_group = self._get_repo_group(safe_int(repo_group))
444 444
445 445 try:
446 446 repo_name = safe_unicode(repo_name)
447 447 description = safe_unicode(description)
448 448             # repo_name is just the name of the repository, while
449 449             # repo_name_full is the fully qualified name combined with the
450 450             # name and path of its group
451 451 repo_name_full = repo_name
452 452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
453 453
454 454 new_repo = Repository()
455 455 new_repo.repo_state = state
456 456 new_repo.enable_statistics = False
457 457 new_repo.repo_name = repo_name_full
458 458 new_repo.repo_type = repo_type
459 459 new_repo.user = owner
460 460 new_repo.group = repo_group
461 461 new_repo.description = description or repo_name
462 462 new_repo.private = private
463 463 new_repo.clone_uri = clone_uri
464 464 new_repo.landing_rev = landing_rev
465 465
466 466 new_repo.enable_statistics = enable_statistics
467 467 new_repo.enable_locking = enable_locking
468 468 new_repo.enable_downloads = enable_downloads
469 469
470 470 if repo_group:
471 471 new_repo.enable_locking = repo_group.enable_locking
472 472
473 473 if fork_of:
474 474 parent_repo = fork_of
475 475 new_repo.fork = parent_repo
476 476
477 477 events.trigger(events.RepoPreCreateEvent(new_repo))
478 478
479 479 self.sa.add(new_repo)
480 480
481 481 EMPTY_PERM = 'repository.none'
482 482 if fork_of and copy_fork_permissions:
483 483 repo = fork_of
484 484 user_perms = UserRepoToPerm.query() \
485 485 .filter(UserRepoToPerm.repository == repo).all()
486 486 group_perms = UserGroupRepoToPerm.query() \
487 487 .filter(UserGroupRepoToPerm.repository == repo).all()
488 488
489 489 for perm in user_perms:
490 490 UserRepoToPerm.create(
491 491 perm.user, new_repo, perm.permission)
492 492
493 493 for perm in group_perms:
494 494 UserGroupRepoToPerm.create(
495 495 perm.users_group, new_repo, perm.permission)
496 496 # in case we copy permissions and also set this repo to private
497 497 # override the default user permission to make it a private
498 498 # repo
499 499 if private:
500 500 RepoModel(self.sa).grant_user_permission(
501 501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
502 502
503 503 elif repo_group and copy_group_permissions:
504 504 user_perms = UserRepoGroupToPerm.query() \
505 505 .filter(UserRepoGroupToPerm.group == repo_group).all()
506 506
507 507 group_perms = UserGroupRepoGroupToPerm.query() \
508 508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
509 509
510 510 for perm in user_perms:
511 511 perm_name = perm.permission.permission_name.replace(
512 512 'group.', 'repository.')
513 513 perm_obj = Permission.get_by_key(perm_name)
514 514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
515 515
516 516 for perm in group_perms:
517 517 perm_name = perm.permission.permission_name.replace(
518 518 'group.', 'repository.')
519 519 perm_obj = Permission.get_by_key(perm_name)
520 520 UserGroupRepoToPerm.create(
521 521 perm.users_group, new_repo, perm_obj)
522 522
523 523 if private:
524 524 RepoModel(self.sa).grant_user_permission(
525 525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
526 526
527 527 else:
528 528 perm_obj = self._create_default_perms(new_repo, private)
529 529 self.sa.add(perm_obj)
530 530
531 531 # now automatically start following this repository as owner
532 532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
533 533 owner.user_id)
534 534
535 535             # we need to flush here in order to check that the database won't
536 536             # throw any exceptions; filesystem dirs are created at the very end
537 537 self.sa.flush()
538 events.trigger(events.RepoCreatedEvent(new_repo))
538 events.trigger(events.RepoCreateEvent(new_repo))
539 539 return new_repo
540 540
541 541 except Exception:
542 542 log.error(traceback.format_exc())
543 543 raise
544 544
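Reviewer note: the comment before self.sa.flush() above explains the ordering guarantee that matters for the renamed events. As a minimal sketch of that pre/post pattern in isolation, assuming the same events module the model code uses, with the session and repository objects as placeholders:

    from rhodecode import events

    def create_with_events(sa_session, new_repo):
        # "pre" event fires before anything is persisted
        events.trigger(events.RepoPreCreateEvent(new_repo))
        sa_session.add(new_repo)
        # flush so database errors surface before the repo is announced
        sa_session.flush()
        # only a successfully flushed repo produces a RepoCreateEvent
        events.trigger(events.RepoCreateEvent(new_repo))
        return new_repo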
545 545 def create(self, form_data, cur_user):
546 546 """
547 547 Create repository using celery tasks
548 548
549 549 :param form_data:
550 550 :param cur_user:
551 551 """
552 552 from rhodecode.lib.celerylib import tasks, run_task
553 553 return run_task(tasks.create_repo, form_data, cur_user)
554 554
555 555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
556 556 perm_deletions=None, check_perms=True,
557 557 cur_user=None):
558 558 if not perm_additions:
559 559 perm_additions = []
560 560 if not perm_updates:
561 561 perm_updates = []
562 562 if not perm_deletions:
563 563 perm_deletions = []
564 564
565 565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
566 566
567 567 # update permissions
568 568 for member_id, perm, member_type in perm_updates:
569 569 member_id = int(member_id)
570 570 if member_type == 'user':
571 571                 # this also updates the current one if found
572 572 self.grant_user_permission(
573 573 repo=repo, user=member_id, perm=perm)
574 574 else: # set for user group
575 575 # check if we have permissions to alter this usergroup
576 576 member_name = UserGroup.get(member_id).users_group_name
577 577 if not check_perms or HasUserGroupPermissionAny(
578 578 *req_perms)(member_name, user=cur_user):
579 579 self.grant_user_group_permission(
580 580 repo=repo, group_name=member_id, perm=perm)
581 581
582 582 # set new permissions
583 583 for member_id, perm, member_type in perm_additions:
584 584 member_id = int(member_id)
585 585 if member_type == 'user':
586 586 self.grant_user_permission(
587 587 repo=repo, user=member_id, perm=perm)
588 588 else: # set for user group
589 589 # check if we have permissions to alter this usergroup
590 590 member_name = UserGroup.get(member_id).users_group_name
591 591 if not check_perms or HasUserGroupPermissionAny(
592 592 *req_perms)(member_name, user=cur_user):
593 593 self.grant_user_group_permission(
594 594 repo=repo, group_name=member_id, perm=perm)
595 595
596 596 # delete permissions
597 597 for member_id, perm, member_type in perm_deletions:
598 598 member_id = int(member_id)
599 599 if member_type == 'user':
600 600 self.revoke_user_permission(repo=repo, user=member_id)
601 601 else: # set for user group
602 602 # check if we have permissions to alter this usergroup
603 603 member_name = UserGroup.get(member_id).users_group_name
604 604 if not check_perms or HasUserGroupPermissionAny(
605 605 *req_perms)(member_name, user=cur_user):
606 606 self.revoke_user_group_permission(
607 607 repo=repo, group_name=member_id)
608 608
609 609 def create_fork(self, form_data, cur_user):
610 610 """
611 611 Simple wrapper into executing celery task for fork creation
612 612
613 613 :param form_data:
614 614 :param cur_user:
615 615 """
616 616 from rhodecode.lib.celerylib import tasks, run_task
617 617 return run_task(tasks.create_repo_fork, form_data, cur_user)
618 618
619 619 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
620 620 """
621 621         Delete given repository. The forks parameter defines what to do with
622 622         attached forks; raises AttachedForksError if the deleted repo has
623 623         attached forks
624 624
625 625 :param repo:
626 626 :param forks: str 'delete' or 'detach'
627 627 :param fs_remove: remove(archive) repo from filesystem
628 628 """
629 629 if not cur_user:
630 630 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
631 631 repo = self._get_repo(repo)
632 632 if repo:
633 633 if forks == 'detach':
634 634 for r in repo.forks:
635 635 r.fork = None
636 636 self.sa.add(r)
637 637 elif forks == 'delete':
638 638 for r in repo.forks:
639 639 self.delete(r, forks='delete')
640 640 elif [f for f in repo.forks]:
641 641 raise AttachedForksError()
642 642
643 643 old_repo_dict = repo.get_dict()
644 644 events.trigger(events.RepoPreDeleteEvent(repo))
645 645 try:
646 646 self.sa.delete(repo)
647 647 if fs_remove:
648 648 self._delete_filesystem_repo(repo)
649 649 else:
650 650 log.debug('skipping removal from filesystem')
651 651 old_repo_dict.update({
652 652 'deleted_by': cur_user,
653 653 'deleted_on': time.time(),
654 654 })
655 655 log_delete_repository(**old_repo_dict)
656 events.trigger(events.RepoDeletedEvent(repo))
656 events.trigger(events.RepoDeleteEvent(repo))
657 657 except Exception:
658 658 log.error(traceback.format_exc())
659 659 raise
660 660
661 661 def grant_user_permission(self, repo, user, perm):
662 662 """
663 663 Grant permission for user on given repository, or update existing one
664 664 if found
665 665
666 666 :param repo: Instance of Repository, repository_id, or repository name
667 667 :param user: Instance of User, user_id or username
668 668 :param perm: Instance of Permission, or permission_name
669 669 """
670 670 user = self._get_user(user)
671 671 repo = self._get_repo(repo)
672 672 permission = self._get_perm(perm)
673 673
674 674 # check if we have that permission already
675 675 obj = self.sa.query(UserRepoToPerm) \
676 676 .filter(UserRepoToPerm.user == user) \
677 677 .filter(UserRepoToPerm.repository == repo) \
678 678 .scalar()
679 679 if obj is None:
680 680 # create new !
681 681 obj = UserRepoToPerm()
682 682 obj.repository = repo
683 683 obj.user = user
684 684 obj.permission = permission
685 685 self.sa.add(obj)
686 686 log.debug('Granted perm %s to %s on %s', perm, user, repo)
687 687 action_logger_generic(
688 688 'granted permission: {} to user: {} on repo: {}'.format(
689 689 perm, user, repo), namespace='security.repo')
690 690 return obj
691 691
692 692 def revoke_user_permission(self, repo, user):
693 693 """
694 694 Revoke permission for user on given repository
695 695
696 696 :param repo: Instance of Repository, repository_id, or repository name
697 697 :param user: Instance of User, user_id or username
698 698 """
699 699
700 700 user = self._get_user(user)
701 701 repo = self._get_repo(repo)
702 702
703 703 obj = self.sa.query(UserRepoToPerm) \
704 704 .filter(UserRepoToPerm.repository == repo) \
705 705 .filter(UserRepoToPerm.user == user) \
706 706 .scalar()
707 707 if obj:
708 708 self.sa.delete(obj)
709 709 log.debug('Revoked perm on %s on %s', repo, user)
710 710 action_logger_generic(
711 711 'revoked permission from user: {} on repo: {}'.format(
712 712 user, repo), namespace='security.repo')
713 713
714 714 def grant_user_group_permission(self, repo, group_name, perm):
715 715 """
716 716 Grant permission for user group on given repository, or update
717 717 existing one if found
718 718
719 719 :param repo: Instance of Repository, repository_id, or repository name
720 720 :param group_name: Instance of UserGroup, users_group_id,
721 721 or user group name
722 722 :param perm: Instance of Permission, or permission_name
723 723 """
724 724 repo = self._get_repo(repo)
725 725 group_name = self._get_user_group(group_name)
726 726 permission = self._get_perm(perm)
727 727
728 728 # check if we have that permission already
729 729 obj = self.sa.query(UserGroupRepoToPerm) \
730 730 .filter(UserGroupRepoToPerm.users_group == group_name) \
731 731 .filter(UserGroupRepoToPerm.repository == repo) \
732 732 .scalar()
733 733
734 734 if obj is None:
735 735 # create new
736 736 obj = UserGroupRepoToPerm()
737 737
738 738 obj.repository = repo
739 739 obj.users_group = group_name
740 740 obj.permission = permission
741 741 self.sa.add(obj)
742 742 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
743 743 action_logger_generic(
744 744 'granted permission: {} to usergroup: {} on repo: {}'.format(
745 745 perm, group_name, repo), namespace='security.repo')
746 746
747 747 return obj
748 748
749 749 def revoke_user_group_permission(self, repo, group_name):
750 750 """
751 751 Revoke permission for user group on given repository
752 752
753 753 :param repo: Instance of Repository, repository_id, or repository name
754 754 :param group_name: Instance of UserGroup, users_group_id,
755 755 or user group name
756 756 """
757 757 repo = self._get_repo(repo)
758 758 group_name = self._get_user_group(group_name)
759 759
760 760 obj = self.sa.query(UserGroupRepoToPerm) \
761 761 .filter(UserGroupRepoToPerm.repository == repo) \
762 762 .filter(UserGroupRepoToPerm.users_group == group_name) \
763 763 .scalar()
764 764 if obj:
765 765 self.sa.delete(obj)
766 766 log.debug('Revoked perm to %s on %s', repo, group_name)
767 767 action_logger_generic(
768 768 'revoked permission from usergroup: {} on repo: {}'.format(
769 769 group_name, repo), namespace='security.repo')
770 770
771 771 def delete_stats(self, repo_name):
772 772 """
773 773 removes stats for given repo
774 774
775 775 :param repo_name:
776 776 """
777 777 repo = self._get_repo(repo_name)
778 778 try:
779 779 obj = self.sa.query(Statistics) \
780 780 .filter(Statistics.repository == repo).scalar()
781 781 if obj:
782 782 self.sa.delete(obj)
783 783 except Exception:
784 784 log.error(traceback.format_exc())
785 785 raise
786 786
787 787 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
788 788 field_type='str', field_desc=''):
789 789
790 790 repo = self._get_repo(repo_name)
791 791
792 792 new_field = RepositoryField()
793 793 new_field.repository = repo
794 794 new_field.field_key = field_key
795 795 new_field.field_type = field_type # python type
796 796 new_field.field_value = field_value
797 797 new_field.field_desc = field_desc
798 798 new_field.field_label = field_label
799 799 self.sa.add(new_field)
800 800 return new_field
801 801
802 802 def delete_repo_field(self, repo_name, field_key):
803 803 repo = self._get_repo(repo_name)
804 804 field = RepositoryField.get_by_key_name(field_key, repo)
805 805 if field:
806 806 self.sa.delete(field)
807 807
808 808 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
809 809 clone_uri=None, repo_store_location=None,
810 810 use_global_config=False):
811 811 """
812 812         makes repository on filesystem. It is group aware, meaning it will
813 813         create a repository within a group and alter the paths according to
814 814         the group location
815 815
816 816 :param repo_name:
817 817 :param alias:
818 818 :param parent:
819 819 :param clone_uri:
820 820 :param repo_store_location:
821 821 """
822 822 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
823 823 from rhodecode.model.scm import ScmModel
824 824
825 825 if Repository.NAME_SEP in repo_name:
826 826 raise ValueError(
827 827                 'repo_name must not contain groups, got `%s`' % repo_name)
828 828
829 829 if isinstance(repo_group, RepoGroup):
830 830 new_parent_path = os.sep.join(repo_group.full_path_splitted)
831 831 else:
832 832 new_parent_path = repo_group or ''
833 833
834 834 if repo_store_location:
835 835 _paths = [repo_store_location]
836 836 else:
837 837 _paths = [self.repos_path, new_parent_path, repo_name]
838 838 # we need to make it str for mercurial
839 839 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
840 840
841 841 # check if this path is not a repository
842 842 if is_valid_repo(repo_path, self.repos_path):
843 843 raise Exception('This path %s is a valid repository' % repo_path)
844 844
845 845 # check if this path is a group
846 846 if is_valid_repo_group(repo_path, self.repos_path):
847 847 raise Exception('This path %s is a valid group' % repo_path)
848 848
849 849 log.info('creating repo %s in %s from url: `%s`',
850 850 repo_name, safe_unicode(repo_path),
851 851 obfuscate_url_pw(clone_uri))
852 852
853 853 backend = get_backend(repo_type)
854 854
855 855 config_repo = None if use_global_config else repo_name
856 856 if config_repo and new_parent_path:
857 857 config_repo = Repository.NAME_SEP.join(
858 858 (new_parent_path, config_repo))
859 859 config = make_db_config(clear_session=False, repo=config_repo)
860 860 config.set('extensions', 'largefiles', '')
861 861
862 862 # patch and reset hooks section of UI config to not run any
863 863 # hooks on creating remote repo
864 864 config.clear_section('hooks')
865 865
866 866 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
867 867 if repo_type == 'git':
868 868 repo = backend(
869 869 repo_path, config=config, create=True, src_url=clone_uri,
870 870 bare=True)
871 871 else:
872 872 repo = backend(
873 873 repo_path, config=config, create=True, src_url=clone_uri)
874 874
875 875 ScmModel().install_hooks(repo, repo_type=repo_type)
876 876
877 877 log.debug('Created repo %s with %s backend',
878 878 safe_unicode(repo_name), safe_unicode(repo_type))
879 879 return repo
880 880
881 881 def _rename_filesystem_repo(self, old, new):
882 882 """
883 883 renames repository on filesystem
884 884
885 885 :param old: old name
886 886 :param new: new name
887 887 """
888 888 log.info('renaming repo from %s to %s', old, new)
889 889
890 890 old_path = os.path.join(self.repos_path, old)
891 891 new_path = os.path.join(self.repos_path, new)
892 892 if os.path.isdir(new_path):
893 893 raise Exception(
894 894 'Was trying to rename to already existing dir %s' % new_path
895 895 )
896 896 shutil.move(old_path, new_path)
897 897
898 898 def _delete_filesystem_repo(self, repo):
899 899 """
900 900         removes repo from filesystem, the removal is actually done by
901 901         adding a rm__ prefix to the dir and renaming the internal .hg/.git
902 902         dirs so this repository is no longer valid for rhodecode; it can be
903 903         undeleted later on by reverting the renames on this repository
904 904
905 905 :param repo: repo object
906 906 """
907 907 rm_path = os.path.join(self.repos_path, repo.repo_name)
908 908 repo_group = repo.group
909 909 log.info("Removing repository %s", rm_path)
910 910         # disable the hg/git internal dir so it doesn't get detected as a repo
911 911 alias = repo.repo_type
912 912
913 913 config = make_db_config(clear_session=False)
914 914 config.set('extensions', 'largefiles', '')
915 915 bare = getattr(repo.scm_instance(config=config), 'bare', False)
916 916
917 917 # skip this for bare git repos
918 918 if not bare:
919 919 # disable VCS repo
920 920 vcs_path = os.path.join(rm_path, '.%s' % alias)
921 921 if os.path.exists(vcs_path):
922 922 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
923 923
924 924 _now = datetime.now()
925 925 _ms = str(_now.microsecond).rjust(6, '0')
926 926 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
927 927 repo.just_name)
928 928 if repo_group:
929 929 # if repository is in group, prefix the removal path with the group
930 930 args = repo_group.full_path_splitted + [_d]
931 931 _d = os.path.join(*args)
932 932
933 933 if os.path.isdir(rm_path):
934 934 shutil.move(rm_path, os.path.join(self.repos_path, _d))
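Reviewer note: a hedged usage sketch of the RepoModel API touched in this file. The repository and user names are made up, 'repository.read' stands in for any permission name, and the final commit assumes the usual Session from rhodecode.model.meta; none of this is prescribed by the changeset itself.

    from rhodecode.model.meta import Session
    from rhodecode.model.repo import RepoModel

    model = RepoModel()

    # grant, then revoke, a single user's permission on one repository
    model.grant_user_permission(
        repo='docs/example-repo', user='example-user', perm='repository.read')
    model.revoke_user_permission(repo='docs/example-repo', user='example-user')

    # delete a repository but keep its forks as standalone repositories
    model.delete('docs/example-repo', forks='detach')
    Session().commit()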
@@ -1,77 +1,84 b''
1 1 ## -*- coding: utf-8 -*-
2 2 ##
3 3 ## See also repo_settings.html
4 4 ##
5 5 <%inherit file="/base/base.html"/>
6 6
7 7 <%def name="title()">
8 8 ${_('%s repository settings') % c.repo_info.repo_name}
9 9 %if c.rhodecode_name:
10 10 &middot; ${h.branding(c.rhodecode_name)}
11 11 %endif
12 12 </%def>
13 13
14 14 <%def name="breadcrumbs_links()">
15 15 ${_('Settings')}
16 16 </%def>
17 17
18 18 <%def name="menu_bar_nav()">
19 19 ${self.menu_items(active='repositories')}
20 20 </%def>
21 21
22 22 <%def name="menu_bar_subnav()">
23 23 ${self.repo_menu(active='options')}
24 24 </%def>
25 25
26 <%def name="main_content()">
27 <%include file="/admin/repos/repo_edit_${c.active}.html"/>
28 </%def>
29
26 30
27 31 <%def name="main()">
28 32 <div class="box">
29 33 <div class="title">
30 34 ${self.repo_page_title(c.rhodecode_db_repo)}
31 35 ${self.breadcrumbs()}
32 36 </div>
33 37
34 38 <div class="sidebar-col-wrapper scw-small">
35 39 ##main
36 40 <div class="sidebar">
37 41 <ul class="nav nav-pills nav-stacked">
38 42 <li class="${'active' if c.active=='settings' else ''}">
39 43 <a href="${h.url('edit_repo', repo_name=c.repo_name)}">${_('Settings')}</a>
40 44 </li>
41 45 <li class="${'active' if c.active=='permissions' else ''}">
42 46 <a href="${h.url('edit_repo_perms', repo_name=c.repo_name)}">${_('Permissions')}</a>
43 47 </li>
44 48 <li class="${'active' if c.active=='advanced' else ''}">
45 49 <a href="${h.url('edit_repo_advanced', repo_name=c.repo_name)}">${_('Advanced')}</a>
46 50 </li>
47 51 <li class="${'active' if c.active=='vcs' else ''}">
48 52 <a href="${h.url('repo_vcs_settings', repo_name=c.repo_name)}">${_('VCS')}</a>
49 53 </li>
50 54 <li class="${'active' if c.active=='fields' else ''}">
51 55 <a href="${h.url('edit_repo_fields', repo_name=c.repo_name)}">${_('Extra Fields')}</a>
52 56 </li>
53 57 <li class="${'active' if c.active=='issuetracker' else ''}">
54 58 <a href="${h.url('repo_settings_issuetracker', repo_name=c.repo_name)}">${_('Issue Tracker')}</a>
55 59 </li>
56 60 <li class="${'active' if c.active=='caches' else ''}">
57 61 <a href="${h.url('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a>
58 62 </li>
59 63 %if c.repo_info.repo_type != 'svn':
60 64 <li class="${'active' if c.active=='remote' else ''}">
61 65 <a href="${h.url('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote')}</a>
62 66 </li>
63 67 %endif
64 68 <li class="${'active' if c.active=='statistics' else ''}">
65 69 <a href="${h.url('edit_repo_statistics', repo_name=c.repo_name)}">${_('Statistics')}</a>
66 70 </li>
71 <li class="${'active' if c.active=='integrations' else ''}">
72 <a href="${h.route_path('repo_integrations_home', repo_name=c.repo_name)}">${_('Integrations')}</a>
73 </li>
67 74 </ul>
68 75 </div>
69 76
70 77 <div class="main-content-full-width">
71 <%include file="/admin/repos/repo_edit_${c.active}.html"/>
78 ${self.main_content()}
72 79 </div>
73 80
74 81 </div>
75 82 </div>
76 83
77 </%def>
84 </%def> No newline at end of file
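Reviewer note: the new main_content() def is what lets pages such as the repository integrations settings inherit this chrome and replace only the central panel. A tiny self-contained sketch of that Mako pattern, with the templates inlined as strings purely for illustration:

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup()
    lookup.put_string('base.html', (
        '<%def name="main_content()">default panel</%def>\n'
        '<div class="main-content-full-width">${self.main_content()}</div>\n'))
    lookup.put_string('child.html', (
        '<%inherit file="base.html"/>\n'
        '<%def name="main_content()">integrations panel</%def>\n'))

    # renders the wrapper from base.html with the child's override inside it
    print(lookup.get_template('child.html').render())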
@@ -1,45 +1,53 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="/base/base.html"/>
3 3
4 4 <%def name="title()">
5 5 ${_('Settings administration')}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 11 <%def name="breadcrumbs_links()">
12 12 ${h.link_to(_('Admin'),h.url('admin_home'))}
13 13 &raquo;
14 14 ${_('Settings')}
15 15 </%def>
16 16
17 17 <%def name="menu_bar_nav()">
18 18 ${self.menu_items(active='admin')}
19 19 </%def>
20 20
21 <%def name="side_bar_nav()">
22 % for navitem in c.navlist:
23 <li class="${'active' if c.active==navitem.key else ''}">
24 <a href="${navitem.url}">${navitem.name}</a>
25 </li>
26 % endfor
27 </%def>
28
29 <%def name="main_content()">
30 <%include file="/admin/settings/settings_${c.active}.html"/>
31 </%def>
32
21 33 <%def name="main()">
22 34 <div class="box">
23 35 <div class="title">
24 36 ${self.breadcrumbs()}
25 37 </div>
26 38
27 39 ##main
28 40 <div class='sidebar-col-wrapper'>
29 41 <div class="sidebar">
30 42 <ul class="nav nav-pills nav-stacked">
31 % for navitem in c.navlist:
32 <li class="${'active' if c.active==navitem.key else ''}">
33 <a href="${navitem.url}">${navitem.name}</a>
34 </li>
35 % endfor
43 ${self.side_bar_nav()}
36 44 </ul>
37 45 </div>
38 46
39 47 <div class="main-content-full-width">
40 <%include file="/admin/settings/settings_${c.active}.html"/>
48 ${self.main_content()}
41 49 </div>
42 50 </div>
43 51 </div>
44 52
45 </%def>
53 </%def> No newline at end of file
@@ -1,656 +1,656 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="root.html"/>
3 3
4 4 <div class="outerwrapper">
5 5 <!-- HEADER -->
6 6 <div class="header">
7 7 <div id="header-inner" class="wrapper">
8 8 <div id="logo">
9 9 <div class="logo-wrapper">
10 10 <a href="${h.url('home')}"><img src="${h.url('/images/rhodecode-logo-white-216x60.png')}" alt="RhodeCode"/></a>
11 11 </div>
12 12 %if c.rhodecode_name:
13 13 <div class="branding">- ${h.branding(c.rhodecode_name)}</div>
14 14 %endif
15 15 </div>
16 16 <!-- MENU BAR NAV -->
17 17 ${self.menu_bar_nav()}
18 18 <!-- END MENU BAR NAV -->
19 ${self.body()}
20 19 </div>
21 20 </div>
22 21 ${self.menu_bar_subnav()}
23 22 <!-- END HEADER -->
24 23
25 24 <!-- CONTENT -->
26 25 <div id="content" class="wrapper">
27 26 ${self.flash_msg()}
28 27 <div class="main">
29 28 ${next.main()}
30 29 </div>
31 30 </div>
32 31 <!-- END CONTENT -->
33 32
34 33 </div>
35 34 <!-- FOOTER -->
36 35 <div id="footer">
37 36 <div id="footer-inner" class="title wrapper">
38 37 <div>
39 38 <p class="footer-link-right">
40 39 % if c.visual.show_version:
41 40 RhodeCode Enterprise ${c.rhodecode_version} ${c.rhodecode_edition}
42 41 % endif
43 42 &copy; 2010-${h.datetime.today().year}, <a href="${h.url('rhodecode_official')}" target="_blank">RhodeCode GmbH</a>. All rights reserved.
44 43 % if c.visual.rhodecode_support_url:
45 44 <a href="${c.visual.rhodecode_support_url}" target="_blank">${_('Support')}</a>
46 45 % endif
47 46 </p>
48 47 <% sid = 'block' if request.GET.get('showrcid') else 'none' %>
49 48 <p class="server-instance" style="display:${sid}">
50 49 ## display hidden instance ID if specially defined
51 50 % if c.rhodecode_instanceid:
52 51 ${_('RhodeCode instance id: %s') % c.rhodecode_instanceid}
53 52 % endif
54 53 </p>
55 54 </div>
56 55 </div>
57 56 </div>
58 57
59 58 <!-- END FOOTER -->
60 59
61 60 ### MAKO DEFS ###
62 61
63 62 <%def name="menu_bar_subnav()">
64 63 </%def>
65 64
66 65 <%def name="flash_msg()">
67 66 <%include file="/base/flash_msg.html"/>
68 67 </%def>
69 68
70 69 <%def name="breadcrumbs(class_='breadcrumbs')">
71 70 <div class="${class_}">
72 71 ${self.breadcrumbs_links()}
73 72 </div>
74 73 </%def>
75 74
76 75 <%def name="admin_menu()">
77 76 <ul class="admin_menu submenu">
78 77 <li><a href="${h.url('admin_home')}">${_('Admin journal')}</a></li>
79 78 <li><a href="${h.url('repos')}">${_('Repositories')}</a></li>
80 79 <li><a href="${h.url('repo_groups')}">${_('Repository groups')}</a></li>
81 80 <li><a href="${h.url('users')}">${_('Users')}</a></li>
82 81 <li><a href="${h.url('users_groups')}">${_('User groups')}</a></li>
83 82 <li><a href="${h.url('admin_permissions_application')}">${_('Permissions')}</a></li>
84 83 <li><a href="${h.route_path('auth_home', traverse='')}">${_('Authentication')}</a></li>
84 <li><a href="${h.route_path('global_integrations_home')}">${_('Integrations')}</a></li>
85 85 <li><a href="${h.url('admin_defaults_repositories')}">${_('Defaults')}</a></li>
86 86 <li class="last"><a href="${h.url('admin_settings')}">${_('Settings')}</a></li>
87 87 </ul>
88 88 </%def>
89 89
90 90
91 91 <%def name="dt_info_panel(elements)">
92 92 <dl class="dl-horizontal">
93 93 %for dt, dd, title, show_items in elements:
94 94 <dt>${dt}:</dt>
95 95 <dd title="${title}">
96 96 %if callable(dd):
97 97 ## allow lazy evaluation of elements
98 98 ${dd()}
99 99 %else:
100 100 ${dd}
101 101 %endif
102 102 %if show_items:
103 103 <span class="btn-collapse" data-toggle="item-${h.md5(dt)[:6]}-details">${_('Show More')} </span>
104 104 %endif
105 105 </dd>
106 106
107 107 %if show_items:
108 108 <div class="collapsable-content" data-toggle="item-${h.md5(dt)[:6]}-details" style="display: none">
109 109 %for item in show_items:
110 110 <dt></dt>
111 111 <dd>${item}</dd>
112 112 %endfor
113 113 </div>
114 114 %endif
115 115
116 116 %endfor
117 117 </dl>
118 118 </%def>
119 119
120 120
121 121 <%def name="gravatar(email, size=16)">
122 122 <%
123 123 if (size > 16):
124 124 gravatar_class = 'gravatar gravatar-large'
125 125 else:
126 126 gravatar_class = 'gravatar'
127 127 %>
128 128 <%doc>
129 129 TODO: johbo: For now we serve double size images to make it smooth
130 130 for retina. This is how it worked until now. Should be replaced
131 131 with a better solution at some point.
132 132 </%doc>
133 133 <img class="${gravatar_class}" src="${h.gravatar_url(email, size * 2)}" height="${size}" width="${size}">
134 134 </%def>
135 135
136 136
137 137 <%def name="gravatar_with_user(contact, size=16, show_disabled=False)">
138 138 <% email = h.email_or_none(contact) %>
139 139 <div class="rc-user tooltip" title="${h.author_string(email)}">
140 140 ${self.gravatar(email, size)}
141 141 <span class="${'user user-disabled' if show_disabled else 'user'}"> ${h.link_to_user(contact)}</span>
142 142 </div>
143 143 </%def>
144 144
145 145
146 146 ## admin menu used for people that have some admin resources
147 147 <%def name="admin_menu_simple(repositories=None, repository_groups=None, user_groups=None)">
148 148 <ul class="submenu">
149 149 %if repositories:
150 150 <li><a href="${h.url('repos')}">${_('Repositories')}</a></li>
151 151 %endif
152 152 %if repository_groups:
153 153 <li><a href="${h.url('repo_groups')}">${_('Repository groups')}</a></li>
154 154 %endif
155 155 %if user_groups:
156 156 <li><a href="${h.url('users_groups')}">${_('User groups')}</a></li>
157 157 %endif
158 158 </ul>
159 159 </%def>
160 160
161 161 <%def name="repo_page_title(repo_instance)">
162 162 <div class="title-content">
163 163 <div class="title-main">
164 164 ## SVN/HG/GIT icons
165 165 %if h.is_hg(repo_instance):
166 166 <i class="icon-hg"></i>
167 167 %endif
168 168 %if h.is_git(repo_instance):
169 169 <i class="icon-git"></i>
170 170 %endif
171 171 %if h.is_svn(repo_instance):
172 172 <i class="icon-svn"></i>
173 173 %endif
174 174
175 175 ## public/private
176 176 %if repo_instance.private:
177 177 <i class="icon-repo-private"></i>
178 178 %else:
179 179 <i class="icon-repo-public"></i>
180 180 %endif
181 181
182 182 ## repo name with group name
183 183 ${h.breadcrumb_repo_link(c.rhodecode_db_repo)}
184 184
185 185 </div>
186 186
187 187 ## FORKED
188 188 %if repo_instance.fork:
189 189 <p>
190 190 <i class="icon-code-fork"></i> ${_('Fork of')}
191 191 <a href="${h.url('summary_home',repo_name=repo_instance.fork.repo_name)}">${repo_instance.fork.repo_name}</a>
192 192 </p>
193 193 %endif
194 194
195 195 ## IMPORTED FROM REMOTE
196 196 %if repo_instance.clone_uri:
197 197 <p>
198 198 <i class="icon-code-fork"></i> ${_('Clone from')}
199 199 <a href="${h.url(h.safe_str(h.hide_credentials(repo_instance.clone_uri)))}">${h.hide_credentials(repo_instance.clone_uri)}</a>
200 200 </p>
201 201 %endif
202 202
203 203 ## LOCKING STATUS
204 204 %if repo_instance.locked[0]:
205 205 <p class="locking_locked">
206 206 <i class="icon-repo-lock"></i>
207 207 ${_('Repository locked by %(user)s') % {'user': h.person_by_id(repo_instance.locked[0])}}
208 208 </p>
209 209 %elif repo_instance.enable_locking:
210 210 <p class="locking_unlocked">
211 211 <i class="icon-repo-unlock"></i>
212 212 ${_('Repository not locked. Pull repository to lock it.')}
213 213 </p>
214 214 %endif
215 215
216 216 </div>
217 217 </%def>
218 218
219 219 <%def name="repo_menu(active=None)">
220 220 <%
221 221 def is_active(selected):
222 222 if selected == active:
223 223 return "active"
224 224 %>
225 225
226 226 <!--- CONTEXT BAR -->
227 227 <div id="context-bar">
228 228 <div class="wrapper">
229 229 <ul id="context-pages" class="horizontal-list navigation">
230 230 <li class="${is_active('summary')}"><a class="menulink" href="${h.url('summary_home', repo_name=c.repo_name)}"><div class="menulabel">${_('Summary')}</div></a></li>
231 231 <li class="${is_active('changelog')}"><a class="menulink" href="${h.url('changelog_home', repo_name=c.repo_name)}"><div class="menulabel">${_('Changelog')}</div></a></li>
232 232 <li class="${is_active('files')}"><a class="menulink" href="${h.url('files_home', repo_name=c.repo_name, revision=c.rhodecode_db_repo.landing_rev[1])}"><div class="menulabel">${_('Files')}</div></a></li>
233 233 <li class="${is_active('compare')}">
234 234 <a class="menulink" href="${h.url('compare_home',repo_name=c.repo_name)}"><div class="menulabel">${_('Compare')}</div></a>
235 235 </li>
236 236 ## TODO: anderson: ideally it would have a function on the scm_instance "enable_pullrequest() and enable_fork()"
237 237 %if c.rhodecode_db_repo.repo_type in ['git','hg']:
238 238 <li class="${is_active('showpullrequest')}">
239 239 <a class="menulink" href="${h.url('pullrequest_show_all',repo_name=c.repo_name)}" title="${_('Show Pull Requests for %s') % c.repo_name}">
240 240 %if c.repository_pull_requests:
241 241 <span class="pr_notifications">${c.repository_pull_requests}</span>
242 242 %endif
243 243 <div class="menulabel">${_('Pull Requests')}</div>
244 244 </a>
245 245 </li>
246 246 %endif
247 247 <li class="${is_active('options')}">
248 248 <a class="menulink" href="#" class="dropdown"><div class="menulabel">${_('Options')} <div class="show_more"></div></div></a>
249 249 <ul class="submenu">
250 250 %if h.HasRepoPermissionAll('repository.admin')(c.repo_name):
251 251 <li><a href="${h.url('edit_repo',repo_name=c.repo_name)}">${_('Settings')}</a></li>
252 252 %endif
253 253 %if c.rhodecode_db_repo.fork:
254 254 <li><a href="${h.url('compare_url',repo_name=c.rhodecode_db_repo.fork.repo_name,source_ref_type=c.rhodecode_db_repo.landing_rev[0],source_ref=c.rhodecode_db_repo.landing_rev[1], target_repo=c.repo_name,target_ref_type='branch' if request.GET.get('branch') else c.rhodecode_db_repo.landing_rev[0],target_ref=request.GET.get('branch') or c.rhodecode_db_repo.landing_rev[1], merge=1)}">
255 255 ${_('Compare fork')}</a></li>
256 256 %endif
257 257
258 258 <li><a href="${h.url('search_repo_home',repo_name=c.repo_name)}">${_('Search')}</a></li>
259 259
260 260 %if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name) and c.rhodecode_db_repo.enable_locking:
261 261 %if c.rhodecode_db_repo.locked[0]:
262 262 <li><a class="locking_del" href="${h.url('toggle_locking',repo_name=c.repo_name)}">${_('Unlock')}</a></li>
263 263 %else:
264 264 <li><a class="locking_add" href="${h.url('toggle_locking',repo_name=c.repo_name)}">${_('Lock')}</a></li>
265 265 %endif
266 266 %endif
267 267 %if c.rhodecode_user.username != h.DEFAULT_USER:
268 268 %if c.rhodecode_db_repo.repo_type in ['git','hg']:
269 269 <li><a href="${h.url('repo_fork_home',repo_name=c.repo_name)}">${_('Fork')}</a></li>
270 270 <li><a href="${h.url('pullrequest_home',repo_name=c.repo_name)}">${_('Create Pull Request')}</a></li>
271 271 %endif
272 272 %endif
273 273 </ul>
274 274 </li>
275 275 </ul>
276 276 </div>
277 277 <div class="clear"></div>
278 278 </div>
279 279 <!--- END CONTEXT BAR -->
280 280
281 281 </%def>
282 282
283 283 <%def name="usermenu()">
284 284 ## USER MENU
285 285 <li id="quick_login_li">
286 286 <a id="quick_login_link" class="menulink childs">
287 287 ${gravatar(c.rhodecode_user.email, 20)}
288 288 <span class="user">
289 289 %if c.rhodecode_user.username != h.DEFAULT_USER:
290 290 <span class="menu_link_user">${c.rhodecode_user.username}</span><div class="show_more"></div>
291 291 %else:
292 292 <span>${_('Sign in')}</span>
293 293 %endif
294 294 </span>
295 295 </a>
296 296
297 297 <div class="user-menu submenu">
298 298 <div id="quick_login">
299 299 %if c.rhodecode_user.username == h.DEFAULT_USER:
300 300 <h4>${_('Sign in to your account')}</h4>
301 301 ${h.form(h.route_path('login', _query={'came_from': h.url.current()}), needs_csrf_token=False)}
302 302 <div class="form form-vertical">
303 303 <div class="fields">
304 304 <div class="field">
305 305 <div class="label">
306 306 <label for="username">${_('Username')}:</label>
307 307 </div>
308 308 <div class="input">
309 309 ${h.text('username',class_='focus',tabindex=1)}
310 310 </div>
311 311
312 312 </div>
313 313 <div class="field">
314 314 <div class="label">
315 315 <label for="password">${_('Password')}:</label>
316 316 <span class="forgot_password">${h.link_to(_('(Forgot password?)'),h.route_path('reset_password'))}</span>
317 317 </div>
318 318 <div class="input">
319 319 ${h.password('password',class_='focus',tabindex=2)}
320 320 </div>
321 321 </div>
322 322 <div class="buttons">
323 323 <div class="register">
324 324 %if h.HasPermissionAny('hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')():
325 325 ${h.link_to(_("Don't have an account ?"),h.route_path('register'))}
326 326 %endif
327 327 </div>
328 328 <div class="submit">
329 329 ${h.submit('sign_in',_('Sign In'),class_="btn btn-small",tabindex=3)}
330 330 </div>
331 331 </div>
332 332 </div>
333 333 </div>
334 334 ${h.end_form()}
335 335 %else:
336 336 <div class="">
337 337 <div class="big_gravatar">${gravatar(c.rhodecode_user.email, 48)}</div>
338 338 <div class="full_name">${c.rhodecode_user.full_name_or_username}</div>
339 339 <div class="email">${c.rhodecode_user.email}</div>
340 340 </div>
341 341 <div class="">
342 342 <ol class="links">
343 343 <li>${h.link_to(_(u'My account'),h.url('my_account'))}</li>
344 344 <li class="logout">
345 345 ${h.secure_form(h.route_path('logout'))}
346 346 ${h.submit('log_out', _(u'Sign Out'),class_="btn btn-primary")}
347 347 ${h.end_form()}
348 348 </li>
349 349 </ol>
350 350 </div>
351 351 %endif
352 352 </div>
353 353 </div>
354 354 %if c.rhodecode_user.username != h.DEFAULT_USER:
355 355 <div class="pill_container">
356 356 % if c.unread_notifications == 0:
357 357 <a class="menu_link_notifications empty" href="${h.url('notifications')}">${c.unread_notifications}</a>
358 358 % else:
359 359 <a class="menu_link_notifications" href="${h.url('notifications')}">${c.unread_notifications}</a>
360 360 % endif
361 361 </div>
362 362 % endif
363 363 </li>
364 364 </%def>
365 365
366 366 <%def name="menu_items(active=None)">
367 367 <%
368 368 def is_active(selected):
369 369 if selected == active:
370 370 return "active"
371 371 return ""
372 372 %>
373 373 <ul id="quick" class="main_nav navigation horizontal-list">
374 374 <!-- repo switcher -->
375 375 <li class="${is_active('repositories')} repo_switcher_li has_select2">
376 376 <input id="repo_switcher" name="repo_switcher" type="hidden">
377 377 </li>
378 378
379 379 ## ROOT MENU
380 380 %if c.rhodecode_user.username != h.DEFAULT_USER:
381 381 <li class="${is_active('journal')}">
382 382 <a class="menulink" title="${_('Show activity journal')}" href="${h.url('journal')}">
383 383 <div class="menulabel">${_('Journal')}</div>
384 384 </a>
385 385 </li>
386 386 %else:
387 387 <li class="${is_active('journal')}">
388 388 <a class="menulink" title="${_('Show Public activity journal')}" href="${h.url('public_journal')}">
389 389 <div class="menulabel">${_('Public journal')}</div>
390 390 </a>
391 391 </li>
392 392 %endif
393 393 <li class="${is_active('gists')}">
394 394 <a class="menulink childs" title="${_('Show Gists')}" href="${h.url('gists')}">
395 395 <div class="menulabel">${_('Gists')}</div>
396 396 </a>
397 397 </li>
398 398 <li class="${is_active('search')}">
399 399 <a class="menulink" title="${_('Search in repositories you have access to')}" href="${h.url('search')}">
400 400 <div class="menulabel">${_('Search')}</div>
401 401 </a>
402 402 </li>
403 403 % if h.HasPermissionAll('hg.admin')('access admin main page'):
404 404 <li class="${is_active('admin')}">
405 405 <a class="menulink childs" title="${_('Admin settings')}" href="#" onclick="return false;">
406 406 <div class="menulabel">${_('Admin')} <div class="show_more"></div></div>
407 407 </a>
408 408 ${admin_menu()}
409 409 </li>
410 410 % elif c.rhodecode_user.repositories_admin or c.rhodecode_user.repository_groups_admin or c.rhodecode_user.user_groups_admin:
411 411 <li class="${is_active('admin')}">
412 412 <a class="menulink childs" title="${_('Delegated Admin settings')}">
413 413 <div class="menulabel">${_('Admin')} <div class="show_more"></div></div>
414 414 </a>
415 415 ${admin_menu_simple(c.rhodecode_user.repositories_admin,
416 416 c.rhodecode_user.repository_groups_admin,
417 417 c.rhodecode_user.user_groups_admin or h.HasPermissionAny('hg.usergroup.create.true')())}
418 418 </li>
419 419 % endif
420 420 % if c.debug_style:
421 421 <li class="${is_active('debug_style')}">
422 422 <a class="menulink" title="${_('Style')}" href="${h.url('debug_style_home')}">
423 423 <div class="menulabel">${_('Style')}</div>
424 424 </a>
425 425 </li>
426 426 % endif
427 427 ## render extra user menu
428 428 ${usermenu()}
429 429 </ul>
430 430
431 431 <script type="text/javascript">
432 432 var visual_show_public_icon = "${c.visual.show_public_icon}" == "True";
433 433
434 434 /*format the look of items in the list*/
435 435 var format = function(state, escapeMarkup){
436 436 if (!state.id){
437 437 return state.text; // optgroup
438 438 }
439 439 var obj_dict = state.obj;
440 440 var tmpl = '';
441 441
442 442 if(obj_dict && state.type == 'repo'){
443 443 if(obj_dict['repo_type'] === 'hg'){
444 444 tmpl += '<i class="icon-hg"></i> ';
445 445 }
446 446 else if(obj_dict['repo_type'] === 'git'){
447 447 tmpl += '<i class="icon-git"></i> ';
448 448 }
449 449 else if(obj_dict['repo_type'] === 'svn'){
450 450 tmpl += '<i class="icon-svn"></i> ';
451 451 }
452 452 if(obj_dict['private']){
453 453 tmpl += '<i class="icon-lock" ></i> ';
454 454 }
455 455 else if(visual_show_public_icon){
456 456 tmpl += '<i class="icon-unlock-alt"></i> ';
457 457 }
458 458 }
459 459 if(obj_dict && state.type == 'commit') {
460 460 tmpl += '<i class="icon-tag"></i>';
461 461 }
462 462 if(obj_dict && state.type == 'group'){
463 463 tmpl += '<i class="icon-folder-close"></i> ';
464 464 }
465 465 tmpl += escapeMarkup(state.text);
466 466 return tmpl;
467 467 };
468 468
469 469 var formatResult = function(result, container, query, escapeMarkup) {
470 470 return format(result, escapeMarkup);
471 471 };
472 472
473 473 var formatSelection = function(data, container, escapeMarkup) {
474 474 return format(data, escapeMarkup);
475 475 };
476 476
477 477 $("#repo_switcher").select2({
478 478 cachedDataSource: {},
479 479 minimumInputLength: 2,
480 480 placeholder: '<div class="menulabel">${_('Go to')} <div class="show_more"></div></div>',
481 481 dropdownAutoWidth: true,
482 482 formatResult: formatResult,
483 483 formatSelection: formatSelection,
484 484 containerCssClass: "repo-switcher",
485 485 dropdownCssClass: "repo-switcher-dropdown",
486 486 escapeMarkup: function(m){
487 487 // don't escape our custom placeholder
488 488 if(m.substr(0,23) == '<div class="menulabel">'){
489 489 return m;
490 490 }
491 491
492 492 return Select2.util.escapeMarkup(m);
493 493 },
494 494 query: $.debounce(250, function(query){
495 495 self = this;
496 496 var cacheKey = query.term;
497 497 var cachedData = self.cachedDataSource[cacheKey];
498 498
499 499 if (cachedData) {
500 500 query.callback({results: cachedData.results});
501 501 } else {
502 502 $.ajax({
503 503 url: "${h.url('goto_switcher_data')}",
504 504 data: {'query': query.term},
505 505 dataType: 'json',
506 506 type: 'GET',
507 507 success: function(data) {
508 508 self.cachedDataSource[cacheKey] = data;
509 509 query.callback({results: data.results});
510 510 },
511 511 error: function(data, textStatus, errorThrown) {
512 512 alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText));
513 513 }
514 514 })
515 515 }
516 516 })
517 517 });
518 518
519 519 $("#repo_switcher").on('select2-selecting', function(e){
520 520 e.preventDefault();
521 521 window.location = e.choice.url;
522 522 });
523 523
524 524 ## Global mouse bindings ##
525 525
526 526 // general help "?"
527 527 Mousetrap.bind(['?'], function(e) {
528 528 $('#help_kb').modal({})
529 529 });
530 530
531 531 // / open the quick filter
532 532 Mousetrap.bind(['/'], function(e) {
533 533 $("#repo_switcher").select2("open");
534 534
535 535 // return false to prevent default browser behavior
536 536 // and stop event from bubbling
537 537 return false;
538 538 });
539 539
540 540 // general nav g + action
541 541 Mousetrap.bind(['g h'], function(e) {
542 542 window.location = pyroutes.url('home');
543 543 });
544 544 Mousetrap.bind(['g g'], function(e) {
545 545 window.location = pyroutes.url('gists', {'private':1});
546 546 });
547 547 Mousetrap.bind(['g G'], function(e) {
548 548 window.location = pyroutes.url('gists', {'public':1});
549 549 });
550 550 Mousetrap.bind(['n g'], function(e) {
551 551 window.location = pyroutes.url('new_gist');
552 552 });
553 553 Mousetrap.bind(['n r'], function(e) {
554 554 window.location = pyroutes.url('new_repo');
555 555 });
556 556
557 557 % if hasattr(c, 'repo_name') and hasattr(c, 'rhodecode_db_repo'):
558 558 // nav in repo context
559 559 Mousetrap.bind(['g s'], function(e) {
560 560 window.location = pyroutes.url('summary_home', {'repo_name': REPO_NAME});
561 561 });
562 562 Mousetrap.bind(['g c'], function(e) {
563 563 window.location = pyroutes.url('changelog_home', {'repo_name': REPO_NAME});
564 564 });
565 565 Mousetrap.bind(['g F'], function(e) {
566 566 window.location = pyroutes.url('files_home', {'repo_name': REPO_NAME, 'revision': '${c.rhodecode_db_repo.landing_rev[1]}', 'f_path': '', 'search': '1'});
567 567 });
568 568 Mousetrap.bind(['g f'], function(e) {
569 569 window.location = pyroutes.url('files_home', {'repo_name': REPO_NAME, 'revision': '${c.rhodecode_db_repo.landing_rev[1]}', 'f_path': ''});
570 570 });
571 571 Mousetrap.bind(['g p'], function(e) {
572 572 window.location = pyroutes.url('pullrequest_show_all', {'repo_name': REPO_NAME});
573 573 });
574 574 Mousetrap.bind(['g o'], function(e) {
575 575 window.location = pyroutes.url('edit_repo', {'repo_name': REPO_NAME});
576 576 });
577 577 Mousetrap.bind(['g O'], function(e) {
578 578 window.location = pyroutes.url('edit_repo_perms', {'repo_name': REPO_NAME});
579 579 });
580 580 % endif
581 581
582 582 </script>
583 583 <script src="${h.url('/js/rhodecode/base/keyboard-bindings.js', ver=c.rhodecode_version_hash)}"></script>
584 584 </%def>
585 585
586 586 <div class="modal" id="help_kb" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
587 587 <div class="modal-dialog">
588 588 <div class="modal-content">
589 589 <div class="modal-header">
590 590 <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
591 591 <h4 class="modal-title" id="myModalLabel">${_('Keyboard shortcuts')}</h4>
592 592 </div>
593 593 <div class="modal-body">
594 594 <div class="block-left">
595 595 <table class="keyboard-mappings">
596 596 <tbody>
597 597 <tr>
598 598 <th></th>
599 599 <th>${_('Site-wide shortcuts')}</th>
600 600 </tr>
601 601 <%
602 602 elems = [
603 603 ('/', 'Open quick search box'),
604 604 ('g h', 'Goto home page'),
605 605 ('g g', 'Goto my private gists page'),
606 606 ('g G', 'Goto my public gists page'),
607 607 ('n r', 'New repository page'),
608 608 ('n g', 'New gist page'),
609 609 ]
610 610 %>
611 611 %for key, desc in elems:
612 612 <tr>
613 613 <td class="keys">
614 614 <span class="key tag">${key}</span>
615 615 </td>
616 616 <td>${desc}</td>
617 617 </tr>
618 618 %endfor
619 619 </tbody>
620 620 </table>
621 621 </div>
622 622 <div class="block-left">
623 623 <table class="keyboard-mappings">
624 624 <tbody>
625 625 <tr>
626 626 <th></th>
627 627 <th>${_('Repositories')}</th>
628 628 </tr>
629 629 <%
630 630 elems = [
631 631 ('g s', 'Goto summary page'),
632 632 ('g c', 'Goto changelog page'),
633 633 ('g f', 'Goto files page'),
634 634 ('g F', 'Goto files page with file search activated'),
635 635 ('g p', 'Goto pull requests page'),
636 636 ('g o', 'Goto repository settings'),
637 637 ('g O', 'Goto repository permissions settings'),
638 638 ]
639 639 %>
640 640 %for key, desc in elems:
641 641 <tr>
642 642 <td class="keys">
643 643 <span class="key tag">${key}</span>
644 644 </td>
645 645 <td>${desc}</td>
646 646 </tr>
647 647 %endfor
648 648 </tbody>
649 649 </table>
650 650 </div>
651 651 </div>
652 652 <div class="modal-footer">
653 653 </div>
654 654 </div><!-- /.modal-content -->
655 655 </div><!-- /.modal-dialog -->
656 656 </div><!-- /.modal -->
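Reviewer note: the admin menu above now resolves an Integrations link through h.route_path('global_integrations_home'), and the repository settings template earlier does the same with 'repo_integrations_home'. For readers unfamiliar with how such named routes come to exist, here is a hedged Pyramid sketch; the URL patterns and the view callable are assumptions for illustration, not the routes this changeset actually registers.

    from pyramid.config import Configurator
    from pyramid.view import view_config

    @view_config(route_name='global_integrations_home', renderer='json')
    @view_config(route_name='repo_integrations_home', renderer='json')
    def integrations_home(request):
        # placeholder view; the real views render the integrations settings pages
        return {'repo_name': request.matchdict.get('repo_name')}

    def main():
        config = Configurator()
        # route names must match what h.route_path() is asked to resolve
        config.add_route('global_integrations_home', '/_admin/integrations')
        config.add_route('repo_integrations_home', '/{repo_name}/settings/integrations')
        config.scan(__name__)
        return config.make_wsgi_app()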
@@ -1,116 +1,116 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests.events.conftest import EventCatcher
24 24
25 25 from rhodecode.lib import hooks_base, utils2
26 26 from rhodecode.model.repo import RepoModel
27 27 from rhodecode.events.repo import (
28 28 RepoPrePullEvent, RepoPullEvent,
29 29 RepoPrePushEvent, RepoPushEvent,
30 RepoPreCreateEvent, RepoCreatedEvent,
31 RepoPreDeleteEvent, RepoDeletedEvent,
30 RepoPreCreateEvent, RepoCreateEvent,
31 RepoPreDeleteEvent, RepoDeleteEvent,
32 32 )
33 33
34 34
35 35 @pytest.fixture
36 36 def scm_extras(user_regular, repo_stub):
37 37 extras = utils2.AttributeDict({
38 38 'ip': '127.0.0.1',
39 39 'username': user_regular.username,
40 40 'action': '',
41 41 'repository': repo_stub.repo_name,
42 42 'scm': repo_stub.scm_instance().alias,
43 43 'config': '',
44 44 'server_url': 'http://example.com',
45 45 'make_lock': None,
46 46 'locked_by': [None],
47 47 'commit_ids': ['a' * 40] * 3,
48 48 })
49 49 return extras
50 50
51 51
52 52 # TODO: dan: make the serialization tests complete json comparisons
53 53 @pytest.mark.parametrize('EventClass', [
54 RepoPreCreateEvent, RepoCreatedEvent,
55 RepoPreDeleteEvent, RepoDeletedEvent,
54 RepoPreCreateEvent, RepoCreateEvent,
55 RepoPreDeleteEvent, RepoDeleteEvent,
56 56 ])
57 57 def test_repo_events_serialized(repo_stub, EventClass):
58 58 event = EventClass(repo_stub)
59 59 data = event.as_dict()
60 60 assert data['name'] == EventClass.name
61 61 assert data['repo']['repo_name'] == repo_stub.repo_name
62 62 assert data['repo']['url']
63 63
64 64
65 65 @pytest.mark.parametrize('EventClass', [
66 66 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
67 67 ])
68 68 def test_vcs_repo_events_serialize(repo_stub, scm_extras, EventClass):
69 69 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
70 70 data = event.as_dict()
71 71 assert data['name'] == EventClass.name
72 72 assert data['repo']['repo_name'] == repo_stub.repo_name
73 73 assert data['repo']['url']
74 74
75 75
76 76
77 77 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
78 78 def test_vcs_repo_push_event_serialize(repo_stub, scm_extras, EventClass):
79 79 event = EventClass(repo_name=repo_stub.repo_name,
80 80 pushed_commit_ids=scm_extras['commit_ids'],
81 81 extras=scm_extras)
82 82 data = event.as_dict()
83 83 assert data['name'] == EventClass.name
84 84 assert data['repo']['repo_name'] == repo_stub.repo_name
85 85 assert data['repo']['url']
86 86
87 87
88 88 def test_create_delete_repo_fires_events(backend):
89 89 with EventCatcher() as event_catcher:
90 90 repo = backend.create_repo()
91 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreatedEvent]
91 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
92 92
93 93 with EventCatcher() as event_catcher:
94 94 RepoModel().delete(repo)
95 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeletedEvent]
95 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
96 96
97 97
98 98 def test_pull_fires_events(scm_extras):
99 99 with EventCatcher() as event_catcher:
100 100 hooks_base.pre_push(scm_extras)
101 101 assert event_catcher.events_types == [RepoPrePushEvent]
102 102
103 103 with EventCatcher() as event_catcher:
104 104 hooks_base.post_push(scm_extras)
105 105 assert event_catcher.events_types == [RepoPushEvent]
106 106
107 107
108 108 def test_push_fires_events(scm_extras):
109 109 with EventCatcher() as event_catcher:
110 110 hooks_base.pre_pull(scm_extras)
111 111 assert event_catcher.events_types == [RepoPrePullEvent]
112 112
113 113 with EventCatcher() as event_catcher:
114 114 hooks_base.post_pull(scm_extras)
115 115 assert event_catcher.events_types == [RepoPullEvent]
116 116
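Reviewer note: the tests above rely on the EventCatcher fixture from rhodecode.tests.events.conftest, whose implementation is not part of this diff. The following is only a simplified, hypothetical stand-in to illustrate the idea of recording every event type passed through events.trigger(); the real fixture may work differently.

    import mock
    from rhodecode import events

    class SimpleEventCatcher(object):
        """Records the type of every event sent through events.trigger()."""

        def __init__(self):
            self.events_types = []
            self._original_trigger = events.trigger

        def _recording_trigger(self, event, *args, **kwargs):
            self.events_types.append(type(event))
            return self._original_trigger(event, *args, **kwargs)

        def __enter__(self):
            self._patcher = mock.patch.object(
                events, 'trigger', side_effect=self._recording_trigger)
            self._patcher.start()
            return self

        def __exit__(self, *exc_info):
            self._patcher.stop()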
@@ -1,210 +1,244 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import copy
22 22 import mock
23 23 import pytest
24 24
25 25 from pylons.util import ContextObj
26 26
27 27 from rhodecode.lib import helpers
28 28 from rhodecode.lib.utils2 import AttributeDict
29 29 from rhodecode.model.settings import IssueTrackerSettingsModel
30 30
31 31
32 32 @pytest.mark.parametrize('url, expected_url', [
33 33 ('http://rc.rc/test', '<a href="http://rc.rc/test">http://rc.rc/test</a>'),
34 34 ('http://rc.rc/@foo', '<a href="http://rc.rc/@foo">http://rc.rc/@foo</a>'),
35 35 ('http://rc.rc/!foo', '<a href="http://rc.rc/!foo">http://rc.rc/!foo</a>'),
36 36 ('http://rc.rc/&foo', '<a href="http://rc.rc/&foo">http://rc.rc/&foo</a>'),
37 37 ('http://rc.rc/#foo', '<a href="http://rc.rc/#foo">http://rc.rc/#foo</a>'),
38 38 ])
39 39 def test_urlify_text(url, expected_url):
40 40 assert helpers.urlify_text(url) == expected_url
41 41
42 42
43 43 @pytest.mark.parametrize('repo_name, commit_id, path, expected_result', [
44 44 ('rX<X', 'cX<X', 'pX<X/aX<X/bX<X',
45 45 '<a class="pjax-link" href="/rX%3CX/files/cX%3CX/">rX&lt;X</a>/'
46 46 '<a class="pjax-link" href="/rX%3CX/files/cX%3CX/pX%3CX">pX&lt;X</a>/'
47 47 '<a class="pjax-link" href="/rX%3CX/files/cX%3CX/pX%3CX/aX%3CX">aX&lt;X'
48 48 '</a>/bX&lt;X'),
49 49 # Path with only one segment
50 50 ('rX<X', 'cX<X', 'pX<X',
51 51 '<a class="pjax-link" href="/rX%3CX/files/cX%3CX/">rX&lt;X</a>/pX&lt;X'),
52 52 # Empty path
53 53 ('rX<X', 'cX<X', '', 'rX&lt;X'),
54 54 ('rX"X', 'cX"X', 'pX"X/aX"X/bX"X',
55 55 '<a class="pjax-link" href="/rX%22X/files/cX%22X/">rX&#34;X</a>/'
56 56 '<a class="pjax-link" href="/rX%22X/files/cX%22X/pX%22X">pX&#34;X</a>/'
57 57 '<a class="pjax-link" href="/rX%22X/files/cX%22X/pX%22X/aX%22X">aX&#34;X'
58 58 '</a>/bX&#34;X'),
59 59 ], ids=['simple', 'one_segment', 'empty_path', 'simple_quote'])
60 60 def test_files_breadcrumbs_xss(
61 61 repo_name, commit_id, path, pylonsapp, expected_result):
62 62 result = helpers.files_breadcrumbs(repo_name, commit_id, path)
63 63 # Expect it to encode all path fragments properly. This is important
64 64 # because it returns an instance of `literal`.
65 65 assert result == expected_result
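# Illustrative note, not part of the changeset: the helper returns a
# markup-safe `literal`, which templates render without further escaping,
# which is why files_breadcrumbs itself must HTML-escape every path
# fragment and the expected markup above contains &lt; and &#34; entities.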
66 66
67 67
68 68 def test_format_binary():
69 69 assert helpers.format_byte_size_binary(298489462784) == '278.0 GiB'
70 70
71 71
72 @pytest.mark.parametrize('text_string, pattern, expected', [
73 ('No issue here', '(?:#)(?P<issue_id>\d+)', []),
74 ('Fix #42', '(?:#)(?P<issue_id>\d+)',
75 [{'url': 'http://r.io/{repo}/i/42', 'id': '42'}]),
76 ('Fix #42, #53', '(?:#)(?P<issue_id>\d+)', [
77 {'url': 'http://r.io/{repo}/i/42', 'id': '42'},
78 {'url': 'http://r.io/{repo}/i/53', 'id': '53'}]),
79 ('Fix #42', '(?:#)?<issue_id>\d+)', []), # Broken regex
80 ])
81 def test_extract_issues(backend, text_string, pattern, expected):
82 repo = backend.create_repo()
83 config = {
84 '123': {
85 'uid': '123',
86 'pat': pattern,
87 'url': 'http://r.io/${repo}/i/${issue_id}',
88 'pref': '#',
89 }
90 }
91
92 def get_settings_mock(self, cache=True):
93 return config
94
95 with mock.patch.object(IssueTrackerSettingsModel,
96 'get_settings', get_settings_mock):
97 text, issues = helpers.process_patterns(text_string, repo.repo_name)
98
99 expected = copy.deepcopy(expected)
100 for item in expected:
101 item['url'] = item['url'].format(repo=repo.repo_name)
102
103 assert issues == expected
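# Illustrative note, not part of the changeset: with the config above,
# process_patterns returns a (text, issues) pair; for the input 'Fix #42'
# the issues list looks like
#     [{'id': '42', 'url': 'http://r.io/<repo_name>/i/42'}]
# which is exactly the shape the `expected` parameter encodes.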
104
105
72 106 @pytest.mark.parametrize('text_string, pattern, expected_text', [
73 107 ('Fix #42', '(?:#)(?P<issue_id>\d+)',
74 108 'Fix <a class="issue-tracker-link" href="http://r.io/{repo}/i/42">#42</a>'
75 109 ),
76 110 ('Fix #42', '(?:#)?<issue_id>\d+)', 'Fix #42'), # Broken regex
77 111 ])
78 112 def test_process_patterns_repo(backend, text_string, pattern, expected_text):
79 113 repo = backend.create_repo()
80 114 config = {'123': {
81 115 'uid': '123',
82 116 'pat': pattern,
83 117 'url': 'http://r.io/${repo}/i/${issue_id}',
84 118 'pref': '#',
85 119 }
86 120 }
87 121
88 122 def get_settings_mock(self, cache=True):
89 123 return config
90 124
91 125 with mock.patch.object(IssueTrackerSettingsModel,
92 126 'get_settings', get_settings_mock):
93 processed_text = helpers.process_patterns(
127 processed_text, issues = helpers.process_patterns(
94 128 text_string, repo.repo_name, config)
95 129
96 130 assert processed_text == expected_text.format(repo=repo.repo_name)
97 131
98 132
99 133 @pytest.mark.parametrize('text_string, pattern, expected_text', [
100 134 ('Fix #42', '(?:#)(?P<issue_id>\d+)',
101 135 'Fix <a class="issue-tracker-link" href="http://r.io/i/42">#42</a>'
102 136 ),
103 137 ('Fix #42', '(?:#)?<issue_id>\d+)', 'Fix #42'), # Broken regex
104 138 ])
105 139 def test_process_patterns_no_repo(text_string, pattern, expected_text):
106 140 config = {'123': {
107 141 'uid': '123',
108 142 'pat': pattern,
109 143 'url': 'http://r.io/i/${issue_id}',
110 144 'pref': '#',
111 145 }
112 146 }
113 147
114 148 def get_settings_mock(self, cache=True):
115 149 return config
116 150
117 151 with mock.patch.object(IssueTrackerSettingsModel,
118 152 'get_global_settings', get_settings_mock):
119 processed_text = helpers.process_patterns(
153 processed_text, issues = helpers.process_patterns(
120 154 text_string, '', config)
121 155
122 156 assert processed_text == expected_text
123 157
124 158
125 159 def test_process_patterns_non_existent_repo_name(backend):
126 160 text_string = 'Fix #42'
127 161 pattern = '(?:#)(?P<issue_id>\d+)'
128 162 expected_text = ('Fix <a class="issue-tracker-link" '
129 163 'href="http://r.io/do-not-exist/i/42">#42</a>')
130 164 config = {'123': {
131 165 'uid': '123',
132 166 'pat': pattern,
133 167 'url': 'http://r.io/${repo}/i/${issue_id}',
134 168 'pref': '#',
135 169 }
136 170 }
137 171
138 172 def get_settings_mock(self, cache=True):
139 173 return config
140 174
141 175 with mock.patch.object(IssueTrackerSettingsModel,
142 176 'get_global_settings', get_settings_mock):
143 processed_text = helpers.process_patterns(
177 processed_text, issues = helpers.process_patterns(
144 178 text_string, 'do-not-exist', config)
145 179
146 180 assert processed_text == expected_text
147 181
148 182
149 183 def test_get_visual_attr(pylonsapp):
150 184 c = ContextObj()
151 185 assert None is helpers.get_visual_attr(c, 'fake')
152 186
153 187 # emulate the c.visual behaviour
154 188 c.visual = AttributeDict({})
155 189 assert None is helpers.get_visual_attr(c, 'some_var')
156 190
157 191 c.visual.some_var = 'foobar'
158 192 assert 'foobar' == helpers.get_visual_attr(c, 'some_var')
159 193
160 194
161 195 @pytest.mark.parametrize('test_text, inclusive, expected_text', [
162 196 ('just a string', False, 'just a string'),
163 197 ('just a string\n', False, 'just a string'),
164 198 ('just a string\n next line', False, 'just a string...'),
165 199 ('just a string\n next line', True, 'just a string\n...'),
166 200 ])
167 201 def test_chop_at(test_text, inclusive, expected_text):
168 202 assert helpers.chop_at_smart(
169 203 test_text, '\n', inclusive, '...') == expected_text
170 204
171 205
172 206 @pytest.mark.parametrize('test_text, expected_output', [
173 207 ('some text', ['some', 'text']),
174 208 ('some text', ['some', 'text']),
175 209 ('some text "with a phrase"', ['some', 'text', 'with a phrase']),
176 210 ('"a phrase" "another phrase"', ['a phrase', 'another phrase']),
177 211 ('"justphrase"', ['justphrase']),
178 212 ('""', []),
179 213 ('', []),
180 214 (' ', []),
181 215 ('" "', []),
182 216 ])
183 217 def test_extract_phrases(test_text, expected_output):
184 218 assert helpers.extract_phrases(test_text) == expected_output
185 219
186 220
187 221 @pytest.mark.parametrize('test_text, text_phrases, expected_output', [
188 222 ('some text here', ['some', 'here'], [(0, 4), (10, 14)]),
189 223 ('here here there', ['here'], [(0, 4), (5, 9), (11, 15)]),
190 224 ('irrelevant', ['not found'], []),
191 225 ('irrelevant', ['not found'], []),
192 226 ])
193 227 def test_get_matching_offsets(test_text, text_phrases, expected_output):
194 228 assert helpers.get_matching_offsets(
195 229 test_text, text_phrases) == expected_output
196 230
197 231
198 232 def test_normalize_text_for_matching():
199 233 assert helpers.normalize_text_for_matching(
200 234 'OJjfe)*#$*@)$JF*)3r2f80h') == 'ojjfe jf 3r2f80h'
201 235
202 236
203 237 def test_get_matching_line_offsets():
204 238 assert helpers.get_matching_line_offsets([
205 239 'words words words',
206 240 'words words words',
207 241 'some text some',
208 242 'words words words',
209 243 'words words words',
210 244 'text here what'], 'text') == {3: [(5, 9)], 6: [(0, 4)]}
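A minimal consumption sketch, not part of the changeset: the returned dict maps 1-based line numbers to (start, end) offsets within that line, so each match can be sliced back out of the input.

lines = ['words words words', 'words words words', 'some text some',
         'words words words', 'words words words', 'text here what']
for line_no, offsets in sorted(
        helpers.get_matching_line_offsets(lines, 'text').items()):
    for start, end in offsets:
        # keys are 1-based line numbers, offsets index into that line
        assert lines[line_no - 1][start:end] == 'text'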
@@ -1,22 +1,35 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from pyramid.i18n import TranslationStringFactory
20 20
21 21 # Create a translation string factory for the 'rhodecode' domain.
22 22 _ = TranslationStringFactory('rhodecode')
23
24 class LazyString(object):
25 def __init__(self, *args, **kw):
26 self.args = args
27 self.kw = kw
28
29 def __str__(self):
30 return _(*self.args, **self.kw)
31
32
33 def lazy_ugettext(*args, **kw):
34 """ Lazily evaluated version of _() """
35 return LazyString(*args, **kw)
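A minimal usage sketch (illustrative, not part of the changeset; the variable names are made up): lazy_ugettext wraps its arguments in a LazyString, so the call to the _() factory is deferred until the value is first rendered as text. This is useful for strings created at import time, before the translation machinery is configured.

button_label = lazy_ugettext('Create integration')  # nothing built yet
# ... later, e.g. while rendering a template:
rendered = str(button_label)  # only now is _() invoked on the stored args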
@@ -1,250 +1,249 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Import early to make sure things are patched up properly
4 4 from setuptools import setup, find_packages
5 5
6 6 import os
7 7 import sys
8 8 import platform
9 9
10 10 if sys.version_info < (2, 7):
11 11 raise Exception('RhodeCode requires Python 2.7 or later')
12 12
13 13
14 14 here = os.path.abspath(os.path.dirname(__file__))
15 15
16 16
17 17 def _get_meta_var(name, data, callback_handler=None):
18 18 import re
19 19 matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
20 20 if matches:
21 21 if not callable(callback_handler):
22 22 callback_handler = lambda v: v
23 23
24 24 return callback_handler(eval(matches.groups()[0]))
25 25
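# Illustrative note, not part of the changeset: for a line such as
#     __license__ = 'AGPLv3'          (the value here is hypothetical)
# in rhodecode/__init__.py, the regex captures the right-hand side and
# eval() turns it back into a Python value, so
#     _get_meta_var('__license__', _metadata)
# returns that string.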
26 26 _meta = open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb')
27 27 _metadata = _meta.read()
28 28 _meta.close()
29 29
30 30 callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
31 31 __version__ = open(os.path.join('rhodecode', 'VERSION')).read().strip()
32 32 __license__ = _get_meta_var('__license__', _metadata)
33 33 __author__ = _get_meta_var('__author__', _metadata)
34 34 __url__ = _get_meta_var('__url__', _metadata)
35 35 # defines current platform
36 36 __platform__ = platform.system()
37 37
38 38 # Cygwin has different platform identifiers, but they all contain the
39 39 # term "CYGWIN"
40 40 is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__
41 41
42 42 requirements = [
43 43 'Babel',
44 44 'Beaker',
45 45 'FormEncode',
46 46 'Mako',
47 47 'Markdown',
48 48 'MarkupSafe',
49 49 'MySQL-python',
50 50 'Paste',
51 51 'PasteDeploy',
52 52 'PasteScript',
53 53 'Pygments',
54 54 'Pylons',
55 55 'Pyro4',
56 56 'Routes',
57 57 'SQLAlchemy',
58 58 'Tempita',
59 59 'URLObject',
60 60 'WebError',
61 61 'WebHelpers',
62 62 'WebHelpers2',
63 63 'WebOb',
64 64 'WebTest',
65 65 'Whoosh',
66 66 'alembic',
67 67 'amqplib',
68 68 'anyjson',
69 69 'appenlight-client',
70 70 'authomatic',
71 71 'backport_ipaddress',
72 72 'celery',
73 73 'colander',
74 74 'decorator',
75 75 'docutils',
76 76 'gunicorn',
77 77 'infrae.cache',
78 78 'ipython',
79 79 'iso8601',
80 80 'kombu',
81 'marshmallow',
82 81 'msgpack-python',
83 82 'packaging',
84 83 'psycopg2',
85 84 'py-gfm',
86 85 'pycrypto',
87 86 'pycurl',
88 87 'pyparsing',
89 88 'pyramid',
90 89 'pyramid-debugtoolbar',
91 90 'pyramid-mako',
92 91 'pyramid-beaker',
93 92 'pysqlite',
94 93 'python-dateutil',
95 94 'python-ldap',
96 95 'python-memcached',
97 96 'python-pam',
98 97 'recaptcha-client',
99 98 'repoze.lru',
100 99 'requests',
101 100 'simplejson',
102 101 'waitress',
103 102 'zope.cachedescriptors',
104 103 'dogpile.cache',
105 104 'dogpile.core'
106 105 ]
107 106
108 107 if is_windows:
109 108 pass
110 109 else:
111 110 requirements.append('psutil')
112 111 requirements.append('py-bcrypt')
113 112
114 113 test_requirements = [
115 114 'WebTest',
116 115 'configobj',
117 116 'cssselect',
118 117 'flake8',
119 118 'lxml',
120 119 'mock',
121 120 'pytest',
122 121 'pytest-cov',
123 122 'pytest-runner',
124 123 ]
125 124
126 125 setup_requirements = [
127 126 'PasteScript',
128 127 'pytest-runner',
129 128 ]
130 129
131 130 dependency_links = [
132 131 ]
133 132
134 133 classifiers = [
135 134 'Development Status :: 6 - Mature',
136 135 'Environment :: Web Environment',
137 136 'Framework :: Pylons',
138 137 'Intended Audience :: Developers',
139 138 'Operating System :: OS Independent',
140 139 'Programming Language :: Python',
141 140 'Programming Language :: Python :: 2.7',
142 141 ]
143 142
144 143
145 144 # additional files from project that goes somewhere in the filesystem
146 145 # relative to sys.prefix
147 146 data_files = []
148 147
149 148 # additional files that goes into package itself
150 149 package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo', ], }
151 150
152 151 description = ('RhodeCode is a fast and powerful management tool '
153 152 'for Mercurial and GIT with a built in push/pull server, '
154 153 'full text search and code-review.')
155 154
156 155 keywords = ' '.join([
157 156 'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
158 157 'repo groups', 'ldap', 'repository management', 'hgweb replacement',
159 158 'hgwebdir', 'gitweb replacement', 'serving hgweb',
160 159 ])
161 160
162 161 # long description
163 162 README_FILE = 'README.rst'
164 163 CHANGELOG_FILE = 'CHANGES.rst'
165 164 try:
166 165 long_description = open(README_FILE).read() + '\n\n' + \
167 166 open(CHANGELOG_FILE).read()
168 167
169 168 except IOError, err:
170 169 sys.stderr.write(
171 170 '[WARNING] Cannot find file specified as long_description (%s)\n'
172 171 'or changelog (%s); skipping that file' % (README_FILE, CHANGELOG_FILE)
173 172 )
174 173 long_description = description
175 174
176 175 # packages
177 176 packages = find_packages()
178 177
179 178 paster_commands = [
180 179 'make-config=rhodecode.lib.paster_commands.make_config:Command',
181 180 'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
182 181 'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
183 182 'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
184 183 'ishell=rhodecode.lib.paster_commands.ishell:Command',
185 184 'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
186 185 'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
187 186 ]
188 187
189 188 setup(
190 189 name='rhodecode-enterprise-ce',
191 190 version=__version__,
192 191 description=description,
193 192 long_description=long_description,
194 193 keywords=keywords,
195 194 license=__license__,
196 195 author=__author__,
197 196 author_email='marcin@rhodecode.com',
198 197 dependency_links=dependency_links,
199 198 url=__url__,
200 199 install_requires=requirements,
201 200 tests_require=test_requirements,
202 201 classifiers=classifiers,
203 202 setup_requires=setup_requirements,
204 203 data_files=data_files,
205 204 packages=packages,
206 205 include_package_data=True,
207 206 package_data=package_data,
208 207 message_extractors={
209 208 'rhodecode': [
210 209 ('**.py', 'python', None),
211 210 ('**.js', 'javascript', None),
212 211 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
213 212 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
214 213 ('public/**', 'ignore', None),
215 214 ]
216 215 },
217 216 zip_safe=False,
218 217 paster_plugins=['PasteScript', 'Pylons'],
219 218 entry_points={
220 219 'enterprise.plugins1': [
221 220 'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
222 221 'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
223 222 'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
224 223 'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
225 224 'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
226 225 'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
227 226 'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
228 227 ],
229 228 'paste.app_factory': [
230 229 'main=rhodecode.config.middleware:make_pyramid_app',
231 230 'pylons=rhodecode.config.middleware:make_app',
232 231 ],
233 232 'paste.app_install': [
234 233 'main=pylons.util:PylonsInstaller',
235 234 'pylons=pylons.util:PylonsInstaller',
236 235 ],
237 236 'paste.global_paster_command': paster_commands,
238 237 'pytest11': [
239 238 'pylons=rhodecode.tests.pylons_plugin',
240 239 'enterprise=rhodecode.tests.plugin',
241 240 ],
242 241 'console_scripts': [
243 242 'rcserver=rhodecode.rcserver:main',
244 243 ],
245 244 'beaker.backends': [
246 245 'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
247 246 'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
248 247 ]
249 248 },
250 249 )