integrations: add integration support...
dan -
r411:df8dc98d default

The requested changes are too big and content was truncated.

@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 from rhodecode.integrations.registry import IntegrationTypeRegistry
23 from rhodecode.integrations.types import slack
24
25 log = logging.getLogger(__name__)
26
27
28 # TODO: dan: This is currently global until we figure out what to do about
29 # VCS's not having a pyramid context - move it to pyramid app configuration
30 # includeme level later to allow per instance integration setup
31 integration_type_registry = IntegrationTypeRegistry()
32 integration_type_registry.register_integration_type(slack.SlackIntegrationType)
33
34 def integrations_event_handler(event):
35 """
36 Takes an event and passes it to all enabled integrations
37 """
38 from rhodecode.model.integration import IntegrationModel
39
40 integration_model = IntegrationModel()
41 integrations = integration_model.get_for_event(event)
42 for integration in integrations:
43 try:
44 integration_model.send_event(integration, event)
45 except Exception:
46 log.exception(
47 'failure occurred when sending event %s to integration %s' % (
48 event, integration))
49
50
51 def includeme(config):
52 config.include('rhodecode.integrations.routes')
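A rough usage sketch (hypothetical, not part of this commit): integration types are registered on the module-level registry at startup, and any code path that emits an event can hand it to integrations_event_handler, which looks up the enabled integrations for that event and calls send_event on each.

    from rhodecode import events
    from rhodecode.integrations import integrations_event_handler

    def on_repo_push(event):
        # illustrative hook; the real emitting code lives elsewhere
        assert isinstance(event, events.RepoPushEvent)
        integrations_event_handler(event)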
@@ -0,0 +1,37 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2012-2016 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 import logging
21
22 log = logging.getLogger(__name__)
23
24
25 class IntegrationTypeRegistry(dict):
26 """
27 Registry Class to hold IntegrationTypes
28 """
29 def register_integration_type(self, IntegrationType):
30 key = IntegrationType.key
31 if key in self:
32 log.warning(
33 'Overriding existing integration type %s (%s) with %s' % (
34 self[key], key, IntegrationType))
35
36 self[key] = IntegrationType
37
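Since the registry is a plain dict keyed by each type's key attribute, lookup and override behaviour are easy to see; a small illustration with a hypothetical stand-in class:

    registry = IntegrationTypeRegistry()

    class FooIntegrationType(object):   # hypothetical, only 'key' is needed here
        key = 'foo'

    registry.register_integration_type(FooIntegrationType)
    assert registry['foo'] is FooIntegrationType

    # registering a different class under the same key logs a warning
    # and replaces the previous entry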
@@ -0,0 +1,133 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22
23 from rhodecode.model.db import Repository, Integration
24 from rhodecode.config.routing import (
25 ADMIN_PREFIX, add_route_requirements, URL_NAME_REQUIREMENTS)
26 from rhodecode.integrations import integration_type_registry
27
28 log = logging.getLogger(__name__)
29
30
31 def includeme(config):
32 config.add_route('global_integrations_home',
33 ADMIN_PREFIX + '/integrations')
34 config.add_route('global_integrations_list',
35 ADMIN_PREFIX + '/integrations/{integration}')
36 for route_name in ['global_integrations_home', 'global_integrations_list']:
37 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
38 attr='index',
39 renderer='rhodecode:templates/admin/integrations/list.html',
40 request_method='GET',
41 route_name=route_name)
42
43 config.add_route('global_integrations_create',
44 ADMIN_PREFIX + '/integrations/{integration}/new',
45 custom_predicates=(valid_integration,))
46 config.add_route('global_integrations_edit',
47 ADMIN_PREFIX + '/integrations/{integration}/{integration_id}',
48 custom_predicates=(valid_integration,))
49 for route_name in ['global_integrations_create', 'global_integrations_edit']:
50 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
51 attr='settings_get',
52 renderer='rhodecode:templates/admin/integrations/edit.html',
53 request_method='GET',
54 route_name=route_name)
55 config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
56 attr='settings_post',
57 renderer='rhodecode:templates/admin/integrations/edit.html',
58 request_method='POST',
59 route_name=route_name)
60
61 config.add_route('repo_integrations_home',
62 add_route_requirements(
63 '{repo_name}/settings/integrations',
64 URL_NAME_REQUIREMENTS
65 ),
66 custom_predicates=(valid_repo,))
67 config.add_route('repo_integrations_list',
68 add_route_requirements(
69 '{repo_name}/settings/integrations/{integration}',
70 URL_NAME_REQUIREMENTS
71 ),
72 custom_predicates=(valid_repo, valid_integration))
73 for route_name in ['repo_integrations_home', 'repo_integrations_list']:
74 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
75 attr='index',
76 request_method='GET',
77 route_name=route_name)
78
79 config.add_route('repo_integrations_create',
80 add_route_requirements(
81 '{repo_name}/settings/integrations/{integration}/new',
82 URL_NAME_REQUIREMENTS
83 ),
84 custom_predicates=(valid_repo, valid_integration))
85 config.add_route('repo_integrations_edit',
86 add_route_requirements(
87 '{repo_name}/settings/integrations/{integration}/{integration_id}',
88 URL_NAME_REQUIREMENTS
89 ),
90 custom_predicates=(valid_repo, valid_integration))
91 for route_name in ['repo_integrations_edit', 'repo_integrations_create']:
92 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
93 attr='settings_get',
94 renderer='rhodecode:templates/admin/integrations/edit.html',
95 request_method='GET',
96 route_name=route_name)
97 config.add_view('rhodecode.integrations.views.RepoIntegrationsView',
98 attr='settings_post',
99 renderer='rhodecode:templates/admin/integrations/edit.html',
100 request_method='POST',
101 route_name=route_name)
102
103
104 def valid_repo(info, request):
105 repo = Repository.get_by_repo_name(info['match']['repo_name'])
106 if repo:
107 return True
108
109
110 def valid_integration(info, request):
111 integration_type = info['match']['integration']
112 integration_id = info['match'].get('integration_id')
113 repo_name = info['match'].get('repo_name')
114
115 if integration_type not in integration_type_registry:
116 return False
117
118 repo = None
119 if repo_name:
120 repo = Repository.get_by_repo_name(info['match']['repo_name'])
121 if not repo:
122 return False
123
124 if integration_id:
125 integration = Integration.get(integration_id)
126 if not integration:
127 return False
128 if integration.integration_type != integration_type:
129 return False
130 if repo and repo.repo_id != integration.repo_id:
131 return False
132
133 return True
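The custom predicates act as guards on route matching; a small illustration of the contract, assuming the slack type registered in rhodecode.integrations and a match dict with no repo or integration id:

    from rhodecode.integrations.routes import valid_integration

    info = {'match': {'integration': 'slack',
                      'integration_id': None,
                      'repo_name': None}}
    valid_integration(info, request=None)    # -> True, route matches

    info['match']['integration'] = 'unknown'
    valid_integration(info, request=None)    # -> False, request falls through to a 404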
@@ -0,0 +1,48 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22
23 from rhodecode.translation import lazy_ugettext
24
25
26 class IntegrationSettingsSchemaBase(colander.MappingSchema):
27 """
28 This base schema is intended for use in integrations.
29 It adds a few default settings (e.g., "enabled"), so that integration
30 authors don't have to maintain a bunch of boilerplate.
31 """
32 enabled = colander.SchemaNode(
33 colander.Bool(),
34 default=True,
35 description=lazy_ugettext('Enable or disable this integration.'),
36 missing=False,
37 title=lazy_ugettext('Enabled'),
38 widget='bool',
39 )
40
41 name = colander.SchemaNode(
42 colander.String(),
43 description=lazy_ugettext('Short name for this integration.'),
44 missing=colander.required,
45 title=lazy_ugettext('Integration name'),
46 widget='string',
47 )
48
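Integration authors are expected to subclass this base schema rather than redefine the common fields; a minimal hypothetical example adding one extra setting and validating input:

    import colander
    from rhodecode.integrations.schema import IntegrationSettingsSchemaBase

    class MySettingsSchema(IntegrationSettingsSchemaBase):
        # 'enabled' and 'name' are inherited from the base schema
        api_url = colander.SchemaNode(
            colander.String(),
            title='API URL',
            missing='',
            widget='string',
        )

    schema = MySettingsSchema()
    data = schema.deserialize({'name': 'my integration'})
    # roughly: {'enabled': False, 'name': 'my integration', 'api_url': ''}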
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,43 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
22
23
24 class IntegrationTypeBase(object):
25 """ Base class for IntegrationType plugins """
26
27 def __init__(self, settings):
28 """
29 :param settings: dict of settings to be used for the integration
30 """
31 self.settings = settings
32
33
34 @classmethod
35 def settings_schema(cls):
36 """
37 A colander schema of settings for the integration type
38
39 Subclasses can return their own schema but should always
40 inherit from IntegrationSettingsSchemaBase
41 """
42 return IntegrationSettingsSchemaBase()
43
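A minimal hypothetical plugin built on this base class, following the same pattern the Slack type below uses (a key, a display_name and a send_event method):

    from rhodecode.integrations.types.base import IntegrationTypeBase

    class LogIntegrationType(IntegrationTypeBase):
        """Toy integration that only logs the events it receives."""
        key = 'log'                # unique key used by the registry
        display_name = 'Log'

        def send_event(self, event):
            # self.settings is the settings dict passed to __init__
            print('got %r with settings %r' % (event, self.settings))

    # settings_schema() falls back to IntegrationSettingsSchemaBase
    # unless the subclass overrides it.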
@@ -0,0 +1,199 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 from __future__ import unicode_literals
22
23 import re
24 import logging
25 import requests
26 import colander
27 from celery.task import task
28 from mako.template import Template
29
30 from rhodecode import events
31 from rhodecode.translation import lazy_ugettext
32 from rhodecode.lib import helpers as h
33 from rhodecode.lib.celerylib import run_task
34 from rhodecode.lib.colander_utils import strip_whitespace
35 from rhodecode.integrations.types.base import IntegrationTypeBase
36 from rhodecode.integrations.schema import IntegrationSettingsSchemaBase
37
38 log = logging.getLogger(__name__)
39
40
41 class SlackSettingsSchema(IntegrationSettingsSchemaBase):
42 service = colander.SchemaNode(
43 colander.String(),
44 title=lazy_ugettext('Slack service URL'),
45 description=h.literal(lazy_ugettext(
46 'This can be set up at the '
47 '<a href="https://my.slack.com/services/new/incoming-webhook/">'
48 'slack app manager</a>')),
49 default='',
50 placeholder='https://hooks.slack.com/services/...',
51 preparer=strip_whitespace,
52 validator=colander.url,
53 widget='string'
54 )
55 username = colander.SchemaNode(
56 colander.String(),
57 title=lazy_ugettext('Username'),
58 description=lazy_ugettext('Username to show notifications coming from.'),
59 missing='Rhodecode',
60 preparer=strip_whitespace,
61 widget='string',
62 placeholder='Rhodecode'
63 )
64 channel = colander.SchemaNode(
65 colander.String(),
66 title=lazy_ugettext('Channel'),
67 description=lazy_ugettext('Channel to send notifications to.'),
68 missing='',
69 preparer=strip_whitespace,
70 widget='string',
71 placeholder='#general'
72 )
73 icon_emoji = colander.SchemaNode(
74 colander.String(),
75 title=lazy_ugettext('Emoji'),
76 description=lazy_ugettext('Emoji to use, e.g. :studio_microphone:'),
77 missing='',
78 preparer=strip_whitespace,
79 widget='string',
80 placeholder=':studio_microphone:'
81 )
82
83
84 repo_push_template = Template(r'''
85 *${data['actor']['username']}* pushed to \
86 %if data['push']['branches']:
87 ${len(data['push']['branches']) > 1 and 'branches' or 'branch'} \
88 ${', '.join('<%s|%s>' % (branch['url'], branch['name']) for branch in data['push']['branches'])} \
89 %else:
90 unknown branch \
91 %endif
92 in <${data['repo']['url']}|${data['repo']['repo_name']}>
93 >>>
94 %for commit in data['push']['commits']:
95 <${commit['url']}|${commit['short_id']}> - ${commit['message_html']|html_to_slack_links}
96 %endfor
97 ''')
98
99
100 class SlackIntegrationType(IntegrationTypeBase):
101 key = 'slack'
102 display_name = lazy_ugettext('Slack')
103 SettingsSchema = SlackSettingsSchema
104 valid_events = [
105 events.PullRequestCloseEvent,
106 events.PullRequestMergeEvent,
107 events.PullRequestUpdateEvent,
108 events.PullRequestReviewEvent,
109 events.PullRequestCreateEvent,
110 events.RepoPushEvent,
111 events.RepoCreateEvent,
112 ]
113
114 def send_event(self, event):
115 if event.__class__ not in self.valid_events:
116 log.debug('event not valid: %r' % event)
117 return
118
119 if event.name not in self.settings['events']:
120 log.debug('event ignored: %r' % event)
121 return
122
123 data = event.as_dict()
124
125 text = '*%s* caused a *%s* event' % (
126 data['actor']['username'], event.name)
127
128 if isinstance(event, events.PullRequestEvent):
129 text = self.format_pull_request_event(event, data)
130 elif isinstance(event, events.RepoPushEvent):
131 text = self.format_repo_push_event(data)
132 elif isinstance(event, events.RepoCreateEvent):
133 text = self.format_repo_create_event(data)
134 else:
135 log.error('unhandled event type: %r' % event)
136
137 run_task(post_text_to_slack, self.settings, text)
138
139 @classmethod
140 def settings_schema(cls):
141 schema = SlackSettingsSchema()
142 schema.add(colander.SchemaNode(
143 colander.Set(),
144 widget='checkbox_list',
145 choices=sorted([e.name for e in cls.valid_events]),
146 description="Events activated for this integration",
147 default=[e.name for e in cls.valid_events],
148 name='events'
149 ))
150 return schema
151
152 def format_pull_request_event(self, event, data):
153 action = {
154 events.PullRequestCloseEvent: 'closed',
155 events.PullRequestMergeEvent: 'merged',
156 events.PullRequestUpdateEvent: 'updated',
157 events.PullRequestReviewEvent: 'reviewed',
158 events.PullRequestCreateEvent: 'created',
159 }.get(event.__class__, '<unknown action>')
160
161 return ('Pull request <{url}|#{number}> ({title}) '
162 '{action} by {user}').format(
163 user=data['actor']['username'],
164 number=data['pullrequest']['pull_request_id'],
165 url=data['pullrequest']['url'],
166 title=data['pullrequest']['title'],
167 action=action
168 )
169
170 def format_repo_push_event(self, data):
171 result = repo_push_template.render(
172 data=data,
173 html_to_slack_links=html_to_slack_links,
174 )
175 return result
176
177 def format_repo_create_event(self, data):
178 return '<{}|{}> ({}) repository created by *{}*'.format(
179 data['repo']['url'],
180 data['repo']['repo_name'],
181 data['repo']['repo_type'],
182 data['actor']['username'],
183 )
184
185
186 def html_to_slack_links(message):
187 return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
188 r'<\1|\2>', message)
189
190
191 @task(ignore_result=True)
192 def post_text_to_slack(settings, text):
193 resp = requests.post(settings['service'], json={
194 "channel": settings.get('channel', ''),
195 "username": settings.get('username', 'Rhodecode'),
196 "text": text,
197 "icon_emoji": settings.get('icon_emoji', ':studio_microphone:')
198 })
199 resp.raise_for_status() # raise exception on a failed request
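For reference, the html_to_slack_links helper at the bottom rewrites anchors in the rendered commit message into Slack's link markup; an illustrative input/output pair:

    sample = 'fixes <a class="issue" href="http://example.com/issue/1">#1</a>'
    html_to_slack_links(sample)
    # -> 'fixes <http://example.com/issue/1|#1>'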
@@ -0,0 +1,257 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import colander
22 import logging
23 import pylons
24
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden
26 from pyramid.renderers import render
27 from pyramid.response import Response
28
29 from rhodecode.lib import auth
30 from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
31 from rhodecode.model.db import Repository, Session, Integration
32 from rhodecode.model.scm import ScmModel
33 from rhodecode.model.integration import IntegrationModel
34 from rhodecode.admin.navigation import navigation_list
35 from rhodecode.translation import _
36 from rhodecode.integrations import integration_type_registry
37
38 log = logging.getLogger(__name__)
39
40
41 class IntegrationSettingsViewBase(object):
42 """ Base Integration settings view used by both repo / global settings """
43
44 def __init__(self, context, request):
45 self.context = context
46 self.request = request
47 self._load_general_context()
48
49 if not self.perm_check(request.user):
50 raise HTTPForbidden()
51
52 def _load_general_context(self):
53 """
54 This avoids boilerplate for repo/global+list/edit+views/templates
55 by loading all possible contexts at the same time; it should be
56 split up into separate functions once more "contexts" exist
57 """
58
59 self.IntegrationType = None
60 self.repo = None
61 self.integration = None
62 self.integrations = {}
63
64 request = self.request
65
66 if 'repo_name' in request.matchdict: # we're in a repo context
67 repo_name = request.matchdict['repo_name']
68 self.repo = Repository.get_by_repo_name(repo_name)
69
70 if 'integration' in request.matchdict: # we're in integration context
71 integration_type = request.matchdict['integration']
72 self.IntegrationType = integration_type_registry[integration_type]
73
74 if 'integration_id' in request.matchdict: # single integration context
75 integration_id = request.matchdict['integration_id']
76 self.integration = Integration.get(integration_id)
77 else: # list integrations context
78 for integration in IntegrationModel().get_integrations(self.repo):
79 self.integrations.setdefault(
80 integration.integration_type, []).append(integration)
81
82 self.settings = self.integration and self.integration.settings or {}
83
84 def _template_c_context(self):
85 # TODO: dan: this is a stopgap in order to inherit from current pylons
86 # based admin/repo settings templates - this should be removed entirely
87 # after port to pyramid
88
89 c = pylons.tmpl_context
90 c.active = 'integrations'
91 c.rhodecode_user = self.request.user
92 c.repo = self.repo
93 c.repo_name = self.repo and self.repo.repo_name or None
94 if self.repo:
95 c.repo_info = self.repo
96 c.rhodecode_db_repo = self.repo
97 c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
98 else:
99 c.navlist = navigation_list(self.request)
100
101 return c
102
103 def _form_schema(self):
104 return self.IntegrationType.settings_schema()
105
106 def settings_get(self, defaults=None, errors=None):
107 """
108 View that displays the plugin settings as a form.
109 """
110 defaults = defaults or {}
111 errors = errors or {}
112
113 schema = self._form_schema()
114
115 if not defaults:
116 if self.integration:
117 defaults['enabled'] = self.integration.enabled
118 defaults['name'] = self.integration.name
119 else:
120 if self.repo:
121 scope = self.repo.repo_name
122 else:
123 scope = _('Global')
124
125 defaults['name'] = '{} {} integration'.format(scope,
126 self.IntegrationType.display_name)
127 defaults['enabled'] = True
128
129 for node in schema:
130 setting = self.settings.get(node.name)
131 if setting is not None:
132 defaults.setdefault(node.name, setting)
133 else:
134 if node.default:
135 defaults.setdefault(node.name, node.default)
136
137 template_context = {
138 'defaults': defaults,
139 'errors': errors,
140 'schema': schema,
141 'current_IntegrationType': self.IntegrationType,
142 'integration': self.integration,
143 'settings': self.settings,
144 'resource': self.context,
145 'c': self._template_c_context(),
146 }
147
148 return template_context
149
150 @auth.CSRFRequired()
151 def settings_post(self):
152 """
153 View that validates and stores the plugin settings.
154 """
155 if self.request.params.get('delete'):
156 Session().delete(self.integration)
157 Session().commit()
158 self.request.session.flash(
159 _('Integration {integration_name} deleted successfully.').format(
160 integration_name=self.integration.name),
161 queue='success')
162 if self.repo:
163 redirect_to = self.request.route_url(
164 'repo_integrations_home', repo_name=self.repo.repo_name)
165 else:
166 redirect_to = self.request.route_url('global_integrations_home')
167 raise HTTPFound(redirect_to)
168
169 schema = self._form_schema()
170
171 params = {}
172 for node in schema.children:
173 if type(node.typ) in (colander.Set, colander.List):
174 val = self.request.params.getall(node.name)
175 else:
176 val = self.request.params.get(node.name)
177 if val:
178 params[node.name] = val
179
180 try:
181 valid_data = schema.deserialize(params)
182 except colander.Invalid, e:
183 # Display error message and display form again.
184 self.request.session.flash(
185 _('Errors exist when saving plugin settings. '
186 'Please check the form inputs.'),
187 queue='error')
188 return self.settings_get(errors=e.asdict(), defaults=params)
189
190 if not self.integration:
191 self.integration = Integration(
192 integration_type=self.IntegrationType.key)
193 if self.repo:
194 self.integration.repo = self.repo
195 Session.add(self.integration)
196
197 self.integration.enabled = valid_data.pop('enabled', False)
198 self.integration.name = valid_data.pop('name')
199 self.integration.settings = valid_data
200
201 Session.commit()
202
203 # Display success message and redirect.
204 self.request.session.flash(
205 _('Integration {integration_name} updated successfully.').format(
206 integration_name=self.IntegrationType.display_name),
207 queue='success')
208 if self.repo:
209 redirect_to = self.request.route_url(
210 'repo_integrations_edit', repo_name=self.repo.repo_name,
211 integration=self.integration.integration_type,
212 integration_id=self.integration.integration_id)
213 else:
214 redirect_to = self.request.route_url(
215 'global_integrations_edit',
216 integration=self.integration.integration_type,
217 integration_id=self.integration.integration_id)
218
219 return HTTPFound(redirect_to)
220
221 def index(self):
222 current_integrations = self.integrations
223 if self.IntegrationType:
224 current_integrations = {
225 self.IntegrationType.key: self.integrations.get(
226 self.IntegrationType.key, [])
227 }
228
229 template_context = {
230 'current_IntegrationType': self.IntegrationType,
231 'current_integrations': current_integrations,
232 'current_integration': 'none',
233 'available_integrations': integration_type_registry,
234 'c': self._template_c_context()
235 }
236
237 if self.repo:
238 html = render('rhodecode:templates/admin/integrations/list.html',
239 template_context,
240 request=self.request)
241 else:
242 html = render('rhodecode:templates/admin/integrations/list.html',
243 template_context,
244 request=self.request)
245
246 return Response(html)
247
248
249 class GlobalIntegrationsView(IntegrationSettingsViewBase):
250 def perm_check(self, user):
251 return auth.HasPermissionAll('hg.admin').check_permissions(user=user)
252
253
254 class RepoIntegrationsView(IntegrationSettingsViewBase):
255 def perm_check(self, user):
256 return auth.HasRepoPermissionAll('repository.admin'
257 )(repo_name=self.repo.repo_name, user=user)
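In settings_post the validated form data is split between first-class Integration columns and the remaining type-specific settings; with hypothetical values the stored result looks roughly like this:

    valid_data = {
        'enabled': True,
        'name': 'Global slack integration',
        'service': 'https://hooks.slack.com/services/...',
    }

    integration.enabled = valid_data.pop('enabled', False)    # -> True
    integration.name = valid_data.pop('name')                 # -> 'Global slack integration'
    integration.settings = valid_data                          # -> only the 'service' key remains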
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
@@ -0,0 +1,27 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 import sqlalchemy as sa
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8
9 from rhodecode.lib.dbmigrate.versions import _reset_base
10
11 log = logging.getLogger(__name__)
12
13
14 def upgrade(migrate_engine):
15 """
16 Upgrade operations go here.
17 Don't create your own engine; bind migrate_engine to your metadata
18 """
19 _reset_base(migrate_engine)
20 from rhodecode.lib.dbmigrate.schema import db_4_3_0_0
21
22 integrations_table = db_4_3_0_0.Integration.__table__
23 integrations_table.create()
24
25
26 def downgrade(migrate_engine):
27 pass
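The downgrade is left as a no-op; a symmetric sketch (an assumption, not part of this commit) would drop the table again:

    def downgrade(migrate_engine):
        _reset_base(migrate_engine)
        from rhodecode.lib.dbmigrate.schema import db_4_3_0_0
        db_4_3_0_0.Integration.__table__.drop()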
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
@@ -1,1654 +1,1641 b''
1 1 {
2 2 Babel = super.buildPythonPackage {
3 3 name = "Babel-1.3";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [pytz];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
9 9 md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
10 10 };
11 11 meta = {
12 12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 13 };
14 14 };
15 15 Beaker = super.buildPythonPackage {
16 16 name = "Beaker-1.7.0";
17 17 buildInputs = with self; [];
18 18 doCheck = false;
19 19 propagatedBuildInputs = with self; [];
20 20 src = fetchurl {
21 21 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
22 22 md5 = "386be3f7fe427358881eee4622b428b3";
23 23 };
24 24 meta = {
25 25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 26 };
27 27 };
28 28 CProfileV = super.buildPythonPackage {
29 29 name = "CProfileV-1.0.6";
30 30 buildInputs = with self; [];
31 31 doCheck = false;
32 32 propagatedBuildInputs = with self; [bottle];
33 33 src = fetchurl {
34 34 url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
35 35 md5 = "08c7c242b6e64237bc53c5d13537e03d";
36 36 };
37 37 meta = {
38 38 license = [ pkgs.lib.licenses.mit ];
39 39 };
40 40 };
41 41 Fabric = super.buildPythonPackage {
42 42 name = "Fabric-1.10.0";
43 43 buildInputs = with self; [];
44 44 doCheck = false;
45 45 propagatedBuildInputs = with self; [paramiko];
46 46 src = fetchurl {
47 47 url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
48 48 md5 = "2cb96473387f0e7aa035210892352f4a";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 FormEncode = super.buildPythonPackage {
55 55 name = "FormEncode-1.2.4";
56 56 buildInputs = with self; [];
57 57 doCheck = false;
58 58 propagatedBuildInputs = with self; [];
59 59 src = fetchurl {
60 60 url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
61 61 md5 = "6bc17fb9aed8aea198975e888e2077f4";
62 62 };
63 63 meta = {
64 64 license = [ pkgs.lib.licenses.psfl ];
65 65 };
66 66 };
67 67 Jinja2 = super.buildPythonPackage {
68 68 name = "Jinja2-2.7.3";
69 69 buildInputs = with self; [];
70 70 doCheck = false;
71 71 propagatedBuildInputs = with self; [MarkupSafe];
72 72 src = fetchurl {
73 73 url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
74 74 md5 = "b9dffd2f3b43d673802fe857c8445b1a";
75 75 };
76 76 meta = {
77 77 license = [ pkgs.lib.licenses.bsdOriginal ];
78 78 };
79 79 };
80 80 Mako = super.buildPythonPackage {
81 81 name = "Mako-1.0.1";
82 82 buildInputs = with self; [];
83 83 doCheck = false;
84 84 propagatedBuildInputs = with self; [MarkupSafe];
85 85 src = fetchurl {
86 86 url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
87 87 md5 = "9f0aafd177b039ef67b90ea350497a54";
88 88 };
89 89 meta = {
90 90 license = [ pkgs.lib.licenses.mit ];
91 91 };
92 92 };
93 93 Markdown = super.buildPythonPackage {
94 94 name = "Markdown-2.6.2";
95 95 buildInputs = with self; [];
96 96 doCheck = false;
97 97 propagatedBuildInputs = with self; [];
98 98 src = fetchurl {
99 99 url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
100 100 md5 = "256d19afcc564dc4ce4c229bb762f7ae";
101 101 };
102 102 meta = {
103 103 license = [ pkgs.lib.licenses.bsdOriginal ];
104 104 };
105 105 };
106 106 MarkupSafe = super.buildPythonPackage {
107 107 name = "MarkupSafe-0.23";
108 108 buildInputs = with self; [];
109 109 doCheck = false;
110 110 propagatedBuildInputs = with self; [];
111 111 src = fetchurl {
112 112 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
113 113 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
114 114 };
115 115 meta = {
116 116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 117 };
118 118 };
119 119 MySQL-python = super.buildPythonPackage {
120 120 name = "MySQL-python-1.2.5";
121 121 buildInputs = with self; [];
122 122 doCheck = false;
123 123 propagatedBuildInputs = with self; [];
124 124 src = fetchurl {
125 125 url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
126 126 md5 = "654f75b302db6ed8dc5a898c625e030c";
127 127 };
128 128 meta = {
129 129 license = [ pkgs.lib.licenses.gpl1 ];
130 130 };
131 131 };
132 132 Paste = super.buildPythonPackage {
133 133 name = "Paste-2.0.2";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [six];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
139 139 md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.mit ];
143 143 };
144 144 };
145 145 PasteDeploy = super.buildPythonPackage {
146 146 name = "PasteDeploy-1.5.2";
147 147 buildInputs = with self; [];
148 148 doCheck = false;
149 149 propagatedBuildInputs = with self; [];
150 150 src = fetchurl {
151 151 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
152 152 md5 = "352b7205c78c8de4987578d19431af3b";
153 153 };
154 154 meta = {
155 155 license = [ pkgs.lib.licenses.mit ];
156 156 };
157 157 };
158 158 PasteScript = super.buildPythonPackage {
159 159 name = "PasteScript-1.7.5";
160 160 buildInputs = with self; [];
161 161 doCheck = false;
162 162 propagatedBuildInputs = with self; [Paste PasteDeploy];
163 163 src = fetchurl {
164 164 url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
165 165 md5 = "4c72d78dcb6bb993f30536842c16af4d";
166 166 };
167 167 meta = {
168 168 license = [ pkgs.lib.licenses.mit ];
169 169 };
170 170 };
171 171 Pygments = super.buildPythonPackage {
172 172 name = "Pygments-2.1.3";
173 173 buildInputs = with self; [];
174 174 doCheck = false;
175 175 propagatedBuildInputs = with self; [];
176 176 src = fetchurl {
177 177 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
178 178 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 Pylons = super.buildPythonPackage {
185 185 name = "Pylons-1.0.1";
186 186 buildInputs = with self; [];
187 187 doCheck = false;
188 188 propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
189 189 src = fetchurl {
190 190 url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
191 191 md5 = "6cb880d75fa81213192142b07a6e4915";
192 192 };
193 193 meta = {
194 194 license = [ pkgs.lib.licenses.bsdOriginal ];
195 195 };
196 196 };
197 197 Pyro4 = super.buildPythonPackage {
198 198 name = "Pyro4-4.41";
199 199 buildInputs = with self; [];
200 200 doCheck = false;
201 201 propagatedBuildInputs = with self; [serpent];
202 202 src = fetchurl {
203 203 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
204 204 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
205 205 };
206 206 meta = {
207 207 license = [ pkgs.lib.licenses.mit ];
208 208 };
209 209 };
210 210 Routes = super.buildPythonPackage {
211 211 name = "Routes-1.13";
212 212 buildInputs = with self; [];
213 213 doCheck = false;
214 214 propagatedBuildInputs = with self; [repoze.lru];
215 215 src = fetchurl {
216 216 url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
217 217 md5 = "d527b0ab7dd9172b1275a41f97448783";
218 218 };
219 219 meta = {
220 220 license = [ pkgs.lib.licenses.bsdOriginal ];
221 221 };
222 222 };
223 223 SQLAlchemy = super.buildPythonPackage {
224 224 name = "SQLAlchemy-0.9.9";
225 225 buildInputs = with self; [];
226 226 doCheck = false;
227 227 propagatedBuildInputs = with self; [];
228 228 src = fetchurl {
229 229 url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
230 230 md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.mit ];
234 234 };
235 235 };
236 236 Sphinx = super.buildPythonPackage {
237 237 name = "Sphinx-1.2.2";
238 238 buildInputs = with self; [];
239 239 doCheck = false;
240 240 propagatedBuildInputs = with self; [Pygments docutils Jinja2];
241 241 src = fetchurl {
242 242 url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
243 243 md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
244 244 };
245 245 meta = {
246 246 license = [ pkgs.lib.licenses.bsdOriginal ];
247 247 };
248 248 };
249 249 Tempita = super.buildPythonPackage {
250 250 name = "Tempita-0.5.2";
251 251 buildInputs = with self; [];
252 252 doCheck = false;
253 253 propagatedBuildInputs = with self; [];
254 254 src = fetchurl {
255 255 url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
256 256 md5 = "4c2f17bb9d481821c41b6fbee904cea1";
257 257 };
258 258 meta = {
259 259 license = [ pkgs.lib.licenses.mit ];
260 260 };
261 261 };
262 262 URLObject = super.buildPythonPackage {
263 263 name = "URLObject-2.4.0";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
269 269 md5 = "2ed819738a9f0a3051f31dc9924e3065";
270 270 };
271 271 meta = {
272 272 license = [ ];
273 273 };
274 274 };
275 275 WebError = super.buildPythonPackage {
276 276 name = "WebError-0.10.3";
277 277 buildInputs = with self; [];
278 278 doCheck = false;
279 279 propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
280 280 src = fetchurl {
281 281 url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
282 282 md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 WebHelpers = super.buildPythonPackage {
289 289 name = "WebHelpers-1.3";
290 290 buildInputs = with self; [];
291 291 doCheck = false;
292 292 propagatedBuildInputs = with self; [MarkupSafe];
293 293 src = fetchurl {
294 294 url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
295 295 md5 = "32749ffadfc40fea51075a7def32588b";
296 296 };
297 297 meta = {
298 298 license = [ pkgs.lib.licenses.bsdOriginal ];
299 299 };
300 300 };
301 301 WebHelpers2 = super.buildPythonPackage {
302 302 name = "WebHelpers2-2.0";
303 303 buildInputs = with self; [];
304 304 doCheck = false;
305 305 propagatedBuildInputs = with self; [MarkupSafe six];
306 306 src = fetchurl {
307 307 url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
308 308 md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
309 309 };
310 310 meta = {
311 311 license = [ pkgs.lib.licenses.mit ];
312 312 };
313 313 };
314 314 WebOb = super.buildPythonPackage {
315 315 name = "WebOb-1.3.1";
316 316 buildInputs = with self; [];
317 317 doCheck = false;
318 318 propagatedBuildInputs = with self; [];
319 319 src = fetchurl {
320 320 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
321 321 md5 = "20918251c5726956ba8fef22d1556177";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.mit ];
325 325 };
326 326 };
327 327 WebTest = super.buildPythonPackage {
328 328 name = "WebTest-1.4.3";
329 329 buildInputs = with self; [];
330 330 doCheck = false;
331 331 propagatedBuildInputs = with self; [WebOb];
332 332 src = fetchurl {
333 333 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
334 334 md5 = "631ce728bed92c681a4020a36adbc353";
335 335 };
336 336 meta = {
337 337 license = [ pkgs.lib.licenses.mit ];
338 338 };
339 339 };
340 340 Whoosh = super.buildPythonPackage {
341 341 name = "Whoosh-2.7.0";
342 342 buildInputs = with self; [];
343 343 doCheck = false;
344 344 propagatedBuildInputs = with self; [];
345 345 src = fetchurl {
346 346 url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
347 347 md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
348 348 };
349 349 meta = {
350 350 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
351 351 };
352 352 };
353 353 alembic = super.buildPythonPackage {
354 354 name = "alembic-0.8.4";
355 355 buildInputs = with self; [];
356 356 doCheck = false;
357 357 propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
358 358 src = fetchurl {
359 359 url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
360 360 md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
361 361 };
362 362 meta = {
363 363 license = [ pkgs.lib.licenses.mit ];
364 364 };
365 365 };
366 366 amqplib = super.buildPythonPackage {
367 367 name = "amqplib-1.0.2";
368 368 buildInputs = with self; [];
369 369 doCheck = false;
370 370 propagatedBuildInputs = with self; [];
371 371 src = fetchurl {
372 372 url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
373 373 md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
374 374 };
375 375 meta = {
376 376 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
377 377 };
378 378 };
379 379 anyjson = super.buildPythonPackage {
380 380 name = "anyjson-0.3.3";
381 381 buildInputs = with self; [];
382 382 doCheck = false;
383 383 propagatedBuildInputs = with self; [];
384 384 src = fetchurl {
385 385 url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
386 386 md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal ];
390 390 };
391 391 };
392 392 appenlight-client = super.buildPythonPackage {
393 393 name = "appenlight-client-0.6.14";
394 394 buildInputs = with self; [];
395 395 doCheck = false;
396 396 propagatedBuildInputs = with self; [WebOb requests];
397 397 src = fetchurl {
398 398 url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
399 399 md5 = "578c69b09f4356d898fff1199b98a95c";
400 400 };
401 401 meta = {
402 402 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
403 403 };
404 404 };
405 405 authomatic = super.buildPythonPackage {
406 406 name = "authomatic-0.1.0.post1";
407 407 buildInputs = with self; [];
408 408 doCheck = false;
409 409 propagatedBuildInputs = with self; [];
410 410 src = fetchurl {
411 411 url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
412 412 md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
413 413 };
414 414 meta = {
415 415 license = [ pkgs.lib.licenses.mit ];
416 416 };
417 417 };
418 418 backport-ipaddress = super.buildPythonPackage {
419 419 name = "backport-ipaddress-0.1";
420 420 buildInputs = with self; [];
421 421 doCheck = false;
422 422 propagatedBuildInputs = with self; [];
423 423 src = fetchurl {
424 424 url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
425 425 md5 = "9c1f45f4361f71b124d7293a60006c05";
426 426 };
427 427 meta = {
428 428 license = [ pkgs.lib.licenses.psfl ];
429 429 };
430 430 };
431 431 bottle = super.buildPythonPackage {
432 432 name = "bottle-0.12.8";
433 433 buildInputs = with self; [];
434 434 doCheck = false;
435 435 propagatedBuildInputs = with self; [];
436 436 src = fetchurl {
437 437 url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
438 438 md5 = "13132c0a8f607bf860810a6ee9064c5b";
439 439 };
440 440 meta = {
441 441 license = [ pkgs.lib.licenses.mit ];
442 442 };
443 443 };
444 444 bumpversion = super.buildPythonPackage {
445 445 name = "bumpversion-0.5.3";
446 446 buildInputs = with self; [];
447 447 doCheck = false;
448 448 propagatedBuildInputs = with self; [];
449 449 src = fetchurl {
450 450 url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
451 451 md5 = "c66a3492eafcf5ad4b024be9fca29820";
452 452 };
453 453 meta = {
454 454 license = [ pkgs.lib.licenses.mit ];
455 455 };
456 456 };
457 457 celery = super.buildPythonPackage {
458 458 name = "celery-2.2.10";
459 459 buildInputs = with self; [];
460 460 doCheck = false;
461 461 propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
462 462 src = fetchurl {
463 463 url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
464 464 md5 = "898bc87e54f278055b561316ba73e222";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 click = super.buildPythonPackage {
471 471 name = "click-5.1";
472 472 buildInputs = with self; [];
473 473 doCheck = false;
474 474 propagatedBuildInputs = with self; [];
475 475 src = fetchurl {
476 476 url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
477 477 md5 = "9c5323008cccfe232a8b161fc8196d41";
478 478 };
479 479 meta = {
480 480 license = [ pkgs.lib.licenses.bsdOriginal ];
481 481 };
482 482 };
483 483 colander = super.buildPythonPackage {
484 484 name = "colander-1.2";
485 485 buildInputs = with self; [];
486 486 doCheck = false;
487 487 propagatedBuildInputs = with self; [translationstring iso8601];
488 488 src = fetchurl {
489 489 url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
490 490 md5 = "83db21b07936a0726e588dae1914b9ed";
491 491 };
492 492 meta = {
493 493 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
494 494 };
495 495 };
496 496 configobj = super.buildPythonPackage {
497 497 name = "configobj-5.0.6";
498 498 buildInputs = with self; [];
499 499 doCheck = false;
500 500 propagatedBuildInputs = with self; [six];
501 501 src = fetchurl {
502 502 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
503 503 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
504 504 };
505 505 meta = {
506 506 license = [ pkgs.lib.licenses.bsdOriginal ];
507 507 };
508 508 };
509 509 cov-core = super.buildPythonPackage {
510 510 name = "cov-core-1.15.0";
511 511 buildInputs = with self; [];
512 512 doCheck = false;
513 513 propagatedBuildInputs = with self; [coverage];
514 514 src = fetchurl {
515 515 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
516 516 md5 = "f519d4cb4c4e52856afb14af52919fe6";
517 517 };
518 518 meta = {
519 519 license = [ pkgs.lib.licenses.mit ];
520 520 };
521 521 };
522 522 coverage = super.buildPythonPackage {
523 523 name = "coverage-3.7.1";
524 524 buildInputs = with self; [];
525 525 doCheck = false;
526 526 propagatedBuildInputs = with self; [];
527 527 src = fetchurl {
528 528 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
529 529 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
530 530 };
531 531 meta = {
532 532 license = [ pkgs.lib.licenses.bsdOriginal ];
533 533 };
534 534 };
535 535 cssselect = super.buildPythonPackage {
536 536 name = "cssselect-0.9.1";
537 537 buildInputs = with self; [];
538 538 doCheck = false;
539 539 propagatedBuildInputs = with self; [];
540 540 src = fetchurl {
541 541 url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
542 542 md5 = "c74f45966277dc7a0f768b9b0f3522ac";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.bsdOriginal ];
546 546 };
547 547 };
548 548 decorator = super.buildPythonPackage {
549 549 name = "decorator-3.4.2";
550 550 buildInputs = with self; [];
551 551 doCheck = false;
552 552 propagatedBuildInputs = with self; [];
553 553 src = fetchurl {
554 554 url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
555 555 md5 = "9e0536870d2b83ae27d58dbf22582f4d";
556 556 };
557 557 meta = {
558 558 license = [ pkgs.lib.licenses.bsdOriginal ];
559 559 };
560 560 };
561 561 docutils = super.buildPythonPackage {
562 562 name = "docutils-0.12";
563 563 buildInputs = with self; [];
564 564 doCheck = false;
565 565 propagatedBuildInputs = with self; [];
566 566 src = fetchurl {
567 567 url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
568 568 md5 = "4622263b62c5c771c03502afa3157768";
569 569 };
570 570 meta = {
571 571 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
572 572 };
573 573 };
574 574 dogpile.cache = super.buildPythonPackage {
575 575 name = "dogpile.cache-0.6.1";
576 576 buildInputs = with self; [];
577 577 doCheck = false;
578 578 propagatedBuildInputs = with self; [dogpile.core];
579 579 src = fetchurl {
580 580 url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
581 581 md5 = "35d7fb30f22bbd0685763d894dd079a9";
582 582 };
583 583 meta = {
584 584 license = [ pkgs.lib.licenses.bsdOriginal ];
585 585 };
586 586 };
587 587 dogpile.core = super.buildPythonPackage {
588 588 name = "dogpile.core-0.4.1";
589 589 buildInputs = with self; [];
590 590 doCheck = false;
591 591 propagatedBuildInputs = with self; [];
592 592 src = fetchurl {
593 593 url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
594 594 md5 = "01cb19f52bba3e95c9b560f39341f045";
595 595 };
596 596 meta = {
597 597 license = [ pkgs.lib.licenses.bsdOriginal ];
598 598 };
599 599 };
600 600 dulwich = super.buildPythonPackage {
601 601 name = "dulwich-0.12.0";
602 602 buildInputs = with self; [];
603 603 doCheck = false;
604 604 propagatedBuildInputs = with self; [];
605 605 src = fetchurl {
606 606 url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
607 607 md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
608 608 };
609 609 meta = {
610 610 license = [ pkgs.lib.licenses.gpl2Plus ];
611 611 };
612 612 };
613 613 ecdsa = super.buildPythonPackage {
614 614 name = "ecdsa-0.11";
615 615 buildInputs = with self; [];
616 616 doCheck = false;
617 617 propagatedBuildInputs = with self; [];
618 618 src = fetchurl {
619 619 url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
620 620 md5 = "8ef586fe4dbb156697d756900cb41d7c";
621 621 };
622 622 meta = {
623 623 license = [ pkgs.lib.licenses.mit ];
624 624 };
625 625 };
626 626 elasticsearch = super.buildPythonPackage {
627 627 name = "elasticsearch-2.3.0";
628 628 buildInputs = with self; [];
629 629 doCheck = false;
630 630 propagatedBuildInputs = with self; [urllib3];
631 631 src = fetchurl {
632 632 url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
633 633 md5 = "2550f3b51629cf1ef9636608af92c340";
634 634 };
635 635 meta = {
636 636 license = [ pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 elasticsearch-dsl = super.buildPythonPackage {
640 640 name = "elasticsearch-dsl-2.0.0";
641 641 buildInputs = with self; [];
642 642 doCheck = false;
643 643 propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
644 644 src = fetchurl {
645 645 url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
646 646 md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
647 647 };
648 648 meta = {
649 649 license = [ pkgs.lib.licenses.asl20 ];
650 650 };
651 651 };
652 652 flake8 = super.buildPythonPackage {
653 653 name = "flake8-2.4.1";
654 654 buildInputs = with self; [];
655 655 doCheck = false;
656 656 propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
657 657 src = fetchurl {
658 658 url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
659 659 md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
660 660 };
661 661 meta = {
662 662 license = [ pkgs.lib.licenses.mit ];
663 663 };
664 664 };
665 665 future = super.buildPythonPackage {
666 666 name = "future-0.14.3";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 671 url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
672 672 md5 = "e94079b0bd1fc054929e8769fc0f6083";
673 673 };
674 674 meta = {
675 675 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
676 676 };
677 677 };
678 678 futures = super.buildPythonPackage {
679 679 name = "futures-3.0.2";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
685 685 md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.bsdOriginal ];
689 689 };
690 690 };
691 691 gnureadline = super.buildPythonPackage {
692 692 name = "gnureadline-6.3.3";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
698 698 md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.gpl1 ];
702 702 };
703 703 };
704 704 gprof2dot = super.buildPythonPackage {
705 705 name = "gprof2dot-2015.12.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
711 711 md5 = "e23bf4e2f94db032750c193384b4165b";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "LGPL"; } ];
715 715 };
716 716 };
717 717 gunicorn = super.buildPythonPackage {
718 718 name = "gunicorn-19.6.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 723 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
724 724 md5 = "338e5e8a83ea0f0625f768dba4597530";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 infrae.cache = super.buildPythonPackage {
731 731 name = "infrae.cache-1.0.1";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [Beaker repoze.lru];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
737 737 md5 = "b09076a766747e6ed2a755cc62088e32";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.zpt21 ];
741 741 };
742 742 };
743 743 invoke = super.buildPythonPackage {
744 744 name = "invoke-0.13.0";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 749 url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
750 750 md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
751 751 };
752 752 meta = {
753 753 license = [ pkgs.lib.licenses.bsdOriginal ];
754 754 };
755 755 };
756 756 ipdb = super.buildPythonPackage {
757 757 name = "ipdb-0.8";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [ipython];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
763 763 md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.gpl1 ];
767 767 };
768 768 };
769 769 ipython = super.buildPythonPackage {
770 770 name = "ipython-3.1.0";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
776 776 md5 = "a749d90c16068687b0ec45a27e72ef8f";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 iso8601 = super.buildPythonPackage {
783 783 name = "iso8601-0.1.11";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
789 789 md5 = "b06d11cd14a64096f907086044f0fe38";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.mit ];
793 793 };
794 794 };
795 795 itsdangerous = super.buildPythonPackage {
796 796 name = "itsdangerous-0.24";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
802 802 md5 = "a3d55aa79369aef5345c036a8a26307f";
803 803 };
804 804 meta = {
805 805 license = [ pkgs.lib.licenses.bsdOriginal ];
806 806 };
807 807 };
808 808 kombu = super.buildPythonPackage {
809 809 name = "kombu-1.5.1";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [anyjson amqplib];
813 813 src = fetchurl {
814 814 url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
815 815 md5 = "50662f3c7e9395b3d0721fb75d100b63";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 lxml = super.buildPythonPackage {
822 822 name = "lxml-3.4.4";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 832 };
833 833 };
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
847 834 mccabe = super.buildPythonPackage {
848 835 name = "mccabe-0.3";
849 836 buildInputs = with self; [];
850 837 doCheck = false;
851 838 propagatedBuildInputs = with self; [];
852 839 src = fetchurl {
853 840 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
854 841 md5 = "81640948ff226f8c12b3277059489157";
855 842 };
856 843 meta = {
857 844 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
858 845 };
859 846 };
860 847 meld3 = super.buildPythonPackage {
861 848 name = "meld3-1.0.2";
862 849 buildInputs = with self; [];
863 850 doCheck = false;
864 851 propagatedBuildInputs = with self; [];
865 852 src = fetchurl {
866 853 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
867 854 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
868 855 };
869 856 meta = {
870 857 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
871 858 };
872 859 };
873 860 mock = super.buildPythonPackage {
874 861 name = "mock-1.0.1";
875 862 buildInputs = with self; [];
876 863 doCheck = false;
877 864 propagatedBuildInputs = with self; [];
878 865 src = fetchurl {
879 866 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
880 867 md5 = "869f08d003c289a97c1a6610faf5e913";
881 868 };
882 869 meta = {
883 870 license = [ pkgs.lib.licenses.bsdOriginal ];
884 871 };
885 872 };
886 873 msgpack-python = super.buildPythonPackage {
887 874 name = "msgpack-python-0.4.6";
888 875 buildInputs = with self; [];
889 876 doCheck = false;
890 877 propagatedBuildInputs = with self; [];
891 878 src = fetchurl {
892 879 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
893 880 md5 = "8b317669314cf1bc881716cccdaccb30";
894 881 };
895 882 meta = {
896 883 license = [ pkgs.lib.licenses.asl20 ];
897 884 };
898 885 };
899 886 nose = super.buildPythonPackage {
900 887 name = "nose-1.3.6";
901 888 buildInputs = with self; [];
902 889 doCheck = false;
903 890 propagatedBuildInputs = with self; [];
904 891 src = fetchurl {
905 892 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
906 893 md5 = "0ca546d81ca8309080fc80cb389e7a16";
907 894 };
908 895 meta = {
909 896 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
910 897 };
911 898 };
912 899 objgraph = super.buildPythonPackage {
913 900 name = "objgraph-2.0.0";
914 901 buildInputs = with self; [];
915 902 doCheck = false;
916 903 propagatedBuildInputs = with self; [];
917 904 src = fetchurl {
918 905 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
919 906 md5 = "25b0d5e5adc74aa63ead15699614159c";
920 907 };
921 908 meta = {
922 909 license = [ pkgs.lib.licenses.mit ];
923 910 };
924 911 };
925 912 packaging = super.buildPythonPackage {
926 913 name = "packaging-15.2";
927 914 buildInputs = with self; [];
928 915 doCheck = false;
929 916 propagatedBuildInputs = with self; [];
930 917 src = fetchurl {
931 918 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
932 919 md5 = "c16093476f6ced42128bf610e5db3784";
933 920 };
934 921 meta = {
935 922 license = [ pkgs.lib.licenses.asl20 ];
936 923 };
937 924 };
938 925 paramiko = super.buildPythonPackage {
939 926 name = "paramiko-1.15.1";
940 927 buildInputs = with self; [];
941 928 doCheck = false;
942 929 propagatedBuildInputs = with self; [pycrypto ecdsa];
943 930 src = fetchurl {
944 931 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
945 932 md5 = "48c274c3f9b1282932567b21f6acf3b5";
946 933 };
947 934 meta = {
948 935 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
949 936 };
950 937 };
951 938 pep8 = super.buildPythonPackage {
952 939 name = "pep8-1.5.7";
953 940 buildInputs = with self; [];
954 941 doCheck = false;
955 942 propagatedBuildInputs = with self; [];
956 943 src = fetchurl {
957 944 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
958 945 md5 = "f6adbdd69365ecca20513c709f9b7c93";
959 946 };
960 947 meta = {
961 948 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
962 949 };
963 950 };
964 951 psutil = super.buildPythonPackage {
965 952 name = "psutil-2.2.1";
966 953 buildInputs = with self; [];
967 954 doCheck = false;
968 955 propagatedBuildInputs = with self; [];
969 956 src = fetchurl {
970 957 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
971 958 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
972 959 };
973 960 meta = {
974 961 license = [ pkgs.lib.licenses.bsdOriginal ];
975 962 };
976 963 };
977 964 psycopg2 = super.buildPythonPackage {
978 965 name = "psycopg2-2.6.1";
979 966 buildInputs = with self; [];
980 967 doCheck = false;
981 968 propagatedBuildInputs = with self; [];
982 969 src = fetchurl {
983 970 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
984 971 md5 = "842b44f8c95517ed5b792081a2370da1";
985 972 };
986 973 meta = {
987 974 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
988 975 };
989 976 };
990 977 py = super.buildPythonPackage {
991 978 name = "py-1.4.29";
992 979 buildInputs = with self; [];
993 980 doCheck = false;
994 981 propagatedBuildInputs = with self; [];
995 982 src = fetchurl {
996 983 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
997 984 md5 = "c28e0accba523a29b35a48bb703fb96c";
998 985 };
999 986 meta = {
1000 987 license = [ pkgs.lib.licenses.mit ];
1001 988 };
1002 989 };
1003 990 py-bcrypt = super.buildPythonPackage {
1004 991 name = "py-bcrypt-0.4";
1005 992 buildInputs = with self; [];
1006 993 doCheck = false;
1007 994 propagatedBuildInputs = with self; [];
1008 995 src = fetchurl {
1009 996 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1010 997 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1011 998 };
1012 999 meta = {
1013 1000 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 1001 };
1015 1002 };
1016 1003 py-gfm = super.buildPythonPackage {
1017 1004 name = "py-gfm-0.1.3";
1018 1005 buildInputs = with self; [];
1019 1006 doCheck = false;
1020 1007 propagatedBuildInputs = with self; [setuptools Markdown];
1021 1008 src = fetchurl {
1022 1009 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1023 1010 md5 = "e588d9e69640a241b97e2c59c22527a6";
1024 1011 };
1025 1012 meta = {
1026 1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1027 1014 };
1028 1015 };
1029 1016 pycrypto = super.buildPythonPackage {
1030 1017 name = "pycrypto-2.6.1";
1031 1018 buildInputs = with self; [];
1032 1019 doCheck = false;
1033 1020 propagatedBuildInputs = with self; [];
1034 1021 src = fetchurl {
1035 1022 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1036 1023 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1037 1024 };
1038 1025 meta = {
1039 1026 license = [ pkgs.lib.licenses.publicDomain ];
1040 1027 };
1041 1028 };
1042 1029 pycurl = super.buildPythonPackage {
1043 1030 name = "pycurl-7.19.5";
1044 1031 buildInputs = with self; [];
1045 1032 doCheck = false;
1046 1033 propagatedBuildInputs = with self; [];
1047 1034 src = fetchurl {
1048 1035 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1049 1036 md5 = "47b4eac84118e2606658122104e62072";
1050 1037 };
1051 1038 meta = {
1052 1039 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1053 1040 };
1054 1041 };
1055 1042 pyflakes = super.buildPythonPackage {
1056 1043 name = "pyflakes-0.8.1";
1057 1044 buildInputs = with self; [];
1058 1045 doCheck = false;
1059 1046 propagatedBuildInputs = with self; [];
1060 1047 src = fetchurl {
1061 1048 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1062 1049 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1063 1050 };
1064 1051 meta = {
1065 1052 license = [ pkgs.lib.licenses.mit ];
1066 1053 };
1067 1054 };
1068 1055 pyparsing = super.buildPythonPackage {
1069 1056 name = "pyparsing-1.5.7";
1070 1057 buildInputs = with self; [];
1071 1058 doCheck = false;
1072 1059 propagatedBuildInputs = with self; [];
1073 1060 src = fetchurl {
1074 1061 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1075 1062 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1076 1063 };
1077 1064 meta = {
1078 1065 license = [ pkgs.lib.licenses.mit ];
1079 1066 };
1080 1067 };
1081 1068 pyramid = super.buildPythonPackage {
1082 1069 name = "pyramid-1.6.1";
1083 1070 buildInputs = with self; [];
1084 1071 doCheck = false;
1085 1072 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1086 1073 src = fetchurl {
1087 1074 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1088 1075 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1089 1076 };
1090 1077 meta = {
1091 1078 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 1079 };
1093 1080 };
1094 1081 pyramid-beaker = super.buildPythonPackage {
1095 1082 name = "pyramid-beaker-0.8";
1096 1083 buildInputs = with self; [];
1097 1084 doCheck = false;
1098 1085 propagatedBuildInputs = with self; [pyramid Beaker];
1099 1086 src = fetchurl {
1100 1087 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1101 1088 md5 = "22f14be31b06549f80890e2c63a93834";
1102 1089 };
1103 1090 meta = {
1104 1091 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1105 1092 };
1106 1093 };
1107 1094 pyramid-debugtoolbar = super.buildPythonPackage {
1108 1095 name = "pyramid-debugtoolbar-2.4.2";
1109 1096 buildInputs = with self; [];
1110 1097 doCheck = false;
1111 1098 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1112 1099 src = fetchurl {
1113 1100 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1114 1101 md5 = "073ea67086cc4bd5decc3a000853642d";
1115 1102 };
1116 1103 meta = {
1117 1104 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1118 1105 };
1119 1106 };
1120 1107 pyramid-jinja2 = super.buildPythonPackage {
1121 1108 name = "pyramid-jinja2-2.5";
1122 1109 buildInputs = with self; [];
1123 1110 doCheck = false;
1124 1111 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1125 1112 src = fetchurl {
1126 1113 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1127 1114 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1128 1115 };
1129 1116 meta = {
1130 1117 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 1118 };
1132 1119 };
1133 1120 pyramid-mako = super.buildPythonPackage {
1134 1121 name = "pyramid-mako-1.0.2";
1135 1122 buildInputs = with self; [];
1136 1123 doCheck = false;
1137 1124 propagatedBuildInputs = with self; [pyramid Mako];
1138 1125 src = fetchurl {
1139 1126 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1140 1127 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1141 1128 };
1142 1129 meta = {
1143 1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1144 1131 };
1145 1132 };
1146 1133 pysqlite = super.buildPythonPackage {
1147 1134 name = "pysqlite-2.6.3";
1148 1135 buildInputs = with self; [];
1149 1136 doCheck = false;
1150 1137 propagatedBuildInputs = with self; [];
1151 1138 src = fetchurl {
1152 1139 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1153 1140 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1154 1141 };
1155 1142 meta = {
1156 1143 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1157 1144 };
1158 1145 };
1159 1146 pytest = super.buildPythonPackage {
1160 1147 name = "pytest-2.8.5";
1161 1148 buildInputs = with self; [];
1162 1149 doCheck = false;
1163 1150 propagatedBuildInputs = with self; [py];
1164 1151 src = fetchurl {
1165 1152 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1166 1153 md5 = "8493b06f700862f1294298d6c1b715a9";
1167 1154 };
1168 1155 meta = {
1169 1156 license = [ pkgs.lib.licenses.mit ];
1170 1157 };
1171 1158 };
1172 1159 pytest-catchlog = super.buildPythonPackage {
1173 1160 name = "pytest-catchlog-1.2.2";
1174 1161 buildInputs = with self; [];
1175 1162 doCheck = false;
1176 1163 propagatedBuildInputs = with self; [py pytest];
1177 1164 src = fetchurl {
1178 1165 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1179 1166 md5 = "09d890c54c7456c818102b7ff8c182c8";
1180 1167 };
1181 1168 meta = {
1182 1169 license = [ pkgs.lib.licenses.mit ];
1183 1170 };
1184 1171 };
1185 1172 pytest-cov = super.buildPythonPackage {
1186 1173 name = "pytest-cov-1.8.1";
1187 1174 buildInputs = with self; [];
1188 1175 doCheck = false;
1189 1176 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1190 1177 src = fetchurl {
1191 1178 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1192 1179 md5 = "76c778afa2494088270348be42d759fc";
1193 1180 };
1194 1181 meta = {
1195 1182 license = [ pkgs.lib.licenses.mit ];
1196 1183 };
1197 1184 };
1198 1185 pytest-profiling = super.buildPythonPackage {
1199 1186 name = "pytest-profiling-1.0.1";
1200 1187 buildInputs = with self; [];
1201 1188 doCheck = false;
1202 1189 propagatedBuildInputs = with self; [six pytest gprof2dot];
1203 1190 src = fetchurl {
1204 1191 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1205 1192 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1206 1193 };
1207 1194 meta = {
1208 1195 license = [ pkgs.lib.licenses.mit ];
1209 1196 };
1210 1197 };
1211 1198 pytest-runner = super.buildPythonPackage {
1212 1199 name = "pytest-runner-2.7.1";
1213 1200 buildInputs = with self; [];
1214 1201 doCheck = false;
1215 1202 propagatedBuildInputs = with self; [];
1216 1203 src = fetchurl {
1217 1204 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1218 1205 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1219 1206 };
1220 1207 meta = {
1221 1208 license = [ pkgs.lib.licenses.mit ];
1222 1209 };
1223 1210 };
1224 1211 pytest-timeout = super.buildPythonPackage {
1225 1212 name = "pytest-timeout-0.4";
1226 1213 buildInputs = with self; [];
1227 1214 doCheck = false;
1228 1215 propagatedBuildInputs = with self; [pytest];
1229 1216 src = fetchurl {
1230 1217 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1231 1218 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1232 1219 };
1233 1220 meta = {
1234 1221 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1235 1222 };
1236 1223 };
1237 1224 python-dateutil = super.buildPythonPackage {
1238 1225 name = "python-dateutil-1.5";
1239 1226 buildInputs = with self; [];
1240 1227 doCheck = false;
1241 1228 propagatedBuildInputs = with self; [];
1242 1229 src = fetchurl {
1243 1230 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1244 1231 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1245 1232 };
1246 1233 meta = {
1247 1234 license = [ pkgs.lib.licenses.psfl ];
1248 1235 };
1249 1236 };
1250 1237 python-editor = super.buildPythonPackage {
1251 1238 name = "python-editor-1.0.1";
1252 1239 buildInputs = with self; [];
1253 1240 doCheck = false;
1254 1241 propagatedBuildInputs = with self; [];
1255 1242 src = fetchurl {
1256 1243 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1257 1244 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1258 1245 };
1259 1246 meta = {
1260 1247 license = [ pkgs.lib.licenses.asl20 ];
1261 1248 };
1262 1249 };
1263 1250 python-ldap = super.buildPythonPackage {
1264 1251 name = "python-ldap-2.4.19";
1265 1252 buildInputs = with self; [];
1266 1253 doCheck = false;
1267 1254 propagatedBuildInputs = with self; [setuptools];
1268 1255 src = fetchurl {
1269 1256 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1270 1257 md5 = "b941bf31d09739492aa19ef679e94ae3";
1271 1258 };
1272 1259 meta = {
1273 1260 license = [ pkgs.lib.licenses.psfl ];
1274 1261 };
1275 1262 };
1276 1263 python-memcached = super.buildPythonPackage {
1277 1264 name = "python-memcached-1.57";
1278 1265 buildInputs = with self; [];
1279 1266 doCheck = false;
1280 1267 propagatedBuildInputs = with self; [six];
1281 1268 src = fetchurl {
1282 1269 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1283 1270 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1284 1271 };
1285 1272 meta = {
1286 1273 license = [ pkgs.lib.licenses.psfl ];
1287 1274 };
1288 1275 };
1289 1276 python-pam = super.buildPythonPackage {
1290 1277 name = "python-pam-1.8.2";
1291 1278 buildInputs = with self; [];
1292 1279 doCheck = false;
1293 1280 propagatedBuildInputs = with self; [];
1294 1281 src = fetchurl {
1295 1282 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1296 1283 md5 = "db71b6b999246fb05d78ecfbe166629d";
1297 1284 };
1298 1285 meta = {
1299 1286 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1300 1287 };
1301 1288 };
1302 1289 pytz = super.buildPythonPackage {
1303 1290 name = "pytz-2015.4";
1304 1291 buildInputs = with self; [];
1305 1292 doCheck = false;
1306 1293 propagatedBuildInputs = with self; [];
1307 1294 src = fetchurl {
1308 1295 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1309 1296 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1310 1297 };
1311 1298 meta = {
1312 1299 license = [ pkgs.lib.licenses.mit ];
1313 1300 };
1314 1301 };
1315 1302 pyzmq = super.buildPythonPackage {
1316 1303 name = "pyzmq-14.6.0";
1317 1304 buildInputs = with self; [];
1318 1305 doCheck = false;
1319 1306 propagatedBuildInputs = with self; [];
1320 1307 src = fetchurl {
1321 1308 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1322 1309 md5 = "395b5de95a931afa5b14c9349a5b8024";
1323 1310 };
1324 1311 meta = {
1325 1312 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1326 1313 };
1327 1314 };
1328 1315 recaptcha-client = super.buildPythonPackage {
1329 1316 name = "recaptcha-client-1.0.6";
1330 1317 buildInputs = with self; [];
1331 1318 doCheck = false;
1332 1319 propagatedBuildInputs = with self; [];
1333 1320 src = fetchurl {
1334 1321 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1335 1322 md5 = "74228180f7e1fb76c4d7089160b0d919";
1336 1323 };
1337 1324 meta = {
1338 1325 license = [ { fullName = "MIT/X11"; } ];
1339 1326 };
1340 1327 };
1341 1328 repoze.lru = super.buildPythonPackage {
1342 1329 name = "repoze.lru-0.6";
1343 1330 buildInputs = with self; [];
1344 1331 doCheck = false;
1345 1332 propagatedBuildInputs = with self; [];
1346 1333 src = fetchurl {
1347 1334 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1348 1335 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1349 1336 };
1350 1337 meta = {
1351 1338 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1352 1339 };
1353 1340 };
1354 1341 requests = super.buildPythonPackage {
1355 1342 name = "requests-2.9.1";
1356 1343 buildInputs = with self; [];
1357 1344 doCheck = false;
1358 1345 propagatedBuildInputs = with self; [];
1359 1346 src = fetchurl {
1360 1347 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1361 1348 md5 = "0b7f480d19012ec52bab78292efd976d";
1362 1349 };
1363 1350 meta = {
1364 1351 license = [ pkgs.lib.licenses.asl20 ];
1365 1352 };
1366 1353 };
1367 1354 rhodecode-enterprise-ce = super.buildPythonPackage {
1368 1355 name = "rhodecode-enterprise-ce-4.3.0";
1369 1356 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1370 1357 doCheck = true;
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1372 1359 src = ./.;
1373 1360 meta = {
1374 1361 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1375 1362 };
1376 1363 };
1377 1364 rhodecode-tools = super.buildPythonPackage {
1378 1365 name = "rhodecode-tools-0.8.3";
1379 1366 buildInputs = with self; [];
1380 1367 doCheck = false;
1381 1368 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1382 1369 src = fetchurl {
1383 1370 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1384 1371 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1385 1372 };
1386 1373 meta = {
1387 1374 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1388 1375 };
1389 1376 };
1390 1377 serpent = super.buildPythonPackage {
1391 1378 name = "serpent-1.12";
1392 1379 buildInputs = with self; [];
1393 1380 doCheck = false;
1394 1381 propagatedBuildInputs = with self; [];
1395 1382 src = fetchurl {
1396 1383 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1397 1384 md5 = "05869ac7b062828b34f8f927f0457b65";
1398 1385 };
1399 1386 meta = {
1400 1387 license = [ pkgs.lib.licenses.mit ];
1401 1388 };
1402 1389 };
1403 1390 setproctitle = super.buildPythonPackage {
1404 1391 name = "setproctitle-1.1.8";
1405 1392 buildInputs = with self; [];
1406 1393 doCheck = false;
1407 1394 propagatedBuildInputs = with self; [];
1408 1395 src = fetchurl {
1409 1396 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1410 1397 md5 = "728f4c8c6031bbe56083a48594027edd";
1411 1398 };
1412 1399 meta = {
1413 1400 license = [ pkgs.lib.licenses.bsdOriginal ];
1414 1401 };
1415 1402 };
1416 1403 setuptools = super.buildPythonPackage {
1417 1404 name = "setuptools-20.8.1";
1418 1405 buildInputs = with self; [];
1419 1406 doCheck = false;
1420 1407 propagatedBuildInputs = with self; [];
1421 1408 src = fetchurl {
1422 1409 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1423 1410 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1424 1411 };
1425 1412 meta = {
1426 1413 license = [ pkgs.lib.licenses.mit ];
1427 1414 };
1428 1415 };
1429 1416 setuptools-scm = super.buildPythonPackage {
1430 1417 name = "setuptools-scm-1.11.0";
1431 1418 buildInputs = with self; [];
1432 1419 doCheck = false;
1433 1420 propagatedBuildInputs = with self; [];
1434 1421 src = fetchurl {
1435 1422 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1436 1423 md5 = "4c5c896ba52e134bbc3507bac6400087";
1437 1424 };
1438 1425 meta = {
1439 1426 license = [ pkgs.lib.licenses.mit ];
1440 1427 };
1441 1428 };
1442 1429 simplejson = super.buildPythonPackage {
1443 1430 name = "simplejson-3.7.2";
1444 1431 buildInputs = with self; [];
1445 1432 doCheck = false;
1446 1433 propagatedBuildInputs = with self; [];
1447 1434 src = fetchurl {
1448 1435 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1449 1436 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1450 1437 };
1451 1438 meta = {
1452 1439 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1453 1440 };
1454 1441 };
1455 1442 six = super.buildPythonPackage {
1456 1443 name = "six-1.9.0";
1457 1444 buildInputs = with self; [];
1458 1445 doCheck = false;
1459 1446 propagatedBuildInputs = with self; [];
1460 1447 src = fetchurl {
1461 1448 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1462 1449 md5 = "476881ef4012262dfc8adc645ee786c4";
1463 1450 };
1464 1451 meta = {
1465 1452 license = [ pkgs.lib.licenses.mit ];
1466 1453 };
1467 1454 };
1468 1455 subprocess32 = super.buildPythonPackage {
1469 1456 name = "subprocess32-3.2.6";
1470 1457 buildInputs = with self; [];
1471 1458 doCheck = false;
1472 1459 propagatedBuildInputs = with self; [];
1473 1460 src = fetchurl {
1474 1461 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1475 1462 md5 = "754c5ab9f533e764f931136974b618f1";
1476 1463 };
1477 1464 meta = {
1478 1465 license = [ pkgs.lib.licenses.psfl ];
1479 1466 };
1480 1467 };
1481 1468 supervisor = super.buildPythonPackage {
1482 1469 name = "supervisor-3.3.0";
1483 1470 buildInputs = with self; [];
1484 1471 doCheck = false;
1485 1472 propagatedBuildInputs = with self; [meld3];
1486 1473 src = fetchurl {
1487 1474 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1488 1475 md5 = "46bac00378d1eddb616752b990c67416";
1489 1476 };
1490 1477 meta = {
1491 1478 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1492 1479 };
1493 1480 };
1494 1481 transifex-client = super.buildPythonPackage {
1495 1482 name = "transifex-client-0.10";
1496 1483 buildInputs = with self; [];
1497 1484 doCheck = false;
1498 1485 propagatedBuildInputs = with self; [];
1499 1486 src = fetchurl {
1500 1487 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1501 1488 md5 = "5549538d84b8eede6b254cd81ae024fa";
1502 1489 };
1503 1490 meta = {
1504 1491 license = [ pkgs.lib.licenses.gpl2 ];
1505 1492 };
1506 1493 };
1507 1494 translationstring = super.buildPythonPackage {
1508 1495 name = "translationstring-1.3";
1509 1496 buildInputs = with self; [];
1510 1497 doCheck = false;
1511 1498 propagatedBuildInputs = with self; [];
1512 1499 src = fetchurl {
1513 1500 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1514 1501 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1515 1502 };
1516 1503 meta = {
1517 1504 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1518 1505 };
1519 1506 };
1520 1507 trollius = super.buildPythonPackage {
1521 1508 name = "trollius-1.0.4";
1522 1509 buildInputs = with self; [];
1523 1510 doCheck = false;
1524 1511 propagatedBuildInputs = with self; [futures];
1525 1512 src = fetchurl {
1526 1513 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1527 1514 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1528 1515 };
1529 1516 meta = {
1530 1517 license = [ pkgs.lib.licenses.asl20 ];
1531 1518 };
1532 1519 };
1533 1520 uWSGI = super.buildPythonPackage {
1534 1521 name = "uWSGI-2.0.11.2";
1535 1522 buildInputs = with self; [];
1536 1523 doCheck = false;
1537 1524 propagatedBuildInputs = with self; [];
1538 1525 src = fetchurl {
1539 1526 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1540 1527 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1541 1528 };
1542 1529 meta = {
1543 1530 license = [ pkgs.lib.licenses.gpl2 ];
1544 1531 };
1545 1532 };
1546 1533 urllib3 = super.buildPythonPackage {
1547 1534 name = "urllib3-1.16";
1548 1535 buildInputs = with self; [];
1549 1536 doCheck = false;
1550 1537 propagatedBuildInputs = with self; [];
1551 1538 src = fetchurl {
1552 1539 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1553 1540 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1554 1541 };
1555 1542 meta = {
1556 1543 license = [ pkgs.lib.licenses.mit ];
1557 1544 };
1558 1545 };
1559 1546 venusian = super.buildPythonPackage {
1560 1547 name = "venusian-1.0";
1561 1548 buildInputs = with self; [];
1562 1549 doCheck = false;
1563 1550 propagatedBuildInputs = with self; [];
1564 1551 src = fetchurl {
1565 1552 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1566 1553 md5 = "dccf2eafb7113759d60c86faf5538756";
1567 1554 };
1568 1555 meta = {
1569 1556 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1570 1557 };
1571 1558 };
1572 1559 waitress = super.buildPythonPackage {
1573 1560 name = "waitress-0.8.9";
1574 1561 buildInputs = with self; [];
1575 1562 doCheck = false;
1576 1563 propagatedBuildInputs = with self; [setuptools];
1577 1564 src = fetchurl {
1578 1565 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1579 1566 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1580 1567 };
1581 1568 meta = {
1582 1569 license = [ pkgs.lib.licenses.zpt21 ];
1583 1570 };
1584 1571 };
1585 1572 wsgiref = super.buildPythonPackage {
1586 1573 name = "wsgiref-0.1.2";
1587 1574 buildInputs = with self; [];
1588 1575 doCheck = false;
1589 1576 propagatedBuildInputs = with self; [];
1590 1577 src = fetchurl {
1591 1578 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1592 1579 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1593 1580 };
1594 1581 meta = {
1595 1582 license = [ { fullName = "PSF or ZPL"; } ];
1596 1583 };
1597 1584 };
1598 1585 zope.cachedescriptors = super.buildPythonPackage {
1599 1586 name = "zope.cachedescriptors-4.0.0";
1600 1587 buildInputs = with self; [];
1601 1588 doCheck = false;
1602 1589 propagatedBuildInputs = with self; [setuptools];
1603 1590 src = fetchurl {
1604 1591 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1605 1592 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1606 1593 };
1607 1594 meta = {
1608 1595 license = [ pkgs.lib.licenses.zpt21 ];
1609 1596 };
1610 1597 };
1611 1598 zope.deprecation = super.buildPythonPackage {
1612 1599 name = "zope.deprecation-4.1.2";
1613 1600 buildInputs = with self; [];
1614 1601 doCheck = false;
1615 1602 propagatedBuildInputs = with self; [setuptools];
1616 1603 src = fetchurl {
1617 1604 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1618 1605 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1619 1606 };
1620 1607 meta = {
1621 1608 license = [ pkgs.lib.licenses.zpt21 ];
1622 1609 };
1623 1610 };
1624 1611 zope.event = super.buildPythonPackage {
1625 1612 name = "zope.event-4.0.3";
1626 1613 buildInputs = with self; [];
1627 1614 doCheck = false;
1628 1615 propagatedBuildInputs = with self; [setuptools];
1629 1616 src = fetchurl {
1630 1617 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1631 1618 md5 = "9a3780916332b18b8b85f522bcc3e249";
1632 1619 };
1633 1620 meta = {
1634 1621 license = [ pkgs.lib.licenses.zpt21 ];
1635 1622 };
1636 1623 };
1637 1624 zope.interface = super.buildPythonPackage {
1638 1625 name = "zope.interface-4.1.3";
1639 1626 buildInputs = with self; [];
1640 1627 doCheck = false;
1641 1628 propagatedBuildInputs = with self; [setuptools];
1642 1629 src = fetchurl {
1643 1630 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1644 1631 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1645 1632 };
1646 1633 meta = {
1647 1634 license = [ pkgs.lib.licenses.zpt21 ];
1648 1635 };
1649 1636 };
1650 1637
1651 1638 ### Test requirements
1652 1639
1653 1640
1654 1641 }
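Each entry above pins a source tarball by URL together with an md5 checksum. As an aside (not part of this change), a minimal sketch of how such a checksum can be computed for a locally downloaded sdist using only the standard library; the file name and expected digest in the usage note are taken from the ecdsa-0.11 entry above and assume the tarball was downloaded into the current directory:

import hashlib

def md5_of_file(path, chunk_size=1024 * 1024):
    # Return the hex md5 digest of a file, in the same format as the
    # ``md5 = "..."`` fields used by ``fetchurl`` above.
    digest = hashlib.md5()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Example check against the ecdsa-0.11 entry above:
# assert md5_of_file('ecdsa-0.11.tar.gz') == '8ef586fe4dbb156697d756900cb41d7c'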
@@ -1,152 +1,151 b''
1 1 Babel==1.3
2 2 Beaker==1.7.0
3 3 CProfileV==1.0.6
4 4 Fabric==1.10.0
5 5 FormEncode==1.2.4
6 6 Jinja2==2.7.3
7 7 Mako==1.0.1
8 8 Markdown==2.6.2
9 9 MarkupSafe==0.23
10 10 MySQL-python==1.2.5
11 11 Paste==2.0.2
12 12 PasteDeploy==1.5.2
13 13 PasteScript==1.7.5
14 14 Pygments==2.1.3
15 15
16 16 # TODO: This version is not available on PyPI
17 17 # Pylons==1.0.2.dev20160108
18 18 Pylons==1.0.1
19 19
20 20 # TODO: This version is not available, but newer ones are
21 21 # Pyro4==4.35
22 22 Pyro4==4.41
23 23
24 24 # TODO: This should probably not be in here
25 25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26 26
27 27 # TODO: This is not really a dependency; we should add it only
28 28 # to the development environment, where it is useful.
29 29 # RhodeCodeVCSServer==3.9.0
30 30
31 31 Routes==1.13
32 32 SQLAlchemy==0.9.9
33 33 Sphinx==1.2.2
34 34 Tempita==0.5.2
35 35 URLObject==2.4.0
36 36 WebError==0.10.3
37 37
38 38 # TODO: This is modified by us and needs better integration. For now
39 39 # we use the latest version released before it.
40 40 # WebHelpers==1.3.dev20150807
41 41 WebHelpers==1.3
42 42
43 43 WebHelpers2==2.0
44 44 WebOb==1.3.1
45 45 WebTest==1.4.3
46 46 Whoosh==2.7.0
47 47 alembic==0.8.4
48 48 amqplib==1.0.2
49 49 anyjson==0.3.3
50 50 appenlight-client==0.6.14
51 51 authomatic==0.1.0.post1;
52 52 backport-ipaddress==0.1
53 53 bottle==0.12.8
54 54 bumpversion==0.5.3
55 55 celery==2.2.10
56 56 click==5.1
57 57 colander==1.2
58 58 configobj==5.0.6
59 59 cov-core==1.15.0
60 60 coverage==3.7.1
61 61 cssselect==0.9.1
62 62 decorator==3.4.2
63 63 docutils==0.12
64 64 dogpile.cache==0.6.1
65 65 dogpile.core==0.4.1
66 66 dulwich==0.12.0
67 67 ecdsa==0.11
68 68 flake8==2.4.1
69 69 future==0.14.3
70 70 futures==3.0.2
71 71 gprof2dot==2015.12.1
72 72 gunicorn==19.6.0
73 73
74 74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 75 # actually we should not need this for Enterprise at all.
76 76 # hgsubversion==1.8.2
77 77
78 78 gnureadline==6.3.3
79 79 infrae.cache==1.0.1
80 80 invoke==0.13.0
81 81 ipdb==0.8
82 82 ipython==3.1.0
83 83 iso8601==0.1.11
84 84 itsdangerous==0.24
85 85 kombu==1.5.1
86 86 lxml==3.4.4
87 marshmallow==2.8.0
88 87 mccabe==0.3
89 88 meld3==1.0.2
90 89 mock==1.0.1
91 90 msgpack-python==0.4.6
92 91 nose==1.3.6
93 92 objgraph==2.0.0
94 93 packaging==15.2
95 94 paramiko==1.15.1
96 95 pep8==1.5.7
97 96 psutil==2.2.1
98 97 psycopg2==2.6.1
99 98 py==1.4.29
100 99 py-bcrypt==0.4
101 100 py-gfm==0.1.3
102 101 pycrypto==2.6.1
103 102 pycurl==7.19.5
104 103 pyflakes==0.8.1
105 104 pyparsing==1.5.7
106 105 pyramid==1.6.1
107 106 pyramid-beaker==0.8
108 107 pyramid-debugtoolbar==2.4.2
109 108 pyramid-jinja2==2.5
110 109 pyramid-mako==1.0.2
111 110 pysqlite==2.6.3
112 111 pytest==2.8.5
113 112 pytest-runner==2.7.1
114 113 pytest-catchlog==1.2.2
115 114 pytest-cov==1.8.1
116 115 pytest-profiling==1.0.1
117 116 pytest-timeout==0.4
118 117 python-dateutil==1.5
119 118 python-ldap==2.4.19
120 119 python-memcached==1.57
121 120 python-pam==1.8.2
122 121 pytz==2015.4
123 122 pyzmq==14.6.0
124 123
125 124 # TODO: This is not publicly available
126 125 # rc-testdata==0.2.0
127 126
128 127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
129 128
130 129
131 130 recaptcha-client==1.0.6
132 131 repoze.lru==0.6
133 132 requests==2.9.1
134 133 serpent==1.12
135 134 setproctitle==1.1.8
136 135 setuptools==20.8.1
137 136 setuptools-scm==1.11.0
138 137 simplejson==3.7.2
139 138 six==1.9.0
140 139 subprocess32==3.2.6
141 140 supervisor==3.3.0
142 141 transifex-client==0.10
143 142 translationstring==1.3
144 143 trollius==1.0.4
145 144 uWSGI==2.0.11.2
146 145 venusian==1.0
147 146 waitress==0.8.9
148 147 wsgiref==0.1.2
149 148 zope.cachedescriptors==4.0.0
150 149 zope.deprecation==4.1.2
151 150 zope.event==4.0.3
152 151 zope.interface==4.1.3
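The file above pins exact versions. As an illustration only (this helper is not part of the repository), a short sketch that checks an installed environment against such name==version pins, skipping comments, blank lines and URL entries:

import pkg_resources

def check_pins(path):
    # Compare installed distributions against ``name==version`` pins and
    # return a list of (name, pinned, installed) tuples for any mismatch.
    mismatches = []
    with open(path) as handle:
        for line in handle:
            line = line.strip()
            if not line or line.startswith('#') or '==' not in line:
                continue
            name, pinned = line.split('==', 1)
            pinned = pinned.rstrip(';').strip()
            try:
                installed = pkg_resources.get_distribution(name).version
            except pkg_resources.DistributionNotFound:
                installed = None
            if installed != pinned:
                mismatches.append((name, pinned, installed))
    return mismatches

# Hypothetical usage:
# for name, pinned, installed in check_pins('requirements.txt'):
#     print('%s: pinned %s, installed %s' % (name, pinned, installed))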
@@ -1,58 +1,58 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web-based repository management software.
24 24 Versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pylons
44 44 CONFIG = {}
45 45
46 46 # Linked module for extensions
47 47 EXTENSIONS = {}
48 48
49 49 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
50 __dbversion__ = 54 # defines current db version for migrations
50 __dbversion__ = 55 # defines current db version for migrations
51 51 __platform__ = platform.system()
52 52 __license__ = 'AGPLv3, and Commercial License'
53 53 __author__ = 'RhodeCode GmbH'
54 54 __url__ = 'http://rhodecode.com'
55 55
56 56 is_windows = __platform__ in ['Windows']
57 57 is_unix = not is_windows
58 58 is_test = False
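For illustration (not part of this change), the version handling above reduces to the following, with an inline string standing in for the contents of the VERSION file; the value 4.3.0 matches the package name used in the nix expression earlier:

# Minimal sketch of how __version__ is derived from the VERSION file.
version_file_content = '4.3.0'  # hypothetical contents of the VERSION file
VERSION = tuple(version_file_content.split('.'))   # ('4', '3', '0')
__version__ = '.'.join(str(each) for each in VERSION[:3])
assert __version__ == '4.3.0'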
@@ -1,124 +1,126 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pylons import url
26 26 from zope.interface import implementer
27 27
28 28 from rhodecode.admin.interfaces import IAdminNavigationRegistry
29 29 from rhodecode.lib.utils import get_registry
30 30 from rhodecode.translation import _
31 31
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35 NavListEntry = collections.namedtuple('NavListEntry', ['key', 'name', 'url'])
36 36
37 37
38 38 class NavEntry(object):
39 39 """
40 40 Represents an entry in the admin navigation.
41 41
42 42 :param key: Unique identifier used to store reference in an OrderedDict.
43 43 :param name: Display name, usually a translation string.
44 44 :param view_name: Name of the view, used to generate the URL.
45 45 :param pyramid: Indicator to use pyramid for URL generation. This should
46 46 be removed as soon as we are fully migrated to pyramid.
47 47 """
48 48
49 49 def __init__(self, key, name, view_name, pyramid=False):
50 50 self.key = key
51 51 self.name = name
52 52 self.view_name = view_name
53 53 self.pyramid = pyramid
54 54
55 55 def generate_url(self, request):
56 56 if self.pyramid:
57 57 if hasattr(request, 'route_path'):
58 58 return request.route_path(self.view_name)
59 59 else:
60 60 # TODO: johbo: Remove this after migrating to pyramid.
61 61 # We need the pyramid request here to generate URLs to pyramid
62 62 # views from within pylons views.
63 63 from pyramid.threadlocal import get_current_request
64 64 pyramid_request = get_current_request()
65 65 return pyramid_request.route_path(self.view_name)
66 66 else:
67 67 return url(self.view_name)
68 68
69 69
70 70 @implementer(IAdminNavigationRegistry)
71 71 class NavigationRegistry(object):
72 72
73 73 _base_entries = [
74 74 NavEntry('global', _('Global'), 'admin_settings_global'),
75 75 NavEntry('vcs', _('VCS'), 'admin_settings_vcs'),
76 76 NavEntry('visual', _('Visual'), 'admin_settings_visual'),
77 77 NavEntry('mapping', _('Remap and Rescan'), 'admin_settings_mapping'),
78 78 NavEntry('issuetracker', _('Issue Tracker'),
79 79 'admin_settings_issuetracker'),
80 80 NavEntry('email', _('Email'), 'admin_settings_email'),
81 81 NavEntry('hooks', _('Hooks'), 'admin_settings_hooks'),
82 82 NavEntry('search', _('Full Text Search'), 'admin_settings_search'),
83 NavEntry('integrations', _('Integrations'),
84 'global_integrations_home', pyramid=True),
83 85 NavEntry('system', _('System Info'), 'admin_settings_system'),
84 86 NavEntry('open_source', _('Open Source Licenses'),
85 87 'admin_settings_open_source', pyramid=True),
86 88 # TODO: marcink: we disable supervisor now until the supervisor stats
87 89 # page is fixed in the nix configuration
88 90 # NavEntry('supervisor', _('Supervisor'), 'admin_settings_supervisor'),
89 91 ]
90 92
91 93 _labs_entry = NavEntry('labs', _('Labs'),
92 94 'admin_settings_labs')
93 95
94 96 def __init__(self, labs_active=False):
95 97 self._registered_entries = collections.OrderedDict([
96 98 (item.key, item) for item in self.__class__._base_entries
97 99 ])
98 100
99 101 if labs_active:
100 102 self.add_entry(self._labs_entry)
101 103
102 104 def add_entry(self, entry):
103 105 self._registered_entries[entry.key] = entry
104 106
105 107 def get_navlist(self, request):
106 108 navlist = [NavListEntry(i.key, i.name, i.generate_url(request))
107 109 for i in self._registered_entries.values()]
108 110 return navlist
109 111
110 112
111 113 def navigation_registry(request):
112 114 """
113 115 Helper that returns the admin navigation registry.
114 116 """
115 117 pyramid_registry = get_registry(request)
116 118 nav_registry = pyramid_registry.queryUtility(IAdminNavigationRegistry)
117 119 return nav_registry
118 120
119 121
120 122 def navigation_list(request):
121 123 """
122 124 Helper that returns the admin navigation as list of NavListEntry objects.
123 125 """
124 126 return navigation_registry(request).get_navlist(request)
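The registry above stores NavEntry objects in an OrderedDict keyed by ``key`` and flattens them into NavListEntry tuples for the templates. A stripped-down, standalone sketch of that pattern (the class name and hard-coded URLs below are hypothetical, and the pylons/pyramid URL generation is left out):

import collections

NavListEntry = collections.namedtuple('NavListEntry', ['key', 'name', 'url'])


class SimpleNavRegistry(object):
    # Toy version of the registry above: ordered entries with keyed access,
    # so a later add_entry() with the same key replaces an earlier one.

    def __init__(self):
        self._entries = collections.OrderedDict()

    def add_entry(self, key, name, url):
        self._entries[key] = NavListEntry(key, name, url)

    def get_navlist(self):
        return list(self._entries.values())


# Hypothetical usage mirroring the 'integrations' entry registered above:
registry = SimpleNavRegistry()
registry.add_entry('global', 'Global', '/admin/settings/global')
registry.add_entry('integrations', 'Integrations', '/admin/integrations')
for entry in registry.get_navlist():
    print('%s -> %s' % (entry.name, entry.url))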
@@ -1,387 +1,388 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25
26 26 from paste.registry import RegistryManager
27 27 from paste.gzipper import make_gzip_middleware
28 28 from pylons.wsgiapp import PylonsApp
29 29 from pyramid.authorization import ACLAuthorizationPolicy
30 30 from pyramid.config import Configurator
31 31 from pyramid.static import static_view
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import HTTPError, HTTPInternalServerError
35 35 import pyramid.httpexceptions as httpexceptions
36 36 from pyramid.renderers import render_to_response, render
37 37 from routes.middleware import RoutesMiddleware
38 38 import routes.util
39 39
40 40 import rhodecode
41 41 from rhodecode.config import patches
42 42 from rhodecode.config.environment import (
43 43 load_environment, load_pyramid_environment)
44 44 from rhodecode.lib.middleware import csrf
45 45 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
46 46 from rhodecode.lib.middleware.disable_vcs import DisableVCSPagesWrapper
47 47 from rhodecode.lib.middleware.https_fixup import HttpsFixup
48 48 from rhodecode.lib.middleware.vcs import VCSMiddleware
49 49 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
56 56 """Create a Pylons WSGI application and return it
57 57
58 58 ``global_conf``
59 59 The inherited configuration for this application. Normally from
60 60 the [DEFAULT] section of the Paste ini file.
61 61
62 62 ``full_stack``
63 63 Whether or not this application provides a full WSGI stack (by
64 64 default, meaning it handles its own exceptions and errors).
65 65 Disable full_stack when this application is "managed" by
66 66 another WSGI middleware.
67 67
68 68 ``app_conf``
69 69 The application's local configuration. Normally specified in
70 70 the [app:<name>] section of the Paste ini file (where <name>
71 71 defaults to main).
72 72
73 73 """
74 74 # Apply compatibility patches
75 75 patches.kombu_1_5_1_python_2_7_11()
76 76 patches.inspect_getargspec()
77 77
78 78 # Configure the Pylons environment
79 79 config = load_environment(global_conf, app_conf)
80 80
81 81 # The Pylons WSGI app
82 82 app = PylonsApp(config=config)
83 83 if rhodecode.is_test:
84 84 app = csrf.CSRFDetector(app)
85 85
86 86 expected_origin = config.get('expected_origin')
87 87 if expected_origin:
88 88 # The API can be accessed from other Origins.
89 89 app = csrf.OriginChecker(app, expected_origin,
90 90 skip_urls=[routes.util.url_for('api')])
91 91
92 92
93 93 if asbool(full_stack):
94 94
95 95 # Appenlight monitoring and error handler
96 96 app, appenlight_client = wrap_in_appenlight_if_enabled(app, config)
97 97
98 98 # We want our low-level middleware to get to the request ASAP. We don't
99 99 # need any pylons stack middleware in front of them.
100 100 app = VCSMiddleware(app, config, appenlight_client)
101 101
102 102 # Establish the Registry for this application
103 103 app = RegistryManager(app)
104 104
105 105 app.config = config
106 106
107 107 return app
108 108
109 109
110 110 def make_pyramid_app(global_config, **settings):
111 111 """
112 112 Constructs the WSGI application based on Pyramid and wraps the Pylons-based
113 113 application.
114 114
115 115 Specials:
116 116
117 117 * We migrate from Pylons to Pyramid. While doing this, we keep both
118 118 frameworks functional. This involves moving some WSGI middlewares around
119 119 and providing access to some data internals, so that the old code is
120 120 still functional.
121 121
122 122     * The application can also be integrated like a plugin via the call to
123 123       `includeme`, together with the other utility functions of this module
124 124       that are called alongside it. Change this with great care so as not to
125 125       break setups where these fragments are assembled from another place.
126 126
127 127 """
128 128 # The edition string should be available in pylons too, so we add it here
129 129 # before copying the settings.
130 130 settings.setdefault('rhodecode.edition', 'Community Edition')
131 131
132 132 # As long as our Pylons application does expect "unprepared" settings, make
133 133 # sure that we keep an unmodified copy. This avoids unintentional change of
134 134 # behavior in the old application.
135 135 settings_pylons = settings.copy()
136 136
137 137 sanitize_settings_and_apply_defaults(settings)
138 138 config = Configurator(settings=settings)
139 139 add_pylons_compat_data(config.registry, global_config, settings_pylons)
140 140
141 141 load_pyramid_environment(global_config, settings)
142 142
143 143 includeme(config)
144 144 includeme_last(config)
145 145 pyramid_app = config.make_wsgi_app()
146 146 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
147 147 return pyramid_app
148 148
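One hedged way to serve the assembled application (the ini filename is illustrative, and any WSGI server would do): let PasteDeploy resolve whichever factory the ini's [app:main] section points at and hand the result to a server.

    from paste.deploy import loadapp
    from waitress import serve

    # loadapp() resolves the configured paste factory from the ini and calls it
    app = loadapp('config:rhodecode.ini', relative_to='.')
    serve(app, host='127.0.0.1', port=5000)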
149 149
150 150 def add_pylons_compat_data(registry, global_config, settings):
151 151 """
152 152 Attach data to the registry to support the Pylons integration.
153 153 """
154 154 registry._pylons_compat_global_config = global_config
155 155 registry._pylons_compat_settings = settings
156 156
157 157
158 158 def webob_to_pyramid_http_response(webob_response):
159 159 ResponseClass = httpexceptions.status_map[webob_response.status_int]
160 160 pyramid_response = ResponseClass(webob_response.status)
161 161 pyramid_response.status = webob_response.status
162 162 pyramid_response.headers.update(webob_response.headers)
163 163 if pyramid_response.headers['content-type'] == 'text/html':
164 164 pyramid_response.headers['content-type'] = 'text/html; charset=UTF-8'
165 165 return pyramid_response
166 166
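The conversion above relies on `pyramid.httpexceptions.status_map`, a dict keyed by HTTP status code. A standalone illustration of that lookup (pyramid only, nothing RhodeCode-specific):

    import pyramid.httpexceptions as httpexceptions

    # 404 maps to HTTPNotFound; instantiating it gives a ready-to-return response
    ResponseClass = httpexceptions.status_map[404]
    response = ResponseClass('404 Not Found')
    assert response.status_int == 404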
167 167
168 168 def error_handler(exception, request):
169 169 # TODO: dan: replace the old pylons error controller with this
170 170 from rhodecode.model.settings import SettingsModel
171 171 from rhodecode.lib.utils2 import AttributeDict
172 172
173 173 try:
174 174 rc_config = SettingsModel().get_all_settings()
175 175 except Exception:
176 176 log.exception('failed to fetch settings')
177 177 rc_config = {}
178 178
179 179 base_response = HTTPInternalServerError()
180 180 # prefer original exception for the response since it may have headers set
181 181 if isinstance(exception, HTTPError):
182 182 base_response = exception
183 183
184 184 c = AttributeDict()
185 185 c.error_message = base_response.status
186 186 c.error_explanation = base_response.explanation or str(base_response)
187 187 c.visual = AttributeDict()
188 188
189 189 c.visual.rhodecode_support_url = (
190 190 request.registry.settings.get('rhodecode_support_url') or
191 191 request.route_url('rhodecode_support')
192 192 )
193 193 c.redirect_time = 0
194 194 c.rhodecode_name = rc_config.get('rhodecode_title', '')
195 195 if not c.rhodecode_name:
196 196 c.rhodecode_name = 'Rhodecode'
197 197
198 198 response = render_to_response(
199 199 '/errors/error_document.html', {'c': c}, request=request,
200 200 response=base_response)
201 201
202 202 return response
203 203
204 204
205 205 def includeme(config):
206 206 settings = config.registry.settings
207 207
208 208 if asbool(settings.get('appenlight', 'false')):
209 209 config.include('appenlight_client.ext.pyramid_tween')
210 210
211 211 # Includes which are required. The application would fail without them.
212 212 config.include('pyramid_mako')
213 213 config.include('pyramid_beaker')
214 214 config.include('rhodecode.admin')
215 215 config.include('rhodecode.authentication')
216 config.include('rhodecode.integrations')
216 217 config.include('rhodecode.login')
217 218 config.include('rhodecode.tweens')
218 219 config.include('rhodecode.api')
219 220 config.add_route(
220 221 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
221 222
222 223 # Set the authorization policy.
223 224 authz_policy = ACLAuthorizationPolicy()
224 225 config.set_authorization_policy(authz_policy)
225 226
226 227 # Set the default renderer for HTML templates to mako.
227 228 config.add_mako_renderer('.html')
228 229
229 230 # plugin information
230 231 config.registry.rhodecode_plugins = {}
231 232
232 233 config.add_directive(
233 234 'register_rhodecode_plugin', register_rhodecode_plugin)
234 235 # include RhodeCode plugins
235 236 includes = aslist(settings.get('rhodecode.includes', []))
236 237 for inc in includes:
237 238 config.include(inc)
238 239
239 240 pylons_app = make_app(
240 241 config.registry._pylons_compat_global_config,
241 242 **config.registry._pylons_compat_settings)
242 243 config.registry._pylons_compat_config = pylons_app.config
243 244
244 245 pylons_app_as_view = wsgiapp(pylons_app)
245 246
246 247     # Protect against VCS Server error pages when the VCS server is not available
247 248 vcs_server_enabled = asbool(settings.get('vcs.server.enable', 'true'))
248 249 if not vcs_server_enabled:
249 250 pylons_app_as_view = DisableVCSPagesWrapper(pylons_app_as_view)
250 251
251 252
252 253 def pylons_app_with_error_handler(context, request):
253 254 """
254 255 Handle exceptions from rc pylons app:
255 256
256 257 - old webob type exceptions get converted to pyramid exceptions
257 258 - pyramid exceptions are passed to the error handler view
258 259 """
259 260 try:
260 261 response = pylons_app_as_view(context, request)
261 262 if 400 <= response.status_int <= 599: # webob type error responses
262 263 return error_handler(
263 264 webob_to_pyramid_http_response(response), request)
264 265 except HTTPError as e: # pyramid type exceptions
265 266 return error_handler(e, request)
266 267 except Exception:
267 268 if settings.get('debugtoolbar.enabled', False):
268 269 raise
269 270 return error_handler(HTTPInternalServerError(), request)
270 271 return response
271 272
272 273     # This is the glue which allows us to migrate in chunks. By registering the
273 274     # Pylons-based application as the "Not Found" view in Pyramid, we fall
274 275     # back to the old application whenever the new one does not yet know
275 276     # how to handle a request.
276 277 config.add_notfound_view(pylons_app_with_error_handler)
277 278
278 279 if settings.get('debugtoolbar.enabled', False):
279 280 # if toolbar, then only http type exceptions get caught and rendered
280 281 ExcClass = HTTPError
281 282 else:
282 283 # if no toolbar, then any exception gets caught and rendered
283 284 ExcClass = Exception
284 285 config.add_view(error_handler, context=ExcClass)
285 286
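The "Not Found" glue used above can be shown in isolation. A self-contained sketch with toy apps (none of this is RhodeCode code): a legacy WSGI application is wrapped with `wsgiapp` and registered as Pyramid's Not Found view, so any URL the new views do not handle falls through to it.

    from pyramid.config import Configurator
    from pyramid.response import Response
    from pyramid.wsgi import wsgiapp

    def legacy_wsgi_app(environ, start_response):
        # stands in for the old Pylons application
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'handled by the legacy app\n']

    def ported_view(request):
        return Response('handled by the new Pyramid view\n')

    config = Configurator()
    config.add_route('ported', '/ported')
    config.add_view(ported_view, route_name='ported')
    # unmatched requests fall back to the legacy app, as in includeme() above
    config.add_notfound_view(wsgiapp(legacy_wsgi_app))
    app = config.make_wsgi_app()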
286 287
287 288 def includeme_last(config):
288 289 """
289 290 The static file catchall needs to be last in the view configuration.
290 291 """
291 292 settings = config.registry.settings
292 293
293 294     # Note: johbo: I would prefer to register a prefix for static files at some
294 295     # point, e.g. move them under '_static/'. This would fully avoid name
295 296     # clashes with a repository name. Imagine someone calling his repo "css" ;-)
296 297     # Also, having an external web server serve the static files seems easier
297 298     # to set up if they have a common prefix.
298 299 #
299 300 # Example: config.add_static_view('_static', path='rhodecode:public')
300 301 #
301 302 # It might be an option to register both paths for a while and then migrate
302 303 # over to the new location.
303 304
304 305 # Serving static files with a catchall.
305 306 if settings['static_files']:
306 307 config.add_route('catchall_static', '/*subpath')
307 308 config.add_view(
308 309 static_view('rhodecode:public'), route_name='catchall_static')
309 310
310 311
311 312 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
312 313 """
313 314 Apply outer WSGI middlewares around the application.
314 315
315 316 Part of this has been moved up from the Pylons layer, so that the
316 317 data is also available if old Pylons code is hit through an already ported
317 318 view.
318 319 """
319 320 settings = config.registry.settings
320 321
321 322 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
322 323 pyramid_app = HttpsFixup(pyramid_app, settings)
323 324
324 325     # Add RoutesMiddleware to support the pylons compatibility tween during
326 327     # migration to pyramid.
327 328 pyramid_app = RoutesMiddleware(
328 329 pyramid_app, config.registry._pylons_compat_config['routes.map'])
329 330
330 331 if asbool(settings.get('appenlight', 'false')):
331 332 pyramid_app, _ = wrap_in_appenlight_if_enabled(
332 333 pyramid_app, config.registry._pylons_compat_config)
333 334
334 335 # TODO: johbo: Don't really see why we enable the gzip middleware when
335 336 # serving static files, might be something that should have its own setting
336 337 # as well?
337 338 if settings['static_files']:
338 339 pyramid_app = make_gzip_middleware(
339 340 pyramid_app, settings, compress_level=1)
340 341
341 342 return pyramid_app
342 343
343 344
344 345 def sanitize_settings_and_apply_defaults(settings):
345 346 """
346 347 Applies settings defaults and does all type conversion.
347 348
348 349 We would move all settings parsing and preparation into this place, so that
349 350 we have only one place left which deals with this part. The remaining parts
350 351 of the application would start to rely fully on well prepared settings.
351 352
352 353 This piece would later be split up per topic to avoid a big fat monster
353 354 function.
354 355 """
355 356
356 357 # Pyramid's mako renderer has to search in the templates folder so that the
357 358 # old templates still work. Ported and new templates are expected to use
358 359 # real asset specifications for the includes.
359 360 mako_directories = settings.setdefault('mako.directories', [
360 361 # Base templates of the original Pylons application
361 362 'rhodecode:templates',
362 363 ])
363 364 log.debug(
364 365 "Using the following Mako template directories: %s",
365 366 mako_directories)
366 367
367 368 # Default includes, possible to change as a user
368 369 pyramid_includes = settings.setdefault('pyramid.includes', [
369 370 'rhodecode.lib.middleware.request_wrapper',
370 371 ])
371 372 log.debug(
372 373 "Using the following pyramid.includes: %s",
373 374 pyramid_includes)
374 375
375 376     # TODO: johbo: Re-think this; usually the call to config.include
376 377     # should allow passing in a prefix.
377 378 settings.setdefault('rhodecode.api.url', '/_admin/api')
378 379
379 380 _bool_setting(settings, 'vcs.server.enable', 'true')
380 381 _bool_setting(settings, 'static_files', 'true')
381 382 _bool_setting(settings, 'is_test', 'false')
382 383
383 384 return settings
384 385
385 386
386 387 def _bool_setting(settings, name, default):
387 388 settings[name] = asbool(settings.get(name, default))
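For reference, `asbool` (imported from pyramid.settings at the top of this file) is what turns the ini strings into real booleans here; a tiny standalone check of its behaviour:

    from pyramid.settings import asbool

    # the usual ini spellings normalise to True; 'false' and None to False
    assert asbool('true') and asbool('True') and asbool('1')
    assert not asbool('false') and not asbool(None)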
@@ -1,1141 +1,1154 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Routes configuration
23 23
24 24 The more specific and detailed routes should be defined first so they
25 25 may take precedent over the more generic routes. For more information
26 26 refer to the routes manual at http://routes.groovie.org/docs/
27 27
28 28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 29 and the _route_name variable, which uses some of the route names stored here to do redirects.
30 30 """
31 31 import os
32 32 import re
33 33 from routes import Mapper
34 34
35 35 from rhodecode.config import routing_links
36 36
37 37 # prefix for non-repository-related links; it must start with a `/`
38 38 ADMIN_PREFIX = '/_admin'
39 39
40 40 # Default requirements for URL parts
41 41 URL_NAME_REQUIREMENTS = {
42 42     # group names can have slashes in them, but they must not end with a slash
43 43 'group_name': r'.*?[^/]',
44 44 # repo names can have a slash in them, but they must not end with a slash
45 45 'repo_name': r'.*?[^/]',
46 46 # file path eats up everything at the end
47 47 'f_path': r'.*',
48 48 # reference types
49 49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 51 }
52 52
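A quick standalone illustration (plain `re`, pattern copied from the dict above) of what these requirements accept: names may contain slashes but must not end in one.

    import re

    repo_name_re = re.compile(r'^.*?[^/]$')   # same pattern as 'repo_name' above
    assert repo_name_re.match('group/my-repo')
    assert not repo_name_re.match('group/my-repo/')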
53 53
54 def add_route_requirements(route_path, requirements):
55 """
56 Adds regex requirements to pyramid routes using a mapping dict
57
58 >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
59 '/{action}/{id:\d+}'
60
61 """
62 for key, regex in requirements.items():
63 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
64 return route_path
65
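A brief usage sketch for the helper above (the route name is hypothetical), combining it with a Pyramid add_route call so that the requirement regexes end up inline in the pattern:

    pattern = add_route_requirements(
        '/{repo_name}/settings', {'repo_name': r'.*?[^/]'})
    # pattern == '/{repo_name:.*?[^/]}'
    # config.add_route('repo_settings_example', pattern)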
66
54 67 class JSRoutesMapper(Mapper):
55 68 """
56 69 Wrapper for routes.Mapper to make pyroutes compatible url definitions
57 70 """
58 71 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
59 72 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
60 73 def __init__(self, *args, **kw):
61 74 super(JSRoutesMapper, self).__init__(*args, **kw)
62 75 self._jsroutes = []
63 76
64 77 def connect(self, *args, **kw):
65 78 """
66 79 Wrapper for connect to take an extra argument jsroute=True
67 80
68 81 :param jsroute: boolean, if True will add the route to the pyroutes list
69 82 """
70 83 if kw.pop('jsroute', False):
71 84 if not self._named_route_regex.match(args[0]):
72 85 raise Exception('only named routes can be added to pyroutes')
73 86 self._jsroutes.append(args[0])
74 87
75 88 super(JSRoutesMapper, self).connect(*args, **kw)
76 89
77 90 def _extract_route_information(self, route):
78 91 """
79 92 Convert a route into tuple(name, path, args), eg:
80 93 ('user_profile', '/profile/%(username)s', ['username'])
81 94 """
82 95 routepath = route.routepath
83 96 def replace(matchobj):
84 97 if matchobj.group(1):
85 98 return "%%(%s)s" % matchobj.group(1).split(':')[0]
86 99 else:
87 100 return "%%(%s)s" % matchobj.group(2)
88 101
89 102 routepath = self._argument_prog.sub(replace, routepath)
90 103 return (
91 104 route.name,
92 105 routepath,
93 106 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
94 107 for arg in self._argument_prog.findall(route.routepath)]
95 108 )
96 109
97 110 def jsroutes(self):
98 111 """
99 112 Return a list of pyroutes.js compatible routes
100 113 """
101 114 for route_name in self._jsroutes:
102 115 yield self._extract_route_information(self._routenames[route_name])
103 116
104 117
105 118 def make_map(config):
106 119 """Create, configure and return the routes Mapper"""
107 120 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
108 121 always_scan=config['debug'])
109 122 rmap.minimization = False
110 123 rmap.explicit = False
111 124
112 125 from rhodecode.lib.utils2 import str2bool
113 126 from rhodecode.model import repo, repo_group
114 127
115 128 def check_repo(environ, match_dict):
116 129 """
117 130 check for valid repository for proper 404 handling
118 131
119 132 :param environ:
120 133 :param match_dict:
121 134 """
122 135 repo_name = match_dict.get('repo_name')
123 136
124 137 if match_dict.get('f_path'):
125 138             # fix for multiple initial slashes that cause errors
126 139 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
127 140 repo_model = repo.RepoModel()
128 141 by_name_match = repo_model.get_by_repo_name(repo_name)
129 142 # if we match quickly from database, short circuit the operation,
130 143 # and validate repo based on the type.
131 144 if by_name_match:
132 145 return True
133 146
134 147 by_id_match = repo_model.get_repo_by_id(repo_name)
135 148 if by_id_match:
136 149 repo_name = by_id_match.repo_name
137 150 match_dict['repo_name'] = repo_name
138 151 return True
139 152
140 153 return False
141 154
142 155 def check_group(environ, match_dict):
143 156 """
144 157 check for valid repository group path for proper 404 handling
145 158
146 159 :param environ:
147 160 :param match_dict:
148 161 """
149 162 repo_group_name = match_dict.get('group_name')
150 163 repo_group_model = repo_group.RepoGroupModel()
151 164 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
152 165 if by_name_match:
153 166 return True
154 167
155 168 return False
156 169
157 170 def check_user_group(environ, match_dict):
158 171 """
159 172 check for valid user group for proper 404 handling
160 173
161 174 :param environ:
162 175 :param match_dict:
163 176 """
164 177 return True
165 178
166 179 def check_int(environ, match_dict):
167 180 return match_dict.get('id').isdigit()
168 181
169 182
170 183 #==========================================================================
171 184 # CUSTOM ROUTES HERE
172 185 #==========================================================================
173 186
174 187 # MAIN PAGE
175 188 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
176 189 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
177 190 action='goto_switcher_data')
178 191 rmap.connect('repo_list_data', '/_repos', controller='home',
179 192 action='repo_list_data')
180 193
181 194 rmap.connect('user_autocomplete_data', '/_users', controller='home',
182 195 action='user_autocomplete_data', jsroute=True)
183 196 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
184 197 action='user_group_autocomplete_data')
185 198
186 199 rmap.connect(
187 200 'user_profile', '/_profiles/{username}', controller='users',
188 201 action='user_profile')
189 202
190 203 # TODO: johbo: Static links, to be replaced by our redirection mechanism
191 204 rmap.connect('rst_help',
192 205 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
193 206 _static=True)
194 207 rmap.connect('markdown_help',
195 208 'http://daringfireball.net/projects/markdown/syntax',
196 209 _static=True)
197 210 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
198 211 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
199 212 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
200 213 # TODO: anderson - making this a static link since redirect won't play
201 214 # nice with POST requests
202 215 rmap.connect('enterprise_license_convert_from_old',
203 216 'https://rhodecode.com/u/license-upgrade',
204 217 _static=True)
205 218
206 219 routing_links.connect_redirection_links(rmap)
207 220
208 221 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
209 222 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
210 223
211 224 # ADMIN REPOSITORY ROUTES
212 225 with rmap.submapper(path_prefix=ADMIN_PREFIX,
213 226 controller='admin/repos') as m:
214 227 m.connect('repos', '/repos',
215 228 action='create', conditions={'method': ['POST']})
216 229 m.connect('repos', '/repos',
217 230 action='index', conditions={'method': ['GET']})
218 231 m.connect('new_repo', '/create_repository', jsroute=True,
219 232 action='create_repository', conditions={'method': ['GET']})
220 233 m.connect('/repos/{repo_name}',
221 234 action='update', conditions={'method': ['PUT'],
222 235 'function': check_repo},
223 236 requirements=URL_NAME_REQUIREMENTS)
224 237 m.connect('delete_repo', '/repos/{repo_name}',
225 238 action='delete', conditions={'method': ['DELETE']},
226 239 requirements=URL_NAME_REQUIREMENTS)
227 240 m.connect('repo', '/repos/{repo_name}',
228 241 action='show', conditions={'method': ['GET'],
229 242 'function': check_repo},
230 243 requirements=URL_NAME_REQUIREMENTS)
231 244
232 245 # ADMIN REPOSITORY GROUPS ROUTES
233 246 with rmap.submapper(path_prefix=ADMIN_PREFIX,
234 247 controller='admin/repo_groups') as m:
235 248 m.connect('repo_groups', '/repo_groups',
236 249 action='create', conditions={'method': ['POST']})
237 250 m.connect('repo_groups', '/repo_groups',
238 251 action='index', conditions={'method': ['GET']})
239 252 m.connect('new_repo_group', '/repo_groups/new',
240 253 action='new', conditions={'method': ['GET']})
241 254 m.connect('update_repo_group', '/repo_groups/{group_name}',
242 255 action='update', conditions={'method': ['PUT'],
243 256 'function': check_group},
244 257 requirements=URL_NAME_REQUIREMENTS)
245 258
246 259 # EXTRAS REPO GROUP ROUTES
247 260 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
248 261 action='edit',
249 262 conditions={'method': ['GET'], 'function': check_group},
250 263 requirements=URL_NAME_REQUIREMENTS)
251 264 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
252 265 action='edit',
253 266 conditions={'method': ['PUT'], 'function': check_group},
254 267 requirements=URL_NAME_REQUIREMENTS)
255 268
256 269 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
257 270 action='edit_repo_group_advanced',
258 271 conditions={'method': ['GET'], 'function': check_group},
259 272 requirements=URL_NAME_REQUIREMENTS)
260 273 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
261 274 action='edit_repo_group_advanced',
262 275 conditions={'method': ['PUT'], 'function': check_group},
263 276 requirements=URL_NAME_REQUIREMENTS)
264 277
265 278 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
266 279 action='edit_repo_group_perms',
267 280 conditions={'method': ['GET'], 'function': check_group},
268 281 requirements=URL_NAME_REQUIREMENTS)
269 282 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
270 283 action='update_perms',
271 284 conditions={'method': ['PUT'], 'function': check_group},
272 285 requirements=URL_NAME_REQUIREMENTS)
273 286
274 287 m.connect('delete_repo_group', '/repo_groups/{group_name}',
275 288 action='delete', conditions={'method': ['DELETE'],
276 289 'function': check_group},
277 290 requirements=URL_NAME_REQUIREMENTS)
278 291
279 292 # ADMIN USER ROUTES
280 293 with rmap.submapper(path_prefix=ADMIN_PREFIX,
281 294 controller='admin/users') as m:
282 295 m.connect('users', '/users',
283 296 action='create', conditions={'method': ['POST']})
284 297 m.connect('users', '/users',
285 298 action='index', conditions={'method': ['GET']})
286 299 m.connect('new_user', '/users/new',
287 300 action='new', conditions={'method': ['GET']})
288 301 m.connect('update_user', '/users/{user_id}',
289 302 action='update', conditions={'method': ['PUT']})
290 303 m.connect('delete_user', '/users/{user_id}',
291 304 action='delete', conditions={'method': ['DELETE']})
292 305 m.connect('edit_user', '/users/{user_id}/edit',
293 306 action='edit', conditions={'method': ['GET']})
294 307 m.connect('user', '/users/{user_id}',
295 308 action='show', conditions={'method': ['GET']})
296 309 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
297 310 action='reset_password', conditions={'method': ['POST']})
298 311 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
299 312 action='create_personal_repo_group', conditions={'method': ['POST']})
300 313
301 314 # EXTRAS USER ROUTES
302 315 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
303 316 action='edit_advanced', conditions={'method': ['GET']})
304 317 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
305 318 action='update_advanced', conditions={'method': ['PUT']})
306 319
307 320 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
308 321 action='edit_auth_tokens', conditions={'method': ['GET']})
309 322 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
310 323 action='add_auth_token', conditions={'method': ['PUT']})
311 324 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
312 325 action='delete_auth_token', conditions={'method': ['DELETE']})
313 326
314 327 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
315 328 action='edit_global_perms', conditions={'method': ['GET']})
316 329 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
317 330 action='update_global_perms', conditions={'method': ['PUT']})
318 331
319 332 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
320 333 action='edit_perms_summary', conditions={'method': ['GET']})
321 334
322 335 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
323 336 action='edit_emails', conditions={'method': ['GET']})
324 337 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
325 338 action='add_email', conditions={'method': ['PUT']})
326 339 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
327 340 action='delete_email', conditions={'method': ['DELETE']})
328 341
329 342 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
330 343 action='edit_ips', conditions={'method': ['GET']})
331 344 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
332 345 action='add_ip', conditions={'method': ['PUT']})
333 346 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
334 347 action='delete_ip', conditions={'method': ['DELETE']})
335 348
336 349 # ADMIN USER GROUPS REST ROUTES
337 350 with rmap.submapper(path_prefix=ADMIN_PREFIX,
338 351 controller='admin/user_groups') as m:
339 352 m.connect('users_groups', '/user_groups',
340 353 action='create', conditions={'method': ['POST']})
341 354 m.connect('users_groups', '/user_groups',
342 355 action='index', conditions={'method': ['GET']})
343 356 m.connect('new_users_group', '/user_groups/new',
344 357 action='new', conditions={'method': ['GET']})
345 358 m.connect('update_users_group', '/user_groups/{user_group_id}',
346 359 action='update', conditions={'method': ['PUT']})
347 360 m.connect('delete_users_group', '/user_groups/{user_group_id}',
348 361 action='delete', conditions={'method': ['DELETE']})
349 362 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
350 363 action='edit', conditions={'method': ['GET']},
351 364 function=check_user_group)
352 365
353 366 # EXTRAS USER GROUP ROUTES
354 367 m.connect('edit_user_group_global_perms',
355 368 '/user_groups/{user_group_id}/edit/global_permissions',
356 369 action='edit_global_perms', conditions={'method': ['GET']})
357 370 m.connect('edit_user_group_global_perms',
358 371 '/user_groups/{user_group_id}/edit/global_permissions',
359 372 action='update_global_perms', conditions={'method': ['PUT']})
360 373 m.connect('edit_user_group_perms_summary',
361 374 '/user_groups/{user_group_id}/edit/permissions_summary',
362 375 action='edit_perms_summary', conditions={'method': ['GET']})
363 376
364 377 m.connect('edit_user_group_perms',
365 378 '/user_groups/{user_group_id}/edit/permissions',
366 379 action='edit_perms', conditions={'method': ['GET']})
367 380 m.connect('edit_user_group_perms',
368 381 '/user_groups/{user_group_id}/edit/permissions',
369 382 action='update_perms', conditions={'method': ['PUT']})
370 383
371 384 m.connect('edit_user_group_advanced',
372 385 '/user_groups/{user_group_id}/edit/advanced',
373 386 action='edit_advanced', conditions={'method': ['GET']})
374 387
375 388 m.connect('edit_user_group_members',
376 389 '/user_groups/{user_group_id}/edit/members', jsroute=True,
377 390 action='edit_members', conditions={'method': ['GET']})
378 391
379 392 # ADMIN PERMISSIONS ROUTES
380 393 with rmap.submapper(path_prefix=ADMIN_PREFIX,
381 394 controller='admin/permissions') as m:
382 395 m.connect('admin_permissions_application', '/permissions/application',
383 396 action='permission_application_update', conditions={'method': ['POST']})
384 397 m.connect('admin_permissions_application', '/permissions/application',
385 398 action='permission_application', conditions={'method': ['GET']})
386 399
387 400 m.connect('admin_permissions_global', '/permissions/global',
388 401 action='permission_global_update', conditions={'method': ['POST']})
389 402 m.connect('admin_permissions_global', '/permissions/global',
390 403 action='permission_global', conditions={'method': ['GET']})
391 404
392 405 m.connect('admin_permissions_object', '/permissions/object',
393 406 action='permission_objects_update', conditions={'method': ['POST']})
394 407 m.connect('admin_permissions_object', '/permissions/object',
395 408 action='permission_objects', conditions={'method': ['GET']})
396 409
397 410 m.connect('admin_permissions_ips', '/permissions/ips',
398 411 action='permission_ips', conditions={'method': ['POST']})
399 412 m.connect('admin_permissions_ips', '/permissions/ips',
400 413 action='permission_ips', conditions={'method': ['GET']})
401 414
402 415 m.connect('admin_permissions_overview', '/permissions/overview',
403 416 action='permission_perms', conditions={'method': ['GET']})
404 417
405 418 # ADMIN DEFAULTS REST ROUTES
406 419 with rmap.submapper(path_prefix=ADMIN_PREFIX,
407 420 controller='admin/defaults') as m:
408 421 m.connect('admin_defaults_repositories', '/defaults/repositories',
409 422 action='update_repository_defaults', conditions={'method': ['POST']})
410 423 m.connect('admin_defaults_repositories', '/defaults/repositories',
411 424 action='index', conditions={'method': ['GET']})
412 425
413 426 # ADMIN DEBUG STYLE ROUTES
414 427 if str2bool(config.get('debug_style')):
415 428 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
416 429 controller='debug_style') as m:
417 430 m.connect('debug_style_home', '',
418 431 action='index', conditions={'method': ['GET']})
419 432 m.connect('debug_style_template', '/t/{t_path}',
420 433 action='template', conditions={'method': ['GET']})
421 434
422 435 # ADMIN SETTINGS ROUTES
423 436 with rmap.submapper(path_prefix=ADMIN_PREFIX,
424 437 controller='admin/settings') as m:
425 438
426 439 # default
427 440 m.connect('admin_settings', '/settings',
428 441 action='settings_global_update',
429 442 conditions={'method': ['POST']})
430 443 m.connect('admin_settings', '/settings',
431 444 action='settings_global', conditions={'method': ['GET']})
432 445
433 446 m.connect('admin_settings_vcs', '/settings/vcs',
434 447 action='settings_vcs_update',
435 448 conditions={'method': ['POST']})
436 449 m.connect('admin_settings_vcs', '/settings/vcs',
437 450 action='settings_vcs',
438 451 conditions={'method': ['GET']})
439 452 m.connect('admin_settings_vcs', '/settings/vcs',
440 453 action='delete_svn_pattern',
441 454 conditions={'method': ['DELETE']})
442 455
443 456 m.connect('admin_settings_mapping', '/settings/mapping',
444 457 action='settings_mapping_update',
445 458 conditions={'method': ['POST']})
446 459 m.connect('admin_settings_mapping', '/settings/mapping',
447 460 action='settings_mapping', conditions={'method': ['GET']})
448 461
449 462 m.connect('admin_settings_global', '/settings/global',
450 463 action='settings_global_update',
451 464 conditions={'method': ['POST']})
452 465 m.connect('admin_settings_global', '/settings/global',
453 466 action='settings_global', conditions={'method': ['GET']})
454 467
455 468 m.connect('admin_settings_visual', '/settings/visual',
456 469 action='settings_visual_update',
457 470 conditions={'method': ['POST']})
458 471 m.connect('admin_settings_visual', '/settings/visual',
459 472 action='settings_visual', conditions={'method': ['GET']})
460 473
461 474 m.connect('admin_settings_issuetracker',
462 475 '/settings/issue-tracker', action='settings_issuetracker',
463 476 conditions={'method': ['GET']})
464 477 m.connect('admin_settings_issuetracker_save',
465 478 '/settings/issue-tracker/save',
466 479 action='settings_issuetracker_save',
467 480 conditions={'method': ['POST']})
468 481 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
469 482 action='settings_issuetracker_test',
470 483 conditions={'method': ['POST']})
471 484 m.connect('admin_issuetracker_delete',
472 485 '/settings/issue-tracker/delete',
473 486 action='settings_issuetracker_delete',
474 487 conditions={'method': ['DELETE']})
475 488
476 489 m.connect('admin_settings_email', '/settings/email',
477 490 action='settings_email_update',
478 491 conditions={'method': ['POST']})
479 492 m.connect('admin_settings_email', '/settings/email',
480 493 action='settings_email', conditions={'method': ['GET']})
481 494
482 495 m.connect('admin_settings_hooks', '/settings/hooks',
483 496 action='settings_hooks_update',
484 497 conditions={'method': ['POST', 'DELETE']})
485 498 m.connect('admin_settings_hooks', '/settings/hooks',
486 499 action='settings_hooks', conditions={'method': ['GET']})
487 500
488 501 m.connect('admin_settings_search', '/settings/search',
489 502 action='settings_search', conditions={'method': ['GET']})
490 503
491 504 m.connect('admin_settings_system', '/settings/system',
492 505 action='settings_system', conditions={'method': ['GET']})
493 506
494 507 m.connect('admin_settings_system_update', '/settings/system/updates',
495 508 action='settings_system_update', conditions={'method': ['GET']})
496 509
497 510 m.connect('admin_settings_supervisor', '/settings/supervisor',
498 511 action='settings_supervisor', conditions={'method': ['GET']})
499 512 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
500 513 action='settings_supervisor_log', conditions={'method': ['GET']})
501 514
502 515 m.connect('admin_settings_labs', '/settings/labs',
503 516 action='settings_labs_update',
504 517 conditions={'method': ['POST']})
505 518 m.connect('admin_settings_labs', '/settings/labs',
506 519 action='settings_labs', conditions={'method': ['GET']})
507 520
508 521 # ADMIN MY ACCOUNT
509 522 with rmap.submapper(path_prefix=ADMIN_PREFIX,
510 523 controller='admin/my_account') as m:
511 524
512 525 m.connect('my_account', '/my_account',
513 526 action='my_account', conditions={'method': ['GET']})
514 527 m.connect('my_account_edit', '/my_account/edit',
515 528 action='my_account_edit', conditions={'method': ['GET']})
516 529 m.connect('my_account', '/my_account',
517 530 action='my_account_update', conditions={'method': ['POST']})
518 531
519 532 m.connect('my_account_password', '/my_account/password',
520 533 action='my_account_password', conditions={'method': ['GET']})
521 534 m.connect('my_account_password', '/my_account/password',
522 535 action='my_account_password_update', conditions={'method': ['POST']})
523 536
524 537 m.connect('my_account_repos', '/my_account/repos',
525 538 action='my_account_repos', conditions={'method': ['GET']})
526 539
527 540 m.connect('my_account_watched', '/my_account/watched',
528 541 action='my_account_watched', conditions={'method': ['GET']})
529 542
530 543 m.connect('my_account_pullrequests', '/my_account/pull_requests',
531 544 action='my_account_pullrequests', conditions={'method': ['GET']})
532 545
533 546 m.connect('my_account_perms', '/my_account/perms',
534 547 action='my_account_perms', conditions={'method': ['GET']})
535 548
536 549 m.connect('my_account_emails', '/my_account/emails',
537 550 action='my_account_emails', conditions={'method': ['GET']})
538 551 m.connect('my_account_emails', '/my_account/emails',
539 552 action='my_account_emails_add', conditions={'method': ['POST']})
540 553 m.connect('my_account_emails', '/my_account/emails',
541 554 action='my_account_emails_delete', conditions={'method': ['DELETE']})
542 555
543 556 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
544 557 action='my_account_auth_tokens', conditions={'method': ['GET']})
545 558 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
546 559 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
547 560 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
548 561 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
549 562
550 563 # NOTIFICATION REST ROUTES
551 564 with rmap.submapper(path_prefix=ADMIN_PREFIX,
552 565 controller='admin/notifications') as m:
553 566 m.connect('notifications', '/notifications',
554 567 action='index', conditions={'method': ['GET']})
555 568 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
556 569 action='mark_all_read', conditions={'method': ['POST']})
557 570
558 571 m.connect('/notifications/{notification_id}',
559 572 action='update', conditions={'method': ['PUT']})
560 573 m.connect('/notifications/{notification_id}',
561 574 action='delete', conditions={'method': ['DELETE']})
562 575 m.connect('notification', '/notifications/{notification_id}',
563 576 action='show', conditions={'method': ['GET']})
564 577
565 578 # ADMIN GIST
566 579 with rmap.submapper(path_prefix=ADMIN_PREFIX,
567 580 controller='admin/gists') as m:
568 581 m.connect('gists', '/gists',
569 582 action='create', conditions={'method': ['POST']})
570 583 m.connect('gists', '/gists', jsroute=True,
571 584 action='index', conditions={'method': ['GET']})
572 585 m.connect('new_gist', '/gists/new', jsroute=True,
573 586 action='new', conditions={'method': ['GET']})
574 587
575 588 m.connect('/gists/{gist_id}',
576 589 action='delete', conditions={'method': ['DELETE']})
577 590 m.connect('edit_gist', '/gists/{gist_id}/edit',
578 591 action='edit_form', conditions={'method': ['GET']})
579 592 m.connect('edit_gist', '/gists/{gist_id}/edit',
580 593 action='edit', conditions={'method': ['POST']})
581 594 m.connect(
582 595 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
583 596 action='check_revision', conditions={'method': ['GET']})
584 597
585 598 m.connect('gist', '/gists/{gist_id}',
586 599 action='show', conditions={'method': ['GET']})
587 600 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
588 601 revision='tip',
589 602 action='show', conditions={'method': ['GET']})
590 603 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
591 604 revision='tip',
592 605 action='show', conditions={'method': ['GET']})
593 606 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
594 607 revision='tip',
595 608 action='show', conditions={'method': ['GET']},
596 609 requirements=URL_NAME_REQUIREMENTS)
597 610
598 611 # ADMIN MAIN PAGES
599 612 with rmap.submapper(path_prefix=ADMIN_PREFIX,
600 613 controller='admin/admin') as m:
601 614 m.connect('admin_home', '', action='index')
602 615 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
603 616 action='add_repo')
604 617 m.connect(
605 618 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
606 619 action='pull_requests')
607 620 m.connect(
608 621 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
609 622 action='pull_requests')
610 623
611 624
612 625 # USER JOURNAL
613 626 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
614 627 controller='journal', action='index')
615 628 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
616 629 controller='journal', action='journal_rss')
617 630 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
618 631 controller='journal', action='journal_atom')
619 632
620 633 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
621 634 controller='journal', action='public_journal')
622 635
623 636 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
624 637 controller='journal', action='public_journal_rss')
625 638
626 639 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
627 640 controller='journal', action='public_journal_rss')
628 641
629 642 rmap.connect('public_journal_atom',
630 643 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
631 644 action='public_journal_atom')
632 645
633 646 rmap.connect('public_journal_atom_old',
634 647 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
635 648 action='public_journal_atom')
636 649
637 650 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
638 651 controller='journal', action='toggle_following', jsroute=True,
639 652 conditions={'method': ['POST']})
640 653
641 654 # FULL TEXT SEARCH
642 655 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
643 656 controller='search')
644 657 rmap.connect('search_repo_home', '/{repo_name}/search',
645 658 controller='search',
646 659 action='index',
647 660 conditions={'function': check_repo},
648 661 requirements=URL_NAME_REQUIREMENTS)
649 662
650 663 # FEEDS
651 664 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
652 665 controller='feed', action='rss',
653 666 conditions={'function': check_repo},
654 667 requirements=URL_NAME_REQUIREMENTS)
655 668
656 669 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
657 670 controller='feed', action='atom',
658 671 conditions={'function': check_repo},
659 672 requirements=URL_NAME_REQUIREMENTS)
660 673
661 674 #==========================================================================
662 675 # REPOSITORY ROUTES
663 676 #==========================================================================
664 677
665 678 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
666 679 controller='admin/repos', action='repo_creating',
667 680 requirements=URL_NAME_REQUIREMENTS)
668 681 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
669 682 controller='admin/repos', action='repo_check',
670 683 requirements=URL_NAME_REQUIREMENTS)
671 684
672 685 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
673 686 controller='summary', action='repo_stats',
674 687 conditions={'function': check_repo},
675 688 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
676 689
677 690 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
678 691 controller='summary', action='repo_refs_data', jsroute=True,
679 692 requirements=URL_NAME_REQUIREMENTS)
680 693 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
681 694 controller='summary', action='repo_refs_changelog_data',
682 695 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
683 696
684 697 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
685 698 controller='changeset', revision='tip', jsroute=True,
686 699 conditions={'function': check_repo},
687 700 requirements=URL_NAME_REQUIREMENTS)
688 701 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
689 702 controller='changeset', revision='tip', action='changeset_children',
690 703 conditions={'function': check_repo},
691 704 requirements=URL_NAME_REQUIREMENTS)
692 705 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
693 706 controller='changeset', revision='tip', action='changeset_parents',
694 707 conditions={'function': check_repo},
695 708 requirements=URL_NAME_REQUIREMENTS)
696 709
697 710 # repo edit options
698 711 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
699 712 controller='admin/repos', action='edit',
700 713 conditions={'method': ['GET'], 'function': check_repo},
701 714 requirements=URL_NAME_REQUIREMENTS)
702 715
703 716 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
704 717 jsroute=True,
705 718 controller='admin/repos', action='edit_permissions',
706 719 conditions={'method': ['GET'], 'function': check_repo},
707 720 requirements=URL_NAME_REQUIREMENTS)
708 721 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
709 722 controller='admin/repos', action='edit_permissions_update',
710 723 conditions={'method': ['PUT'], 'function': check_repo},
711 724 requirements=URL_NAME_REQUIREMENTS)
712 725
713 726 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
714 727 controller='admin/repos', action='edit_fields',
715 728 conditions={'method': ['GET'], 'function': check_repo},
716 729 requirements=URL_NAME_REQUIREMENTS)
717 730 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
718 731 controller='admin/repos', action='create_repo_field',
719 732 conditions={'method': ['PUT'], 'function': check_repo},
720 733 requirements=URL_NAME_REQUIREMENTS)
721 734 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
722 735 controller='admin/repos', action='delete_repo_field',
723 736 conditions={'method': ['DELETE'], 'function': check_repo},
724 737 requirements=URL_NAME_REQUIREMENTS)
725 738
726 739 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
727 740 controller='admin/repos', action='edit_advanced',
728 741 conditions={'method': ['GET'], 'function': check_repo},
729 742 requirements=URL_NAME_REQUIREMENTS)
730 743
731 744 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
732 745 controller='admin/repos', action='edit_advanced_locking',
733 746 conditions={'method': ['PUT'], 'function': check_repo},
734 747 requirements=URL_NAME_REQUIREMENTS)
735 748 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
736 749 controller='admin/repos', action='toggle_locking',
737 750 conditions={'method': ['GET'], 'function': check_repo},
738 751 requirements=URL_NAME_REQUIREMENTS)
739 752
740 753 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
741 754 controller='admin/repos', action='edit_advanced_journal',
742 755 conditions={'method': ['PUT'], 'function': check_repo},
743 756 requirements=URL_NAME_REQUIREMENTS)
744 757
745 758 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
746 759 controller='admin/repos', action='edit_advanced_fork',
747 760 conditions={'method': ['PUT'], 'function': check_repo},
748 761 requirements=URL_NAME_REQUIREMENTS)
749 762
750 763 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
751 764 controller='admin/repos', action='edit_caches_form',
752 765 conditions={'method': ['GET'], 'function': check_repo},
753 766 requirements=URL_NAME_REQUIREMENTS)
754 767 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
755 768 controller='admin/repos', action='edit_caches',
756 769 conditions={'method': ['PUT'], 'function': check_repo},
757 770 requirements=URL_NAME_REQUIREMENTS)
758 771
759 772 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
760 773 controller='admin/repos', action='edit_remote_form',
761 774 conditions={'method': ['GET'], 'function': check_repo},
762 775 requirements=URL_NAME_REQUIREMENTS)
763 776 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
764 777 controller='admin/repos', action='edit_remote',
765 778 conditions={'method': ['PUT'], 'function': check_repo},
766 779 requirements=URL_NAME_REQUIREMENTS)
767 780
768 781 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
769 782 controller='admin/repos', action='edit_statistics_form',
770 783 conditions={'method': ['GET'], 'function': check_repo},
771 784 requirements=URL_NAME_REQUIREMENTS)
772 785 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
773 786 controller='admin/repos', action='edit_statistics',
774 787 conditions={'method': ['PUT'], 'function': check_repo},
775 788 requirements=URL_NAME_REQUIREMENTS)
776 789 rmap.connect('repo_settings_issuetracker',
777 790 '/{repo_name}/settings/issue-tracker',
778 791 controller='admin/repos', action='repo_issuetracker',
779 792 conditions={'method': ['GET'], 'function': check_repo},
780 793 requirements=URL_NAME_REQUIREMENTS)
781 794 rmap.connect('repo_issuetracker_test',
782 795 '/{repo_name}/settings/issue-tracker/test',
783 796 controller='admin/repos', action='repo_issuetracker_test',
784 797 conditions={'method': ['POST'], 'function': check_repo},
785 798 requirements=URL_NAME_REQUIREMENTS)
786 799 rmap.connect('repo_issuetracker_delete',
787 800 '/{repo_name}/settings/issue-tracker/delete',
788 801 controller='admin/repos', action='repo_issuetracker_delete',
789 802 conditions={'method': ['DELETE'], 'function': check_repo},
790 803 requirements=URL_NAME_REQUIREMENTS)
791 804 rmap.connect('repo_issuetracker_save',
792 805 '/{repo_name}/settings/issue-tracker/save',
793 806 controller='admin/repos', action='repo_issuetracker_save',
794 807 conditions={'method': ['POST'], 'function': check_repo},
795 808 requirements=URL_NAME_REQUIREMENTS)
796 809 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
797 810 controller='admin/repos', action='repo_settings_vcs_update',
798 811 conditions={'method': ['POST'], 'function': check_repo},
799 812 requirements=URL_NAME_REQUIREMENTS)
800 813 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
801 814 controller='admin/repos', action='repo_settings_vcs',
802 815 conditions={'method': ['GET'], 'function': check_repo},
803 816 requirements=URL_NAME_REQUIREMENTS)
804 817 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
805 818 controller='admin/repos', action='repo_delete_svn_pattern',
806 819 conditions={'method': ['DELETE'], 'function': check_repo},
807 820 requirements=URL_NAME_REQUIREMENTS)
808 821
809 822 # still working url for backward compat.
810 823 rmap.connect('raw_changeset_home_depraced',
811 824 '/{repo_name}/raw-changeset/{revision}',
812 825 controller='changeset', action='changeset_raw',
813 826 revision='tip', conditions={'function': check_repo},
814 827 requirements=URL_NAME_REQUIREMENTS)
815 828
816 829 # new URLs
817 830 rmap.connect('changeset_raw_home',
818 831 '/{repo_name}/changeset-diff/{revision}',
819 832 controller='changeset', action='changeset_raw',
820 833 revision='tip', conditions={'function': check_repo},
821 834 requirements=URL_NAME_REQUIREMENTS)
822 835
823 836 rmap.connect('changeset_patch_home',
824 837 '/{repo_name}/changeset-patch/{revision}',
825 838 controller='changeset', action='changeset_patch',
826 839 revision='tip', conditions={'function': check_repo},
827 840 requirements=URL_NAME_REQUIREMENTS)
828 841
829 842 rmap.connect('changeset_download_home',
830 843 '/{repo_name}/changeset-download/{revision}',
831 844 controller='changeset', action='changeset_download',
832 845 revision='tip', conditions={'function': check_repo},
833 846 requirements=URL_NAME_REQUIREMENTS)
834 847
835 848 rmap.connect('changeset_comment',
836 849 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
837 850 controller='changeset', revision='tip', action='comment',
838 851 conditions={'function': check_repo},
839 852 requirements=URL_NAME_REQUIREMENTS)
840 853
841 854 rmap.connect('changeset_comment_preview',
842 855 '/{repo_name}/changeset/comment/preview', jsroute=True,
843 856 controller='changeset', action='preview_comment',
844 857 conditions={'function': check_repo, 'method': ['POST']},
845 858 requirements=URL_NAME_REQUIREMENTS)
846 859
847 860 rmap.connect('changeset_comment_delete',
848 861 '/{repo_name}/changeset/comment/{comment_id}/delete',
849 862 controller='changeset', action='delete_comment',
850 863 conditions={'function': check_repo, 'method': ['DELETE']},
851 864 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
852 865
853 866 rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
854 867 controller='changeset', action='changeset_info',
855 868 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
856 869
857 870 rmap.connect('compare_home',
858 871 '/{repo_name}/compare',
859 872 controller='compare', action='index',
860 873 conditions={'function': check_repo},
861 874 requirements=URL_NAME_REQUIREMENTS)
862 875
863 876 rmap.connect('compare_url',
864 877 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
865 878 controller='compare', action='compare',
866 879 conditions={'function': check_repo},
867 880 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
868 881
869 882 rmap.connect('pullrequest_home',
870 883 '/{repo_name}/pull-request/new', controller='pullrequests',
871 884 action='index', conditions={'function': check_repo,
872 885 'method': ['GET']},
873 886 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
874 887
875 888 rmap.connect('pullrequest',
876 889 '/{repo_name}/pull-request/new', controller='pullrequests',
877 890 action='create', conditions={'function': check_repo,
878 891 'method': ['POST']},
879 892 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
880 893
881 894 rmap.connect('pullrequest_repo_refs',
882 895 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
883 896 controller='pullrequests',
884 897 action='get_repo_refs',
885 898 conditions={'function': check_repo, 'method': ['GET']},
886 899 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
887 900
888 901 rmap.connect('pullrequest_repo_destinations',
889 902 '/{repo_name}/pull-request/repo-destinations',
890 903 controller='pullrequests',
891 904 action='get_repo_destinations',
892 905 conditions={'function': check_repo, 'method': ['GET']},
893 906 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
894 907
895 908 rmap.connect('pullrequest_show',
896 909 '/{repo_name}/pull-request/{pull_request_id}',
897 910 controller='pullrequests',
898 911 action='show', conditions={'function': check_repo,
899 912 'method': ['GET']},
900 913 requirements=URL_NAME_REQUIREMENTS)
901 914
902 915 rmap.connect('pullrequest_update',
903 916 '/{repo_name}/pull-request/{pull_request_id}',
904 917 controller='pullrequests',
905 918 action='update', conditions={'function': check_repo,
906 919 'method': ['PUT']},
907 920 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
908 921
909 922 rmap.connect('pullrequest_merge',
910 923 '/{repo_name}/pull-request/{pull_request_id}',
911 924 controller='pullrequests',
912 925 action='merge', conditions={'function': check_repo,
913 926 'method': ['POST']},
914 927 requirements=URL_NAME_REQUIREMENTS)
915 928
916 929 rmap.connect('pullrequest_delete',
917 930 '/{repo_name}/pull-request/{pull_request_id}',
918 931 controller='pullrequests',
919 932 action='delete', conditions={'function': check_repo,
920 933 'method': ['DELETE']},
921 934 requirements=URL_NAME_REQUIREMENTS)
922 935
923 936 rmap.connect('pullrequest_show_all',
924 937 '/{repo_name}/pull-request',
925 938 controller='pullrequests',
926 939 action='show_all', conditions={'function': check_repo,
927 940 'method': ['GET']},
928 941 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
929 942
930 943 rmap.connect('pullrequest_comment',
931 944 '/{repo_name}/pull-request-comment/{pull_request_id}',
932 945 controller='pullrequests',
933 946 action='comment', conditions={'function': check_repo,
934 947 'method': ['POST']},
935 948 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
936 949
937 950 rmap.connect('pullrequest_comment_delete',
938 951 '/{repo_name}/pull-request-comment/{comment_id}/delete',
939 952 controller='pullrequests', action='delete_comment',
940 953 conditions={'function': check_repo, 'method': ['DELETE']},
941 954 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
942 955
943 956 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
944 957 controller='summary', conditions={'function': check_repo},
945 958 requirements=URL_NAME_REQUIREMENTS)
946 959
947 960 rmap.connect('branches_home', '/{repo_name}/branches',
948 961 controller='branches', conditions={'function': check_repo},
949 962 requirements=URL_NAME_REQUIREMENTS)
950 963
951 964 rmap.connect('tags_home', '/{repo_name}/tags',
952 965 controller='tags', conditions={'function': check_repo},
953 966 requirements=URL_NAME_REQUIREMENTS)
954 967
955 968 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
956 969 controller='bookmarks', conditions={'function': check_repo},
957 970 requirements=URL_NAME_REQUIREMENTS)
958 971
959 972 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
960 973 controller='changelog', conditions={'function': check_repo},
961 974 requirements=URL_NAME_REQUIREMENTS)
962 975
963 976 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
964 977 controller='changelog', action='changelog_summary',
965 978 conditions={'function': check_repo},
966 979 requirements=URL_NAME_REQUIREMENTS)
967 980
968 981 rmap.connect('changelog_file_home',
969 982 '/{repo_name}/changelog/{revision}/{f_path}',
970 983 controller='changelog', f_path=None,
971 984 conditions={'function': check_repo},
972 985 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
973 986
974 987 rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
975 988 controller='changelog', action='changelog_details',
976 989 conditions={'function': check_repo},
977 990 requirements=URL_NAME_REQUIREMENTS)
978 991
979 992 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
980 993 controller='files', revision='tip', f_path='',
981 994 conditions={'function': check_repo},
982 995 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
983 996
984 997 rmap.connect('files_home_simple_catchrev',
985 998 '/{repo_name}/files/{revision}',
986 999 controller='files', revision='tip', f_path='',
987 1000 conditions={'function': check_repo},
988 1001 requirements=URL_NAME_REQUIREMENTS)
989 1002
990 1003 rmap.connect('files_home_simple_catchall',
991 1004 '/{repo_name}/files',
992 1005 controller='files', revision='tip', f_path='',
993 1006 conditions={'function': check_repo},
994 1007 requirements=URL_NAME_REQUIREMENTS)
995 1008
996 1009 rmap.connect('files_history_home',
997 1010 '/{repo_name}/history/{revision}/{f_path}',
998 1011 controller='files', action='history', revision='tip', f_path='',
999 1012 conditions={'function': check_repo},
1000 1013 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1001 1014
1002 1015 rmap.connect('files_authors_home',
1003 1016 '/{repo_name}/authors/{revision}/{f_path}',
1004 1017 controller='files', action='authors', revision='tip', f_path='',
1005 1018 conditions={'function': check_repo},
1006 1019 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1007 1020
1008 1021 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
1009 1022 controller='files', action='diff', f_path='',
1010 1023 conditions={'function': check_repo},
1011 1024 requirements=URL_NAME_REQUIREMENTS)
1012 1025
1013 1026 rmap.connect('files_diff_2way_home',
1014 1027 '/{repo_name}/diff-2way/{f_path}',
1015 1028 controller='files', action='diff_2way', f_path='',
1016 1029 conditions={'function': check_repo},
1017 1030 requirements=URL_NAME_REQUIREMENTS)
1018 1031
1019 1032 rmap.connect('files_rawfile_home',
1020 1033 '/{repo_name}/rawfile/{revision}/{f_path}',
1021 1034 controller='files', action='rawfile', revision='tip',
1022 1035 f_path='', conditions={'function': check_repo},
1023 1036 requirements=URL_NAME_REQUIREMENTS)
1024 1037
1025 1038 rmap.connect('files_raw_home',
1026 1039 '/{repo_name}/raw/{revision}/{f_path}',
1027 1040 controller='files', action='raw', revision='tip', f_path='',
1028 1041 conditions={'function': check_repo},
1029 1042 requirements=URL_NAME_REQUIREMENTS)
1030 1043
1031 1044 rmap.connect('files_render_home',
1032 1045 '/{repo_name}/render/{revision}/{f_path}',
1033 1046 controller='files', action='index', revision='tip', f_path='',
1034 1047 rendered=True, conditions={'function': check_repo},
1035 1048 requirements=URL_NAME_REQUIREMENTS)
1036 1049
1037 1050 rmap.connect('files_annotate_home',
1038 1051 '/{repo_name}/annotate/{revision}/{f_path}',
1039 1052 controller='files', action='index', revision='tip',
1040 1053 f_path='', annotate=True, conditions={'function': check_repo},
1041 1054 requirements=URL_NAME_REQUIREMENTS)
1042 1055
1043 1056 rmap.connect('files_edit',
1044 1057 '/{repo_name}/edit/{revision}/{f_path}',
1045 1058 controller='files', action='edit', revision='tip',
1046 1059 f_path='',
1047 1060 conditions={'function': check_repo, 'method': ['POST']},
1048 1061 requirements=URL_NAME_REQUIREMENTS)
1049 1062
1050 1063 rmap.connect('files_edit_home',
1051 1064 '/{repo_name}/edit/{revision}/{f_path}',
1052 1065 controller='files', action='edit_home', revision='tip',
1053 1066 f_path='', conditions={'function': check_repo},
1054 1067 requirements=URL_NAME_REQUIREMENTS)
1055 1068
1056 1069 rmap.connect('files_add',
1057 1070 '/{repo_name}/add/{revision}/{f_path}',
1058 1071 controller='files', action='add', revision='tip',
1059 1072 f_path='',
1060 1073 conditions={'function': check_repo, 'method': ['POST']},
1061 1074 requirements=URL_NAME_REQUIREMENTS)
1062 1075
1063 1076 rmap.connect('files_add_home',
1064 1077 '/{repo_name}/add/{revision}/{f_path}',
1065 1078 controller='files', action='add_home', revision='tip',
1066 1079 f_path='', conditions={'function': check_repo},
1067 1080 requirements=URL_NAME_REQUIREMENTS)
1068 1081
1069 1082 rmap.connect('files_delete',
1070 1083 '/{repo_name}/delete/{revision}/{f_path}',
1071 1084 controller='files', action='delete', revision='tip',
1072 1085 f_path='',
1073 1086 conditions={'function': check_repo, 'method': ['POST']},
1074 1087 requirements=URL_NAME_REQUIREMENTS)
1075 1088
1076 1089 rmap.connect('files_delete_home',
1077 1090 '/{repo_name}/delete/{revision}/{f_path}',
1078 1091 controller='files', action='delete_home', revision='tip',
1079 1092 f_path='', conditions={'function': check_repo},
1080 1093 requirements=URL_NAME_REQUIREMENTS)
1081 1094
1082 1095 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1083 1096 controller='files', action='archivefile',
1084 1097 conditions={'function': check_repo},
1085 1098 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1086 1099
1087 1100 rmap.connect('files_nodelist_home',
1088 1101 '/{repo_name}/nodelist/{revision}/{f_path}',
1089 1102 controller='files', action='nodelist',
1090 1103 conditions={'function': check_repo},
1091 1104 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1092 1105
1093 1106 rmap.connect('files_metadata_list_home',
1094 1107 '/{repo_name}/metadata_list/{revision}/{f_path}',
1095 1108 controller='files', action='metadata_list',
1096 1109 conditions={'function': check_repo},
1097 1110 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1098 1111
1099 1112 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1100 1113 controller='forks', action='fork_create',
1101 1114 conditions={'function': check_repo, 'method': ['POST']},
1102 1115 requirements=URL_NAME_REQUIREMENTS)
1103 1116
1104 1117 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1105 1118 controller='forks', action='fork',
1106 1119 conditions={'function': check_repo},
1107 1120 requirements=URL_NAME_REQUIREMENTS)
1108 1121
1109 1122 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1110 1123 controller='forks', action='forks',
1111 1124 conditions={'function': check_repo},
1112 1125 requirements=URL_NAME_REQUIREMENTS)
1113 1126
1114 1127 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1115 1128 controller='followers', action='followers',
1116 1129 conditions={'function': check_repo},
1117 1130 requirements=URL_NAME_REQUIREMENTS)
1118 1131
1119 1132 # must be here for proper group/repo catching pattern
1120 1133 _connect_with_slash(
1121 1134 rmap, 'repo_group_home', '/{group_name}',
1122 1135 controller='home', action='index_repo_group',
1123 1136 conditions={'function': check_group},
1124 1137 requirements=URL_NAME_REQUIREMENTS)
1125 1138
1126 1139 # catch all, at the end
1127 1140 _connect_with_slash(
1128 1141 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1129 1142 controller='summary', action='index',
1130 1143 conditions={'function': check_repo},
1131 1144 requirements=URL_NAME_REQUIREMENTS)
1132 1145
1133 1146 return rmap
1134 1147
1135 1148
1136 1149 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1137 1150 """
1138 1151 Connect a route with an optional trailing slash in `path`.
1139 1152 """
1140 1153 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1141 1154 mapper.connect(name, path, *args, **kwargs)
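
For clarity, `_connect_with_slash` simply registers the same route twice, once with and once without a trailing slash. A minimal sketch of what the catch-all registration above expands to (the standalone `Mapper()` is only for illustration):

    from routes import Mapper

    rmap = Mapper()
    _connect_with_slash(
        rmap, 'summary_home', '/{repo_name}',
        controller='summary', action='index')
    # equivalent to:
    #   rmap.connect('summary_home_slash', '/{repo_name}/',
    #                controller='summary', action='index')
    #   rmap.connect('summary_home', '/{repo_name}',
    #                controller='summary', action='index')
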
@@ -1,57 +1,66 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from pyramid.threadlocal import get_current_registry
20 20
21 21
22 def trigger(event):
22 def trigger(event, registry=None):
23 23 """
24 24     Helper method to send an event, wrapping the pyramid logic used to
25 25     notify subscribers.
26 26 """
27 27     # For the first step we are using pyramid's thread locals here. If the
28 28 # event mechanism works out as a good solution we should think about
29 29 # passing the registry as an argument to get rid of it.
30 registry = get_current_registry()
30 registry = registry or get_current_registry()
31 31 registry.notify(event)
32 32
33 # Until we can work around the problem that VCS operations do not have a
34 # pyramid context to work with, we send the events to integrations directly
35
36     # Later it will be possible to use regular pyramid subscribers, i.e.:
37 # config.add_subscriber(integrations_event_handler, RhodecodeEvent)
38 from rhodecode.integrations import integrations_event_handler
39 if isinstance(event, RhodecodeEvent):
40 integrations_event_handler(event)
41
33 42
34 43 from rhodecode.events.base import RhodecodeEvent
35 44
36 45 from rhodecode.events.user import (
37 46 UserPreCreate,
38 47 UserPreUpdate,
39 48 UserRegistered
40 49 )
41 50
42 51 from rhodecode.events.repo import (
43 52 RepoEvent,
44 RepoPreCreateEvent, RepoCreatedEvent,
45 RepoPreDeleteEvent, RepoDeletedEvent,
53 RepoPreCreateEvent, RepoCreateEvent,
54 RepoPreDeleteEvent, RepoDeleteEvent,
46 55 RepoPrePushEvent, RepoPushEvent,
47 56 RepoPrePullEvent, RepoPullEvent,
48 57 )
49 58
50 59 from rhodecode.events.pullrequest import (
51 60 PullRequestEvent,
52 61 PullRequestCreateEvent,
53 62 PullRequestUpdateEvent,
54 63 PullRequestReviewEvent,
55 64 PullRequestMergeEvent,
56 65 PullRequestCloseEvent,
57 ) No newline at end of file
66 )
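
As a usage sketch (the `repo` object and the explicit `some_registry` below are assumptions for illustration), firing an event through `trigger` notifies pyramid subscribers and, for `RhodecodeEvent` subclasses, also hands the event to the enabled integrations:

    from rhodecode import events

    # regular application code relies on the thread-local registry
    events.trigger(events.RepoCreateEvent(repo))

    # tests may pass a registry explicitly instead of using thread locals
    events.trigger(events.RepoCreateEvent(repo), registry=some_registry)
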
@@ -1,71 +1,59 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from datetime import datetime
20 from marshmallow import Schema, fields
21 20 from pyramid.threadlocal import get_current_request
22 21 from rhodecode.lib.utils2 import AttributeDict
23 22
24 23
25 24 # this is a user object to be used for events caused by the system (eg. shell)
26 25 SYSTEM_USER = AttributeDict(dict(
27 26 username='__SYSTEM__'
28 27 ))
29 28
30 29
31 class UserSchema(Schema):
32 """
33 Marshmallow schema for a user
34 """
35 username = fields.Str()
36
37
38 class RhodecodeEventSchema(Schema):
39 """
40 Marshmallow schema for a rhodecode event
41 """
42 utc_timestamp = fields.DateTime()
43 actor = fields.Nested(UserSchema)
44 actor_ip = fields.Str()
45 name = fields.Str()
46
47
48 30 class RhodecodeEvent(object):
49 31 """
50 32 Base event class for all Rhodecode events
51 33 """
52 MarshmallowSchema = RhodecodeEventSchema
53
54 34 def __init__(self):
55 35 self.request = get_current_request()
56 36 self.utc_timestamp = datetime.utcnow()
57 37
58 38 @property
59 39 def actor(self):
60 40 if self.request:
61 41 return self.request.user.get_instance()
62 42 return SYSTEM_USER
63 43
64 44 @property
65 45 def actor_ip(self):
66 46 if self.request:
67 47 return self.request.user.ip_addr
68 48 return '<no ip available>'
69 49
70 50 def as_dict(self):
71 return self.MarshmallowSchema().dump(self).data
51 data = {
52 'name': self.name,
53 'utc_timestamp': self.utc_timestamp,
54 'actor_ip': self.actor_ip,
55 'actor': {
56 'username': self.actor.username
57 }
58 }
59 return data No newline at end of file
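
The hand-rolled `as_dict()` above replaces the removed marshmallow schemas; for a plain event it yields a structure roughly like the following (all values are illustrative only):

    {
        'name': 'user-register',
        'utc_timestamp': datetime.datetime(2016, 5, 1, 12, 0, 0),
        'actor_ip': '127.0.0.1',
        'actor': {'username': 'admin'},
    }
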
@@ -1,97 +1,97 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
20 19
20 from rhodecode.translation import lazy_ugettext
21 21 from rhodecode.events.repo import RepoEvent
22 22
23 23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
45 24 class PullRequestEvent(RepoEvent):
46 25 """
47 26 Base class for pull request events.
48 27
49 28 :param pullrequest: a :class:`PullRequest` instance
50 29 """
51 MarshmallowSchema = PullRequestEventSchema
52 30
53 31 def __init__(self, pullrequest):
54 32 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
55 33 self.pullrequest = pullrequest
56 34
35 def as_dict(self):
36 from rhodecode.model.pull_request import PullRequestModel
37 data = super(PullRequestEvent, self).as_dict()
38
39 commits = self._commits_as_dict(self.pullrequest.revisions)
40 issues = self._issues_as_dict(commits)
41
42 data.update({
43 'pullrequest': {
44 'title': self.pullrequest.title,
45 'issues': issues,
46 'pull_request_id': self.pullrequest.pull_request_id,
47 'url': PullRequestModel().get_url(self.pullrequest)
48 }
49 })
50 return data
51
57 52
58 53 class PullRequestCreateEvent(PullRequestEvent):
59 54 """
60 55 An instance of this class is emitted as an :term:`event` after a pull
61 56 request is created.
62 57 """
63 58 name = 'pullrequest-create'
59 display_name = lazy_ugettext('pullrequest created')
64 60
65 61
66 62 class PullRequestCloseEvent(PullRequestEvent):
67 63 """
68 64 An instance of this class is emitted as an :term:`event` after a pull
69 65 request is closed.
70 66 """
71 67 name = 'pullrequest-close'
68 display_name = lazy_ugettext('pullrequest closed')
72 69
73 70
74 71 class PullRequestUpdateEvent(PullRequestEvent):
75 72 """
76 73 An instance of this class is emitted as an :term:`event` after a pull
77 74 request is updated.
78 75 """
79 76 name = 'pullrequest-update'
77 display_name = lazy_ugettext('pullrequest updated')
80 78
81 79
82 80 class PullRequestMergeEvent(PullRequestEvent):
83 81 """
84 82 An instance of this class is emitted as an :term:`event` after a pull
85 83 request is merged.
86 84 """
87 85 name = 'pullrequest-merge'
86 display_name = lazy_ugettext('pullrequest merged')
88 87
89 88
90 89 class PullRequestReviewEvent(PullRequestEvent):
91 90 """
92 91 An instance of this class is emitted as an :term:`event` after a pull
93 92 request is reviewed.
94 93 """
95 94 name = 'pullrequest-review'
95 display_name = lazy_ugettext('pullrequest reviewed')
96 96
97 97
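
A hypothetical subscriber sketch for the pull request events above (the handler and `includeme` hook are made up; `config.add_subscriber` is the standard pyramid API):

    import logging

    from rhodecode import events

    log = logging.getLogger(__name__)

    def log_pullrequest_activity(event):
        # event.as_dict() carries the repo, pullrequest, actor and issue data
        data = event.as_dict()
        log.info('pull request %s: %s',
                 data['pullrequest']['pull_request_id'],
                 data['pullrequest']['url'])

    def includeme(config):
        # fires for create/update/review/merge/close, which share the base class
        config.add_subscriber(log_pullrequest_activity, events.PullRequestEvent)
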
@@ -1,149 +1,219 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 from marshmallow import Schema, fields
19 import logging
20 20
21 from rhodecode.translation import lazy_ugettext
21 22 from rhodecode.model.db import User, Repository, Session
22 23 from rhodecode.events.base import RhodecodeEvent
23 24
24
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repo = fields.Nested(RepositorySchema)
25 log = logging.getLogger(__name__)
44 26
45 27
46 28 class RepoEvent(RhodecodeEvent):
47 29 """
48 30 Base class for events acting on a repository.
49 31
50 32 :param repo: a :class:`Repository` instance
51 33 """
52 MarshmallowSchema = RepoEventSchema
53 34
54 35 def __init__(self, repo):
55 36 super(RepoEvent, self).__init__()
56 37 self.repo = repo
57 38
39 def as_dict(self):
40 from rhodecode.model.repo import RepoModel
41 data = super(RepoEvent, self).as_dict()
42 data.update({
43 'repo': {
44 'repo_id': self.repo.repo_id,
45 'repo_name': self.repo.repo_name,
46 'url': RepoModel().get_url(self.repo)
47 }
48 })
49 return data
50
51 def _commits_as_dict(self, commit_ids):
52 """ Helper function to serialize commit_ids """
53
54 from rhodecode.lib.utils2 import extract_mentioned_users
55 from rhodecode.model.db import Repository
56 from rhodecode.lib import helpers as h
57 from rhodecode.lib.helpers import process_patterns
58 from rhodecode.lib.helpers import urlify_commit_message
59 if not commit_ids:
60 return []
61 commits = []
62 reviewers = []
63 vcs_repo = self.repo.scm_instance(cache=False)
64 try:
65 for commit_id in commit_ids:
66 cs = vcs_repo.get_changeset(commit_id)
67 cs_data = cs.__json__()
68 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
69 cs_data['reviewers'] = reviewers
70 cs_data['url'] = h.url('changeset_home',
71 repo_name=self.repo.repo_name,
72 revision=cs_data['raw_id'],
73 qualified=True
74 )
75 urlified_message, issues_data = process_patterns(
76 cs_data['message'], self.repo.repo_name)
77 cs_data['issues'] = issues_data
78 cs_data['message_html'] = urlify_commit_message(cs_data['message'],
79 self.repo.repo_name)
80 commits.append(cs_data)
81 except Exception as e:
82 log.exception(e)
83             # we don't send any commits when a crash happens; only the full
84             # list matters, so we short-circuit and return an empty list.
85 return []
86 return commits
87
88 def _issues_as_dict(self, commits):
89 """ Helper function to serialize issues from commits """
90 issues = {}
91 for commit in commits:
92 for issue in commit['issues']:
93 issues[issue['id']] = issue
94 return issues
95
58 96
59 97 class RepoPreCreateEvent(RepoEvent):
60 98 """
61 99 An instance of this class is emitted as an :term:`event` before a repo is
62 100 created.
63 101 """
64 102 name = 'repo-pre-create'
103 display_name = lazy_ugettext('repository pre create')
65 104
66 105
67 class RepoCreatedEvent(RepoEvent):
106 class RepoCreateEvent(RepoEvent):
68 107 """
69 108 An instance of this class is emitted as an :term:`event` whenever a repo is
70 109 created.
71 110 """
72 name = 'repo-created'
111 name = 'repo-create'
112 display_name = lazy_ugettext('repository created')
73 113
74 114
75 115 class RepoPreDeleteEvent(RepoEvent):
76 116 """
77 117     An instance of this class is emitted as an :term:`event` before a repo is
78 118     deleted.
79 119 """
80 120 name = 'repo-pre-delete'
121 display_name = lazy_ugettext('repository pre delete')
81 122
82 123
83 class RepoDeletedEvent(RepoEvent):
124 class RepoDeleteEvent(RepoEvent):
84 125 """
85 126     An instance of this class is emitted as an :term:`event` whenever a repo is
86 127     deleted.
87 128 """
88 name = 'repo-deleted'
129 name = 'repo-delete'
130 display_name = lazy_ugettext('repository deleted')
89 131
90 132
91 133 class RepoVCSEvent(RepoEvent):
92 134 """
93 135 Base class for events triggered by the VCS
94 136 """
95 137 def __init__(self, repo_name, extras):
96 138 self.repo = Repository.get_by_repo_name(repo_name)
97 139 if not self.repo:
98 140 raise Exception('repo by this name %s does not exist' % repo_name)
99 141 self.extras = extras
100 142 super(RepoVCSEvent, self).__init__(self.repo)
101 143
102 144 @property
103 145 def actor(self):
104 146 if self.extras.get('username'):
105 147 return User.get_by_username(self.extras['username'])
106 148
107 149 @property
108 150 def actor_ip(self):
109 151 if self.extras.get('ip'):
110 152 return self.extras['ip']
111 153
112 154
113 155 class RepoPrePullEvent(RepoVCSEvent):
114 156 """
115 157 An instance of this class is emitted as an :term:`event` before commits
116 158 are pulled from a repo.
117 159 """
118 160 name = 'repo-pre-pull'
161 display_name = lazy_ugettext('repository pre pull')
119 162
120 163
121 164 class RepoPullEvent(RepoVCSEvent):
122 165 """
123 166 An instance of this class is emitted as an :term:`event` after commits
124 167 are pulled from a repo.
125 168 """
126 169 name = 'repo-pull'
170 display_name = lazy_ugettext('repository pull')
127 171
128 172
129 173 class RepoPrePushEvent(RepoVCSEvent):
130 174 """
131 175 An instance of this class is emitted as an :term:`event` before commits
132 176 are pushed to a repo.
133 177 """
134 178 name = 'repo-pre-push'
179 display_name = lazy_ugettext('repository pre push')
135 180
136 181
137 182 class RepoPushEvent(RepoVCSEvent):
138 183 """
139 184 An instance of this class is emitted as an :term:`event` after commits
140 185 are pushed to a repo.
141 186
142 187 :param extras: (optional) dict of data from proxied VCS actions
143 188 """
144 189 name = 'repo-push'
190 display_name = lazy_ugettext('repository push')
145 191
146 192 def __init__(self, repo_name, pushed_commit_ids, extras):
147 193 super(RepoPushEvent, self).__init__(repo_name, extras)
148 194 self.pushed_commit_ids = pushed_commit_ids
149 195
196 def as_dict(self):
197 data = super(RepoPushEvent, self).as_dict()
198 branch_url = repo_url = data['repo']['url']
199
200 commits = self._commits_as_dict(self.pushed_commit_ids)
201 issues = self._issues_as_dict(commits)
202
203 branches = set(
204 commit['branch'] for commit in commits if commit['branch'])
205 branches = [
206 {
207 'name': branch,
208 'url': '{}/changelog?branch={}'.format(
209 data['repo']['url'], branch)
210 }
211 for branch in branches
212 ]
213
214 data['push'] = {
215 'commits': commits,
216 'issues': issues,
217 'branches': branches,
218 }
219 return data No newline at end of file
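
For reference, the push payload assembled above ends up shaped roughly like this (all values are illustrative, not taken from a real push):

    {
        'name': 'repo-push',
        'utc_timestamp': datetime.datetime(2016, 5, 1, 12, 0, 0),
        'actor': {'username': 'admin'},
        'actor_ip': '127.0.0.1',
        'repo': {'repo_id': 1, 'repo_name': 'my-repo',
                 'url': 'http://example.com/my-repo'},
        'push': {
            'commits': [...],   # one dict per pushed commit, incl. 'branch', 'url', 'issues'
            'issues': {...},    # issues referenced by the pushed commits, keyed by id
            'branches': [{'name': 'default',
                          'url': 'http://example.com/my-repo/changelog?branch=default'}],
        },
    }
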
@@ -1,55 +1,65 b''
1 1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from zope.interface import implementer
20 20
21 from rhodecode.translation import lazy_ugettext
21 22 from rhodecode.events.base import RhodecodeEvent
22 23 from rhodecode.events.interfaces import (
23 24 IUserRegistered, IUserPreCreate, IUserPreUpdate)
24 25
25 26
26 27 @implementer(IUserRegistered)
27 28 class UserRegistered(RhodecodeEvent):
28 29 """
29 30 An instance of this class is emitted as an :term:`event` whenever a user
30 31 account is registered.
31 32 """
33 name = 'user-register'
34 display_name = lazy_ugettext('user registered')
35
32 36 def __init__(self, user, session):
33 37 self.user = user
34 38 self.session = session
35 39
36 40
37 41 @implementer(IUserPreCreate)
38 42 class UserPreCreate(RhodecodeEvent):
39 43 """
40 44 An instance of this class is emitted as an :term:`event` before a new user
41 45 object is created.
42 46 """
47 name = 'user-pre-create'
48 display_name = lazy_ugettext('user pre create')
49
43 50 def __init__(self, user_data):
44 51 self.user_data = user_data
45 52
46 53
47 54 @implementer(IUserPreUpdate)
48 55 class UserPreUpdate(RhodecodeEvent):
49 56 """
50 57 An instance of this class is emitted as an :term:`event` before a user
51 58 object is updated.
52 59 """
60 name = 'user-pre-update'
61 display_name = lazy_ugettext('user pre update')
62
53 63 def __init__(self, user, user_data):
54 64 self.user = user
55 65 self.user_data = user_data
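
A minimal subscriber sketch for the user events above (the handler and `includeme` hook are assumptions; `config.add_subscriber` is the standard pyramid API):

    import logging

    from rhodecode import events

    log = logging.getLogger(__name__)

    def on_user_registered(event):
        # UserRegistered carries the new user and the db session it was created in
        log.info('new user registered: %s', event.user.username)

    def includeme(config):
        config.add_subscriber(on_user_registered, events.UserRegistered)
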
@@ -1,1900 +1,1931 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions typically used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 import pygments
40 40
41 41 from datetime import datetime
42 42 from functools import partial
43 43 from pygments.formatters.html import HtmlFormatter
44 44 from pygments import highlight as code_highlight
45 45 from pygments.lexers import (
46 46 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
47 from pylons import url
47 from pylons import url as pylons_url
48 48 from pylons.i18n.translation import _, ungettext
49 49 from pyramid.threadlocal import get_current_request
50 50
51 51 from webhelpers.html import literal, HTML, escape
52 52 from webhelpers.html.tools import *
53 53 from webhelpers.html.builder import make_tag
54 54 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
55 55 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
56 56 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
57 57 submit, text, password, textarea, title, ul, xml_declaration, radio
58 58 from webhelpers.html.tools import auto_link, button_to, highlight, \
59 59 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
60 60 from webhelpers.pylonslib import Flash as _Flash
61 61 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
62 62 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
63 63 replace_whitespace, urlify, truncate, wrap_paragraphs
64 64 from webhelpers.date import time_ago_in_words
65 65 from webhelpers.paginate import Page as _Page
66 66 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
67 67 convert_boolean_attrs, NotGiven, _make_safe_id_component
68 68 from webhelpers2.number import format_byte_size
69 69
70 70 from rhodecode.lib.annotate import annotate_highlight
71 71 from rhodecode.lib.action_parser import action_parser
72 72 from rhodecode.lib.ext_json import json
73 73 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
74 74 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
75 75 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
76 76 AttributeDict, safe_int, md5, md5_safe
77 77 from rhodecode.lib.markup_renderer import MarkupRenderer
78 78 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
79 79 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
80 80 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
81 81 from rhodecode.model.changeset_status import ChangesetStatusModel
82 82 from rhodecode.model.db import Permission, User, Repository
83 83 from rhodecode.model.repo_group import RepoGroupModel
84 84 from rhodecode.model.settings import IssueTrackerSettingsModel
85 85
86 86 log = logging.getLogger(__name__)
87 87
88 88 DEFAULT_USER = User.DEFAULT_USER
89 89 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
90 90
91 def url(*args, **kw):
92 return pylons_url(*args, **kw)
93
94 def pylons_url_current(*args, **kw):
95 """
96     This function overrides pylons.url.current(), which returns the current
97     path, so that it also works from a pyramid-only context. It should be
98     removed once the port to pyramid is complete.
99 """
100 if not args and not kw:
101 request = get_current_request()
102 return request.path
103 return pylons_url.current(*args, **kw)
104
105 url.current = pylons_url_current
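# Callers keep using the helper exactly as before; a short usage sketch (the
# repo name below is illustrative, the route names are the ones registered in
# the routing module shown earlier in this diff):
#
#   h.url('summary_home', repo_name='my-repo')
#   h.url('files_home', repo_name='my-repo', revision='tip', f_path='')
#   h.url.current()  # falls back to request.path in a pyramid-only context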
106
91 107
92 108 def html_escape(text, html_escape_table=None):
93 109 """Produce entities within text."""
94 110 if not html_escape_table:
95 111 html_escape_table = {
96 112 "&": "&amp;",
97 113 '"': "&quot;",
98 114 "'": "&apos;",
99 115 ">": "&gt;",
100 116 "<": "&lt;",
101 117 }
102 118 return "".join(html_escape_table.get(c, c) for c in text)
103 119
104 120
105 121 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
106 122 """
107 123 Truncate string ``s`` at the first occurrence of ``sub``.
108 124
109 125 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
110 126 """
111 127 suffix_if_chopped = suffix_if_chopped or ''
112 128 pos = s.find(sub)
113 129 if pos == -1:
114 130 return s
115 131
116 132 if inclusive:
117 133 pos += len(sub)
118 134
119 135 chopped = s[:pos]
120 136 left = s[pos:].strip()
121 137
122 138 if left and suffix_if_chopped:
123 139 chopped += suffix_if_chopped
124 140
125 141 return chopped
126 142
127 143
128 144 def shorter(text, size=20):
129 145 postfix = '...'
130 146 if len(text) > size:
131 147 return text[:size - len(postfix)] + postfix
132 148 return text
133 149
134 150
135 151 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
136 152 """
137 153 Reset button
138 154 """
139 155 _set_input_attrs(attrs, type, name, value)
140 156 _set_id_attr(attrs, id, name)
141 157 convert_boolean_attrs(attrs, ["disabled"])
142 158 return HTML.input(**attrs)
143 159
144 160 reset = _reset
145 161 safeid = _make_safe_id_component
146 162
147 163
148 164 def branding(name, length=40):
149 165 return truncate(name, length, indicator="")
150 166
151 167
152 168 def FID(raw_id, path):
153 169 """
154 170     Creates a unique ID for a filenode based on a hash of its path and commit;
155 171     it is safe to use in URLs.
156 172
157 173 :param raw_id:
158 174 :param path:
159 175 """
160 176
161 177 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
162 178
163 179
164 180 class _GetError(object):
165 181     """Get an error from form_errors and represent it as a span-wrapped error
166 182     message.
167 183
168 184 :param field_name: field to fetch errors for
169 185 :param form_errors: form errors dict
170 186 """
171 187
172 188 def __call__(self, field_name, form_errors):
173 189 tmpl = """<span class="error_msg">%s</span>"""
174 190 if form_errors and field_name in form_errors:
175 191 return literal(tmpl % form_errors.get(field_name))
176 192
177 193 get_error = _GetError()
178 194
179 195
180 196 class _ToolTip(object):
181 197
182 198 def __call__(self, tooltip_title, trim_at=50):
183 199 """
184 200         Special function that wraps our text into nicely formatted,
185 201         auto-wrapped tooltip text
186 202
187 203 :param tooltip_title:
188 204 """
189 205 tooltip_title = escape(tooltip_title)
190 206 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
191 207 return tooltip_title
192 208 tooltip = _ToolTip()
193 209
194 210
195 211 def files_breadcrumbs(repo_name, commit_id, file_path):
196 212 if isinstance(file_path, str):
197 213 file_path = safe_unicode(file_path)
198 214
199 215 # TODO: johbo: Is this always a url like path, or is this operating
200 216 # system dependent?
201 217 path_segments = file_path.split('/')
202 218
203 219 repo_name_html = escape(repo_name)
204 220 if len(path_segments) == 1 and path_segments[0] == '':
205 221 url_segments = [repo_name_html]
206 222 else:
207 223 url_segments = [
208 224 link_to(
209 225 repo_name_html,
210 226 url('files_home',
211 227 repo_name=repo_name,
212 228 revision=commit_id,
213 229 f_path=''),
214 230 class_='pjax-link')]
215 231
216 232 last_cnt = len(path_segments) - 1
217 233 for cnt, segment in enumerate(path_segments):
218 234 if not segment:
219 235 continue
220 236 segment_html = escape(segment)
221 237
222 238 if cnt != last_cnt:
223 239 url_segments.append(
224 240 link_to(
225 241 segment_html,
226 242 url('files_home',
227 243 repo_name=repo_name,
228 244 revision=commit_id,
229 245 f_path='/'.join(path_segments[:cnt + 1])),
230 246 class_='pjax-link'))
231 247 else:
232 248 url_segments.append(segment_html)
233 249
234 250 return literal('/'.join(url_segments))
235 251
236 252
237 253 class CodeHtmlFormatter(HtmlFormatter):
238 254 """
239 255 My code Html Formatter for source codes
240 256 """
241 257
242 258 def wrap(self, source, outfile):
243 259 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
244 260
245 261 def _wrap_code(self, source):
246 262 for cnt, it in enumerate(source):
247 263 i, t = it
248 264 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
249 265 yield i, t
250 266
251 267 def _wrap_tablelinenos(self, inner):
252 268 dummyoutfile = StringIO.StringIO()
253 269 lncount = 0
254 270 for t, line in inner:
255 271 if t:
256 272 lncount += 1
257 273 dummyoutfile.write(line)
258 274
259 275 fl = self.linenostart
260 276 mw = len(str(lncount + fl - 1))
261 277 sp = self.linenospecial
262 278 st = self.linenostep
263 279 la = self.lineanchors
264 280 aln = self.anchorlinenos
265 281 nocls = self.noclasses
266 282 if sp:
267 283 lines = []
268 284
269 285 for i in range(fl, fl + lncount):
270 286 if i % st == 0:
271 287 if i % sp == 0:
272 288 if aln:
273 289 lines.append('<a href="#%s%d" class="special">%*d</a>' %
274 290 (la, i, mw, i))
275 291 else:
276 292 lines.append('<span class="special">%*d</span>' % (mw, i))
277 293 else:
278 294 if aln:
279 295 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
280 296 else:
281 297 lines.append('%*d' % (mw, i))
282 298 else:
283 299 lines.append('')
284 300 ls = '\n'.join(lines)
285 301 else:
286 302 lines = []
287 303 for i in range(fl, fl + lncount):
288 304 if i % st == 0:
289 305 if aln:
290 306 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
291 307 else:
292 308 lines.append('%*d' % (mw, i))
293 309 else:
294 310 lines.append('')
295 311 ls = '\n'.join(lines)
296 312
297 313 # in case you wonder about the seemingly redundant <div> here: since the
298 314 # content in the other cell also is wrapped in a div, some browsers in
299 315 # some configurations seem to mess up the formatting...
300 316 if nocls:
301 317 yield 0, ('<table class="%stable">' % self.cssclass +
302 318 '<tr><td><div class="linenodiv" '
303 319 'style="background-color: #f0f0f0; padding-right: 10px">'
304 320 '<pre style="line-height: 125%">' +
305 321 ls + '</pre></div></td><td id="hlcode" class="code">')
306 322 else:
307 323 yield 0, ('<table class="%stable">' % self.cssclass +
308 324 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
309 325 ls + '</pre></div></td><td id="hlcode" class="code">')
310 326 yield 0, dummyoutfile.getvalue()
311 327 yield 0, '</td></tr></table>'
312 328
313 329
314 330 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
315 331 def __init__(self, **kw):
316 332 # only show these line numbers if set
317 333 self.only_lines = kw.pop('only_line_numbers', [])
318 334 self.query_terms = kw.pop('query_terms', [])
319 335 self.max_lines = kw.pop('max_lines', 5)
320 336 self.line_context = kw.pop('line_context', 3)
321 337 self.url = kw.pop('url', None)
322 338
323 339 super(CodeHtmlFormatter, self).__init__(**kw)
324 340
325 341 def _wrap_code(self, source):
326 342 for cnt, it in enumerate(source):
327 343 i, t = it
328 344 t = '<pre>%s</pre>' % t
329 345 yield i, t
330 346
331 347 def _wrap_tablelinenos(self, inner):
332 348 yield 0, '<table class="code-highlight %stable">' % self.cssclass
333 349
334 350 last_shown_line_number = 0
335 351 current_line_number = 1
336 352
337 353 for t, line in inner:
338 354 if not t:
339 355 yield t, line
340 356 continue
341 357
342 358 if current_line_number in self.only_lines:
343 359 if last_shown_line_number + 1 != current_line_number:
344 360 yield 0, '<tr>'
345 361 yield 0, '<td class="line">...</td>'
346 362 yield 0, '<td id="hlcode" class="code"></td>'
347 363 yield 0, '</tr>'
348 364
349 365 yield 0, '<tr>'
350 366 if self.url:
351 367 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
352 368 self.url, current_line_number, current_line_number)
353 369 else:
354 370 yield 0, '<td class="line"><a href="">%i</a></td>' % (
355 371 current_line_number)
356 372 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
357 373 yield 0, '</tr>'
358 374
359 375 last_shown_line_number = current_line_number
360 376
361 377 current_line_number += 1
362 378
363 379
364 380 yield 0, '</table>'
365 381
366 382
367 383 def extract_phrases(text_query):
368 384 """
369 385     Extracts phrases from a search term string, making sure phrases
370 386     contained in double quotes are kept together, and discarding empty or
371 387     whitespace-only values, eg.
372 388
373 389 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
374 390
375 391 """
376 392
377 393 in_phrase = False
378 394 buf = ''
379 395 phrases = []
380 396 for char in text_query:
381 397 if in_phrase:
382 398 if char == '"': # end phrase
383 399 phrases.append(buf)
384 400 buf = ''
385 401 in_phrase = False
386 402 continue
387 403 else:
388 404 buf += char
389 405 continue
390 406 else:
391 407 if char == '"': # start phrase
392 408 in_phrase = True
393 409 phrases.append(buf)
394 410 buf = ''
395 411 continue
396 412 elif char == ' ':
397 413 phrases.append(buf)
398 414 buf = ''
399 415 continue
400 416 else:
401 417 buf += char
402 418
403 419 phrases.append(buf)
404 420 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
405 421 return phrases
406 422
407 423
408 424 def get_matching_offsets(text, phrases):
409 425 """
410 426     Returns a list of string offsets in `text` that the list of `phrases` match
411 427
412 428 >>> get_matching_offsets('some text here', ['some', 'here'])
413 429 [(0, 4), (10, 14)]
414 430
415 431 """
416 432 offsets = []
417 433 for phrase in phrases:
418 434 for match in re.finditer(phrase, text):
419 435 offsets.append((match.start(), match.end()))
420 436
421 437 return offsets
422 438
423 439
424 440 def normalize_text_for_matching(x):
425 441 """
426 442     Replaces all non-alphanumeric characters with spaces and lowercases the
427 443     string; useful for comparing two text strings without punctuation
428 444 """
429 445 return re.sub(r'[^\w]', ' ', x.lower())
430 446
431 447
432 448 def get_matching_line_offsets(lines, terms):
433 449 """ Return a set of `lines` indices (starting from 1) matching a
434 450     """ Return a dict of `lines` indices (starting from 1) matching a
435 451
436 452 :param lines: list of strings representing lines
437 453 :param terms: search term string to match in lines eg. 'some text'
438 454 :param context: number of lines above/below a matching line to add to result
439 455 :param max_lines: cut off for lines of interest
440 456 eg.
441 457
442 458 text = '''
443 459 words words words
444 460 words words words
445 461 some text some
446 462 words words words
447 463 words words words
448 464 text here what
449 465 '''
450 466 get_matching_line_offsets(text, 'text', context=1)
451 467     {3: [(5, 9)], 6: [(0, 4)]}
452 468
453 469 """
454 470 matching_lines = {}
455 471 phrases = [normalize_text_for_matching(phrase)
456 472 for phrase in extract_phrases(terms)]
457 473
458 474 for line_index, line in enumerate(lines, start=1):
459 475 match_offsets = get_matching_offsets(
460 476 normalize_text_for_matching(line), phrases)
461 477 if match_offsets:
462 478 matching_lines[line_index] = match_offsets
463 479
464 480 return matching_lines
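# Usage sketch tying the search helpers together (the sample lines and query
# are made up):
#
#   lines = ['some text here', 'other words', 'more text']
#   get_matching_line_offsets(lines, 'text')
#   # -> {1: [(5, 9)], 3: [(5, 9)]}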
465 481
466 482
467 483 def get_lexer_safe(mimetype=None, filepath=None):
468 484 """
469 485 Tries to return a relevant pygments lexer using mimetype/filepath name,
470 486 defaulting to plain text if none could be found
471 487 """
472 488 lexer = None
473 489 try:
474 490 if mimetype:
475 491 lexer = get_lexer_for_mimetype(mimetype)
476 492 if not lexer:
477 493 lexer = get_lexer_for_filename(filepath)
478 494 except pygments.util.ClassNotFound:
479 495 pass
480 496
481 497 if not lexer:
482 498 lexer = get_lexer_by_name('text')
483 499
484 500 return lexer
485 501
486 502
487 503 def pygmentize(filenode, **kwargs):
488 504 """
489 505 pygmentize function using pygments
490 506
491 507 :param filenode:
492 508 """
493 509 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
494 510 return literal(code_highlight(filenode.content, lexer,
495 511 CodeHtmlFormatter(**kwargs)))
496 512
497 513
498 514 def pygmentize_annotation(repo_name, filenode, **kwargs):
499 515 """
500 516 pygmentize function for annotation
501 517
502 518 :param filenode:
503 519 """
504 520
505 521 color_dict = {}
506 522
507 523 def gen_color(n=10000):
508 524         """generator for getting n evenly distributed colors using
509 525         hsv color and the golden ratio. It always returns the same order of colors
510 526
511 527 :returns: RGB tuple
512 528 """
513 529
514 530 def hsv_to_rgb(h, s, v):
515 531 if s == 0.0:
516 532 return v, v, v
517 533 i = int(h * 6.0) # XXX assume int() truncates!
518 534 f = (h * 6.0) - i
519 535 p = v * (1.0 - s)
520 536 q = v * (1.0 - s * f)
521 537 t = v * (1.0 - s * (1.0 - f))
522 538 i = i % 6
523 539 if i == 0:
524 540 return v, t, p
525 541 if i == 1:
526 542 return q, v, p
527 543 if i == 2:
528 544 return p, v, t
529 545 if i == 3:
530 546 return p, q, v
531 547 if i == 4:
532 548 return t, p, v
533 549 if i == 5:
534 550 return v, p, q
535 551
536 552 golden_ratio = 0.618033988749895
537 553 h = 0.22717784590367374
538 554
539 555 for _ in xrange(n):
540 556 h += golden_ratio
541 557 h %= 1
542 558 HSV_tuple = [h, 0.95, 0.95]
543 559 RGB_tuple = hsv_to_rgb(*HSV_tuple)
544 560 yield map(lambda x: str(int(x * 256)), RGB_tuple)
545 561
546 562 cgenerator = gen_color()
547 563
548 564 def get_color_string(commit_id):
549 565 if commit_id in color_dict:
550 566 col = color_dict[commit_id]
551 567 else:
552 568 col = color_dict[commit_id] = cgenerator.next()
553 569 return "color: rgb(%s)! important;" % (', '.join(col))
554 570
555 571 def url_func(repo_name):
556 572
557 573 def _url_func(commit):
558 574 author = commit.author
559 575 date = commit.date
560 576 message = tooltip(commit.message)
561 577
562 578 tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
563 579 " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
564 580 "</b> %s<br/></div>")
565 581
566 582 tooltip_html = tooltip_html % (author, date, message)
567 583 lnk_format = '%5s:%s' % ('r%s' % commit.idx, commit.short_id)
568 584 uri = link_to(
569 585 lnk_format,
570 586 url('changeset_home', repo_name=repo_name,
571 587 revision=commit.raw_id),
572 588 style=get_color_string(commit.raw_id),
573 589 class_='tooltip',
574 590 title=tooltip_html
575 591 )
576 592
577 593 uri += '\n'
578 594 return uri
579 595 return _url_func
580 596
581 597 return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
582 598
583 599
584 600 def is_following_repo(repo_name, user_id):
585 601 from rhodecode.model.scm import ScmModel
586 602 return ScmModel().is_following_repo(repo_name, user_id)
587 603
588 604
589 605 class _Message(object):
590 606 """A message returned by ``Flash.pop_messages()``.
591 607
592 608 Converting the message to a string returns the message text. Instances
593 609 also have the following attributes:
594 610
595 611 * ``message``: the message text.
596 612 * ``category``: the category specified when the message was created.
597 613 """
598 614
599 615 def __init__(self, category, message):
600 616 self.category = category
601 617 self.message = message
602 618
603 619 def __str__(self):
604 620 return self.message
605 621
606 622 __unicode__ = __str__
607 623
608 624 def __html__(self):
609 625 return escape(safe_unicode(self.message))
610 626
611 627
612 628 class Flash(_Flash):
613 629
614 630 def pop_messages(self):
615 631 """Return all accumulated messages and delete them from the session.
616 632
617 633 The return value is a list of ``Message`` objects.
618 634 """
619 635 from pylons import session
620 636
621 637 messages = []
622 638
623 639 # Pop the 'old' pylons flash messages. They are tuples of the form
624 640 # (category, message)
625 641 for cat, msg in session.pop(self.session_key, []):
626 642 messages.append(_Message(cat, msg))
627 643
628 644 # Pop the 'new' pyramid flash messages for each category as list
629 645 # of strings.
630 646 for cat in self.categories:
631 647 for msg in session.pop_flash(queue=cat):
632 648 messages.append(_Message(cat, msg))
633 649 # Map messages from the default queue to the 'notice' category.
634 650 for msg in session.pop_flash():
635 651 messages.append(_Message('notice', msg))
636 652
637 653 session.save()
638 654 return messages
639 655
640 656 flash = Flash()
641 657
642 658 #==============================================================================
643 659 # SCM FILTERS available via h.
644 660 #==============================================================================
645 661 from rhodecode.lib.vcs.utils import author_name, author_email
646 662 from rhodecode.lib.utils2 import credentials_filter, age as _age
647 663 from rhodecode.model.db import User, ChangesetStatus
648 664
649 665 age = _age
650 666 capitalize = lambda x: x.capitalize()
651 667 email = author_email
652 668 short_id = lambda x: x[:12]
653 669 hide_credentials = lambda x: ''.join(credentials_filter(x))
654 670
655 671
656 672 def age_component(datetime_iso, value=None, time_is_local=False):
657 673 title = value or format_date(datetime_iso)
658 674
659 675     # detect if we have timezone info, otherwise add it
660 676 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
661 677 tzinfo = '+00:00'
662 678
663 679 if time_is_local:
664 680 tzinfo = time.strftime("+%H:%M",
665 681 time.gmtime(
666 682 (datetime.now() - datetime.utcnow()).seconds + 1
667 683 )
668 684 )
669 685
670 686 return literal(
671 687 '<time class="timeago tooltip" '
672 688 'title="{1}" datetime="{0}{2}">{1}</time>'.format(
673 689 datetime_iso, title, tzinfo))
674 690
675 691
676 692 def _shorten_commit_id(commit_id):
677 693 from rhodecode import CONFIG
678 694 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
679 695 return commit_id[:def_len]
680 696
681 697
682 698 def show_id(commit):
683 699 """
684 700     Configurable function that shows the commit ID;
685 701     by default it's r123:fffeeefffeee
686 702
687 703 :param commit: commit instance
688 704 """
689 705 from rhodecode import CONFIG
690 706 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
691 707
692 708 raw_id = _shorten_commit_id(commit.raw_id)
693 709 if show_idx:
694 710 return 'r%s:%s' % (commit.idx, raw_id)
695 711 else:
696 712 return '%s' % (raw_id, )
697 713
698 714
699 715 def format_date(date):
700 716 """
701 717 use a standardized formatting for dates used in RhodeCode
702 718
703 719 :param date: date/datetime object
704 720 :return: formatted date
705 721 """
706 722
707 723 if date:
708 724 _fmt = "%a, %d %b %Y %H:%M:%S"
709 725 return safe_unicode(date.strftime(_fmt))
710 726
711 727 return u""
712 728
713 729
714 730 class _RepoChecker(object):
715 731
716 732 def __init__(self, backend_alias):
717 733 self._backend_alias = backend_alias
718 734
719 735 def __call__(self, repository):
720 736 if hasattr(repository, 'alias'):
721 737 _type = repository.alias
722 738 elif hasattr(repository, 'repo_type'):
723 739 _type = repository.repo_type
724 740 else:
725 741 _type = repository
726 742 return _type == self._backend_alias
727 743
728 744 is_git = _RepoChecker('git')
729 745 is_hg = _RepoChecker('hg')
730 746 is_svn = _RepoChecker('svn')
731 747
732 748
733 749 def get_repo_type_by_name(repo_name):
734 750 repo = Repository.get_by_repo_name(repo_name)
735 751 return repo.repo_type
736 752
737 753
738 754 def is_svn_without_proxy(repository):
739 755 from rhodecode import CONFIG
740 756 if is_svn(repository):
741 757 if not CONFIG.get('rhodecode_proxy_subversion_http_requests', False):
742 758 return True
743 759 return False
744 760
745 761
746 762 def discover_user(author):
747 763 """
748 764     Tries to discover a RhodeCode User based on the author string. The author
749 765     string is typically `FirstName LastName <email@address.com>`
750 766 """
751 767
752 768 # if author is already an instance use it for extraction
753 769 if isinstance(author, User):
754 770 return author
755 771
756 772     # A valid email was passed in the attribute, see if that user is in the system
757 773 _email = author_email(author)
758 774 if _email != '':
759 775 user = User.get_by_email(_email, case_insensitive=True, cache=True)
760 776 if user is not None:
761 777 return user
762 778
763 779     # Maybe it's a username; try to extract it and fetch the user by username
764 780 _author = author_name(author)
765 781 user = User.get_by_username(_author, case_insensitive=True, cache=True)
766 782 if user is not None:
767 783 return user
768 784
769 785 return None
770 786
771 787
772 788 def email_or_none(author):
773 789 # extract email from the commit string
774 790 _email = author_email(author)
775 791
776 792 # If we have an email, use it, otherwise
777 793 # see if it contains a username we can get an email from
778 794 if _email != '':
779 795 return _email
780 796 else:
781 797 user = User.get_by_username(author_name(author), case_insensitive=True,
782 798 cache=True)
783 799
784 800 if user is not None:
785 801 return user.email
786 802
787 803 # No valid email, not a valid user in the system, none!
788 804 return None
789 805
790 806
791 807 def link_to_user(author, length=0, **kwargs):
792 808 user = discover_user(author)
793 809     # user can be None, but if we already have it we can re-use it
794 810     # in the person() function, saving one expensive query
795 811 if user:
796 812 author = user
797 813
798 814 display_person = person(author, 'username_or_name_or_email')
799 815 if length:
800 816 display_person = shorter(display_person, length)
801 817
802 818 if user:
803 819 return link_to(
804 820 escape(display_person),
805 821 url('user_profile', username=user.username),
806 822 **kwargs)
807 823 else:
808 824 return escape(display_person)
809 825
810 826
811 827 def person(author, show_attr="username_and_name"):
812 828 user = discover_user(author)
813 829 if user:
814 830 return getattr(user, show_attr)
815 831 else:
816 832 _author = author_name(author)
817 833 _email = email(author)
818 834 return _author or _email
819 835
820 836
821 837 def author_string(email):
822 838 if email:
823 839 user = User.get_by_email(email, case_insensitive=True, cache=True)
824 840 if user:
825 841 if user.firstname or user.lastname:
826 842 return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
827 843 else:
828 844 return email
829 845 else:
830 846 return email
831 847 else:
832 848 return None
833 849
834 850
835 851 def person_by_id(id_, show_attr="username_and_name"):
836 852 # attr to return from fetched user
837 853 person_getter = lambda usr: getattr(usr, show_attr)
838 854
839 855     # maybe it's an ID?
840 856 if str(id_).isdigit() or isinstance(id_, int):
841 857 id_ = int(id_)
842 858 user = User.get(id_)
843 859 if user is not None:
844 860 return person_getter(user)
845 861 return id_
846 862
847 863
848 864 def gravatar_with_user(author, show_disabled=False):
849 865 from rhodecode.lib.utils import PartialRenderer
850 866 _render = PartialRenderer('base/base.html')
851 867 return _render('gravatar_with_user', author, show_disabled=show_disabled)
852 868
853 869
854 870 def desc_stylize(value):
855 871 """
856 872 converts tags from value into html equivalent
857 873
858 874 :param value:
859 875 """
860 876 if not value:
861 877 return ''
862 878
863 879 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
864 880 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
865 881 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
866 882 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
867 883 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
868 884 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
869 885 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
870 886 '<div class="metatag" tag="lang">\\2</div>', value)
871 887 value = re.sub(r'\[([a-z]+)\]',
872 888 '<div class="metatag" tag="\\1">\\1</div>', value)
873 889
874 890 return value
875 891
876 892
877 893 def escaped_stylize(value):
878 894 """
879 895 converts tags from value into html equivalent, but escaping its value first
880 896 """
881 897 if not value:
882 898 return ''
883 899
884 900     # Using the default webhelpers escape method, but we have to force it to
885 901     # plain unicode instead of a markup tag so it can be used in regex expressions
886 902 value = unicode(escape(safe_unicode(value)))
887 903
888 904 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
889 905 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
890 906 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
891 907 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
892 908 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
893 909 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
894 910 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
895 911 '<div class="metatag" tag="lang">\\2</div>', value)
896 912 value = re.sub(r'\[([a-z]+)\]',
897 913 '<div class="metatag" tag="\\1">\\1</div>', value)
898 914
899 915 return value
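# Illustrative sketch (not part of the changeset): the [lang => ...] and plain
# [tag] substitutions above, applied standalone to a sample description.
import re
desc = 'demo repo [lang => python] [featured]'
desc = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
              '<div class="metatag" tag="lang">\\2</div>', desc)
desc = re.sub(r'\[([a-z]+)\]',
              '<div class="metatag" tag="\\1">\\1</div>', desc)
# desc now reads: 'demo repo <div class="metatag" tag="lang">python</div>
#                  <div class="metatag" tag="featured">featured</div>'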
900 916
901 917
902 918 def bool2icon(value):
903 919 """
904 920 Returns boolean value of a given value, represented as html element with
905 921 classes that will represent icons
906 922
907 923 :param value: given value to convert to html node
908 924 """
909 925
910 926 if value: # does bool conversion
911 927 return HTML.tag('i', class_="icon-true")
912 928 else: # not true as bool
913 929 return HTML.tag('i', class_="icon-false")
914 930
915 931
916 932 #==============================================================================
917 933 # PERMS
918 934 #==============================================================================
919 935 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
920 936 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
921 937 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token
922 938
923 939
924 940 #==============================================================================
925 941 # GRAVATAR URL
926 942 #==============================================================================
927 943 class InitialsGravatar(object):
928 944 def __init__(self, email_address, first_name, last_name, size=30,
929 945 background=None, text_color='#fff'):
930 946 self.size = size
931 947 self.first_name = first_name
932 948 self.last_name = last_name
933 949 self.email_address = email_address
934 950 self.background = background or self.str2color(email_address)
935 951 self.text_color = text_color
936 952
937 953 def get_color_bank(self):
938 954 """
939 955 returns a predefined list of colors that gravatars can use.
940 956 Those are randomized distinct colors that guarantee readability and
941 957 uniqueness.
942 958
943 959 generated with: http://phrogz.net/css/distinct-colors.html
944 960 """
945 961 return [
946 962 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
947 963 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
948 964 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
949 965 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
950 966 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
951 967 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
952 968 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
953 969 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
954 970 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
955 971 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
956 972 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
957 973 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
958 974 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
959 975 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
960 976 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
961 977 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
962 978 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
963 979 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
964 980 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
965 981 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
966 982 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
967 983 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
968 984 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
969 985 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
970 986 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
971 987 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
972 988 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
973 989 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
974 990 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
975 991 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
976 992 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
977 993 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
978 994 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
979 995 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
980 996 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
981 997 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
982 998 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
983 999 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
984 1000 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
985 1001 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
986 1002 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
987 1003 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
988 1004 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
989 1005 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
990 1006 '#4f8c46', '#368dd9', '#5c0073'
991 1007 ]
992 1008
993 1009 def rgb_to_hex_color(self, rgb_tuple):
994 1010 """
995 1011 Converts the passed rgb tuple to a hex color string.
996 1012
997 1013 :param rgb_tuple: tuple of 3 ints representing an rgb color
998 1014 """
999 1015 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
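# Illustrative sketch (not part of the changeset): the conversion above is
# equivalent to hex-encoding each channel byte of the rgb tuple.
rgb = (191, 48, 48)
assert '#' + ''.join('%02x' % channel for channel in rgb) == '#bf3030'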
1000 1016
1001 1017 def email_to_int_list(self, email_str):
1002 1018 """
1003 1019 Get every byte of the hex digest of the email and turn it into an integer.
1004 1020 Each value is always between 0 and 255.
1005 1021 """
1006 1022 digest = md5_safe(email_str.lower())
1007 1023 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1008 1024
1009 1025 def pick_color_bank_index(self, email_str, color_bank):
1010 1026 return self.email_to_int_list(email_str)[0] % len(color_bank)
1011 1027
1012 1028 def str2color(self, email_str):
1013 1029 """
1014 1030 Maps an email to a color using a stable algorithm.
1015 1031
1016 1032 :param email_str:
1017 1033 """
1018 1034 color_bank = self.get_color_bank()
1019 1035 # pick position (modulo its length so we always find it in the
1020 1036 # bank even if it's smaller than 256 values)
1021 1037 pos = self.pick_color_bank_index(email_str, color_bank)
1022 1038 return color_bank[pos]
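# Illustrative sketch (not part of the changeset): how a stable color index is
# derived from an email, mirroring email_to_int_list()/pick_color_bank_index().
import hashlib
digest = hashlib.md5('user@example.com'.lower()).hexdigest()
first_byte = int(digest[0:2], 16)       # always in 0..255
# index = first_byte % len(color_bank)  # the same email always gets the same color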
1023 1039
1024 1040 def normalize_email(self, email_address):
1025 1041 import unicodedata
1026 1042 # default host used to fill in the fake/missing email
1027 1043 default_host = u'localhost'
1028 1044
1029 1045 if not email_address:
1030 1046 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1031 1047
1032 1048 email_address = safe_unicode(email_address)
1033 1049
1034 1050 if u'@' not in email_address:
1035 1051 email_address = u'%s@%s' % (email_address, default_host)
1036 1052
1037 1053 if email_address.endswith(u'@'):
1038 1054 email_address = u'%s%s' % (email_address, default_host)
1039 1055
1040 1056 email_address = unicodedata.normalize('NFKD', email_address)\
1041 1057 .encode('ascii', 'ignore')
1042 1058 return email_address
1043 1059
1044 1060 def get_initials(self):
1045 1061 """
1046 1062 Returns 2 letter initials calculated based on the input.
1047 1063 The algorithm first uses the given email address: it takes the first
1048 1064 letter of the part before @ and the first letter of the server name. If
1049 1065 the part before @ has the format `somestring.somestring2`, the server
1050 1066 letter is replaced with the first letter of somestring2.
1051 1067
1052 1068 If the function was initialized with both first and last name, they
1053 1069 override the extraction from the email: the first letters of the first
1054 1070 and last name are used instead. If the full name is compound, like
1055 1071 Guido Von Rossum, we use the last part of the last name (Rossum),
1056 1072 picking `R`.
1057 1073
1058 1074 The function also normalizes non-ascii characters to their ascii
1059 1075 representation, e.g. Ą => A
1060 1076 """
1061 1077 import unicodedata
1062 1078 # replace non-ascii to ascii
1063 1079 first_name = unicodedata.normalize(
1064 1080 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1065 1081 last_name = unicodedata.normalize(
1066 1082 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1067 1083
1068 1084 # do NFKD encoding, and also make sure email has proper format
1069 1085 email_address = self.normalize_email(self.email_address)
1070 1086
1071 1087 # first push the email initials
1072 1088 prefix, server = email_address.split('@', 1)
1073 1089
1074 1090 # check if prefix is maybe a 'firstname.lastname' syntax
1075 1091 _dot_split = prefix.rsplit('.', 1)
1076 1092 if len(_dot_split) == 2:
1077 1093 initials = [_dot_split[0][0], _dot_split[1][0]]
1078 1094 else:
1079 1095 initials = [prefix[0], server[0]]
1080 1096
1081 1097 # then try to replace either firstname or lastname
1082 1098 fn_letter = (first_name or " ")[0].strip()
1083 1099 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1084 1100
1085 1101 if fn_letter:
1086 1102 initials[0] = fn_letter
1087 1103
1088 1104 if ln_letter:
1089 1105 initials[1] = ln_letter
1090 1106
1091 1107 return ''.join(initials).upper()
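# Illustrative sketch (not part of the changeset): the initials logic above for
# a 'first.last@host' style address, with no first/last name overrides.
prefix, server = 'john.doe@example.com'.split('@', 1)
parts = prefix.rsplit('.', 1)
initials = [parts[0][0], parts[1][0]] if len(parts) == 2 else [prefix[0], server[0]]
assert ''.join(initials).upper() == 'JD'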
1092 1108
1093 1109 def get_img_data_by_type(self, font_family, img_type):
1094 1110 default_user = """
1095 1111 <svg xmlns="http://www.w3.org/2000/svg"
1096 1112 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1097 1113 viewBox="-15 -10 439.165 429.164"
1098 1114
1099 1115 xml:space="preserve"
1100 1116 style="background:{background};" >
1101 1117
1102 1118 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1103 1119 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1104 1120 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1105 1121 168.596,153.916,216.671,
1106 1122 204.583,216.671z" fill="{text_color}"/>
1107 1123 <path d="M407.164,374.717L360.88,
1108 1124 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1109 1125 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1110 1126 15.366-44.203,23.488-69.076,23.488c-24.877,
1111 1127 0-48.762-8.122-69.078-23.488
1112 1128 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1113 1129 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1114 1130 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1115 1131 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1116 1132 19.402-10.527 C409.699,390.129,
1117 1133 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1118 1134 </svg>""".format(
1119 1135 size=self.size,
1120 1136 background='#979797', # @grey4
1121 1137 text_color=self.text_color,
1122 1138 font_family=font_family)
1123 1139
1124 1140 return {
1125 1141 "default_user": default_user
1126 1142 }[img_type]
1127 1143
1128 1144 def get_img_data(self, svg_type=None):
1129 1145 """
1130 1146 generates the svg markup for the avatar image
1131 1147 """
1132 1148
1133 1149 font_family = ','.join([
1134 1150 'proximanovaregular',
1135 1151 'Proxima Nova Regular',
1136 1152 'Proxima Nova',
1137 1153 'Arial',
1138 1154 'Lucida Grande',
1139 1155 'sans-serif'
1140 1156 ])
1141 1157 if svg_type:
1142 1158 return self.get_img_data_by_type(font_family, svg_type)
1143 1159
1144 1160 initials = self.get_initials()
1145 1161 img_data = """
1146 1162 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1147 1163 width="{size}" height="{size}"
1148 1164 style="width: 100%; height: 100%; background-color: {background}"
1149 1165 viewBox="0 0 {size} {size}">
1150 1166 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1151 1167 pointer-events="auto" fill="{text_color}"
1152 1168 font-family="{font_family}"
1153 1169 style="font-weight: 400; font-size: {f_size}px;">{text}
1154 1170 </text>
1155 1171 </svg>""".format(
1156 1172 size=self.size,
1157 1173 f_size=self.size/1.85, # scale the text inside the box nicely
1158 1174 background=self.background,
1159 1175 text_color=self.text_color,
1160 1176 text=initials.upper(),
1161 1177 font_family=font_family)
1162 1178
1163 1179 return img_data
1164 1180
1165 1181 def generate_svg(self, svg_type=None):
1166 1182 img_data = self.get_img_data(svg_type)
1167 1183 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1168 1184
1169 1185
1170 1186 def initials_gravatar(email_address, first_name, last_name, size=30):
1171 1187 svg_type = None
1172 1188 if email_address == User.DEFAULT_USER_EMAIL:
1173 1189 svg_type = 'default_user'
1174 1190 klass = InitialsGravatar(email_address, first_name, last_name, size)
1175 1191 return klass.generate_svg(svg_type=svg_type)
1176 1192
1177 1193
1178 1194 def gravatar_url(email_address, size=30):
1179 1195 # doh, we need to re-import those to mock it later
1180 1196 from pylons import tmpl_context as c
1181 1197
1182 1198 _use_gravatar = c.visual.use_gravatar
1183 1199 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1184 1200
1185 1201 email_address = email_address or User.DEFAULT_USER_EMAIL
1186 1202 if isinstance(email_address, unicode):
1187 1203 # hashlib crashes on unicode items
1188 1204 email_address = safe_str(email_address)
1189 1205
1190 1206 # empty email or default user
1191 1207 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1192 1208 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1193 1209
1194 1210 if _use_gravatar:
1195 1211 # TODO: Disuse pyramid thread locals. Think about another solution to
1196 1212 # get the host and schema here.
1197 1213 request = get_current_request()
1198 1214 tmpl = safe_str(_gravatar_url)
1199 1215 tmpl = tmpl.replace('{email}', email_address)\
1200 1216 .replace('{md5email}', md5_safe(email_address.lower())) \
1201 1217 .replace('{netloc}', request.host)\
1202 1218 .replace('{scheme}', request.scheme)\
1203 1219 .replace('{size}', safe_str(size))
1204 1220 return tmpl
1205 1221 else:
1206 1222 return initials_gravatar(email_address, '', '', size=size)
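# Illustrative sketch (not part of the changeset): how the gravatar template
# placeholders are filled in. The template URL here is an assumption; the real
# one comes from c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL.
import hashlib
tmpl = 'https://secure.gravatar.com/avatar/{md5email}?s={size}'
email = 'user@example.com'
avatar_url = (tmpl
              .replace('{md5email}', hashlib.md5(email.lower()).hexdigest())
              .replace('{size}', '30'))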
1207 1223
1208 1224
1209 1225 class Page(_Page):
1210 1226 """
1211 1227 Custom pager to match rendering style with paginator
1212 1228 """
1213 1229
1214 1230 def _get_pos(self, cur_page, max_page, items):
1215 1231 edge = (items / 2) + 1
1216 1232 if (cur_page <= edge):
1217 1233 radius = max(items / 2, items - cur_page)
1218 1234 elif (max_page - cur_page) < edge:
1219 1235 radius = (items - 1) - (max_page - cur_page)
1220 1236 else:
1221 1237 radius = items / 2
1222 1238
1223 1239 left = max(1, (cur_page - (radius)))
1224 1240 right = min(max_page, cur_page + (radius))
1225 1241 return left, cur_page, right
1226 1242
1227 1243 def _range(self, regexp_match):
1228 1244 """
1229 1245 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1230 1246
1231 1247 Arguments:
1232 1248
1233 1249 regexp_match
1234 1250 A "re" (regular expressions) match object containing the
1235 1251 radius of linked pages around the current page in
1236 1252 regexp_match.group(1) as a string
1237 1253
1238 1254 This function is supposed to be called as a callable in
1239 1255 re.sub.
1240 1256
1241 1257 """
1242 1258 radius = int(regexp_match.group(1))
1243 1259
1244 1260 # Compute the first and last page number within the radius
1245 1261 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1246 1262 # -> leftmost_page = 5
1247 1263 # -> rightmost_page = 9
1248 1264 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1249 1265 self.last_page,
1250 1266 (radius * 2) + 1)
1251 1267 nav_items = []
1252 1268
1253 1269 # Create a link to the first page (unless we are on the first page
1254 1270 # or there would be no need to insert '..' spacers)
1255 1271 if self.page != self.first_page and self.first_page < leftmost_page:
1256 1272 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1257 1273
1258 1274 # Insert dots if there are pages between the first page
1259 1275 # and the currently displayed page range
1260 1276 if leftmost_page - self.first_page > 1:
1261 1277 # Wrap in a SPAN tag if dotdot_attr is set
1262 1278 text = '..'
1263 1279 if self.dotdot_attr:
1264 1280 text = HTML.span(c=text, **self.dotdot_attr)
1265 1281 nav_items.append(text)
1266 1282
1267 1283 for thispage in xrange(leftmost_page, rightmost_page + 1):
1268 1284 # Highlight the current page number and do not use a link
1269 1285 if thispage == self.page:
1270 1286 text = '%s' % (thispage,)
1271 1287 # Wrap in a SPAN tag if curpage_attr is set
1272 1288 if self.curpage_attr:
1273 1289 text = HTML.span(c=text, **self.curpage_attr)
1274 1290 nav_items.append(text)
1275 1291 # Otherwise create just a link to that page
1276 1292 else:
1277 1293 text = '%s' % (thispage,)
1278 1294 nav_items.append(self._pagerlink(thispage, text))
1279 1295
1280 1296 # Insert dots if there are pages between the displayed
1281 1297 # page numbers and the end of the page range
1282 1298 if self.last_page - rightmost_page > 1:
1283 1299 text = '..'
1284 1300 # Wrap in a SPAN tag if dotdot_attr is set
1285 1301 if self.dotdot_attr:
1286 1302 text = HTML.span(c=text, **self.dotdot_attr)
1287 1303 nav_items.append(text)
1288 1304
1289 1305 # Create a link to the very last page (unless we are on the last
1290 1306 # page or there would be no need to insert '..' spacers)
1291 1307 if self.page != self.last_page and rightmost_page < self.last_page:
1292 1308 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1293 1309
1294 1310 ## prerender links
1295 1311 #_page_link = url.current()
1296 1312 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1297 1313 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1298 1314 return self.separator.join(nav_items)
1299 1315
1300 1316 def pager(self, format='~2~', page_param='page', partial_param='partial',
1301 1317 show_if_single_page=False, separator=' ', onclick=None,
1302 1318 symbol_first='<<', symbol_last='>>',
1303 1319 symbol_previous='<', symbol_next='>',
1304 1320 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1305 1321 curpage_attr={'class': 'pager_curpage'},
1306 1322 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1307 1323
1308 1324 self.curpage_attr = curpage_attr
1309 1325 self.separator = separator
1310 1326 self.pager_kwargs = kwargs
1311 1327 self.page_param = page_param
1312 1328 self.partial_param = partial_param
1313 1329 self.onclick = onclick
1314 1330 self.link_attr = link_attr
1315 1331 self.dotdot_attr = dotdot_attr
1316 1332
1317 1333 # Don't show navigator if there is no more than one page
1318 1334 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1319 1335 return ''
1320 1336
1321 1337 from string import Template
1322 1338 # Replace ~...~ in token format by range of pages
1323 1339 result = re.sub(r'~(\d+)~', self._range, format)
1324 1340
1325 1341 # Interpolate '$' variables using string.Template
1326 1342 result = Template(result).safe_substitute({
1327 1343 'first_page': self.first_page,
1328 1344 'last_page': self.last_page,
1329 1345 'page': self.page,
1330 1346 'page_count': self.page_count,
1331 1347 'items_per_page': self.items_per_page,
1332 1348 'first_item': self.first_item,
1333 1349 'last_item': self.last_item,
1334 1350 'item_count': self.item_count,
1335 1351 'link_first': self.page > self.first_page and \
1336 1352 self._pagerlink(self.first_page, symbol_first) or '',
1337 1353 'link_last': self.page < self.last_page and \
1338 1354 self._pagerlink(self.last_page, symbol_last) or '',
1339 1355 'link_previous': self.previous_page and \
1340 1356 self._pagerlink(self.previous_page, symbol_previous) \
1341 1357 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1342 1358 'link_next': self.next_page and \
1343 1359 self._pagerlink(self.next_page, symbol_next) \
1344 1360 or HTML.span(symbol_next, class_="pg-next disabled")
1345 1361 })
1346 1362
1347 1363 return literal(result)
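# Illustrative sketch (not part of the changeset): after the ~radius~ expansion
# the remaining '$'-style placeholders are filled in via string.Template, e.g.:
from string import Template
out = Template('Page $page of $page_count').safe_substitute(
    {'page': 3, 'page_count': 12})
assert out == 'Page 3 of 12'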
1348 1364
1349 1365
1350 1366 #==============================================================================
1351 1367 # REPO PAGER, PAGER FOR REPOSITORY
1352 1368 #==============================================================================
1353 1369 class RepoPage(Page):
1354 1370
1355 1371 def __init__(self, collection, page=1, items_per_page=20,
1356 1372 item_count=None, url=None, **kwargs):
1357 1373
1358 1374 """Create a "RepoPage" instance. special pager for paging
1359 1375 repository
1360 1376 """
1361 1377 self._url_generator = url
1362 1378
1363 1379 # Save the kwargs class-wide so they can be used in the pager() method
1364 1380 self.kwargs = kwargs
1365 1381
1366 1382 # Save a reference to the collection
1367 1383 self.original_collection = collection
1368 1384
1369 1385 self.collection = collection
1370 1386
1371 1387 # The self.page is the number of the current page.
1372 1388 # The first page has the number 1!
1373 1389 try:
1374 1390 self.page = int(page) # make it int() if we get it as a string
1375 1391 except (ValueError, TypeError):
1376 1392 self.page = 1
1377 1393
1378 1394 self.items_per_page = items_per_page
1379 1395
1380 1396 # Unless the user tells us how many items the collections has
1381 1397 # we calculate that ourselves.
1382 1398 if item_count is not None:
1383 1399 self.item_count = item_count
1384 1400 else:
1385 1401 self.item_count = len(self.collection)
1386 1402
1387 1403 # Compute the number of the first and last available page
1388 1404 if self.item_count > 0:
1389 1405 self.first_page = 1
1390 1406 self.page_count = int(math.ceil(float(self.item_count) /
1391 1407 self.items_per_page))
1392 1408 self.last_page = self.first_page + self.page_count - 1
1393 1409
1394 1410 # Make sure that the requested page number is in the range of
1395 1411 # valid pages
1396 1412 if self.page > self.last_page:
1397 1413 self.page = self.last_page
1398 1414 elif self.page < self.first_page:
1399 1415 self.page = self.first_page
1400 1416
1401 1417 # Note: the number of items on this page can be less than
1402 1418 # items_per_page if the last page is not full
1403 1419 self.first_item = max(0, (self.item_count) - (self.page *
1404 1420 items_per_page))
1405 1421 self.last_item = ((self.item_count - 1) - items_per_page *
1406 1422 (self.page - 1))
1407 1423
1408 1424 self.items = list(self.collection[self.first_item:self.last_item + 1])
1409 1425
1410 1426 # Links to previous and next page
1411 1427 if self.page > self.first_page:
1412 1428 self.previous_page = self.page - 1
1413 1429 else:
1414 1430 self.previous_page = None
1415 1431
1416 1432 if self.page < self.last_page:
1417 1433 self.next_page = self.page + 1
1418 1434 else:
1419 1435 self.next_page = None
1420 1436
1421 1437 # No items available
1422 1438 else:
1423 1439 self.first_page = None
1424 1440 self.page_count = 0
1425 1441 self.last_page = None
1426 1442 self.first_item = None
1427 1443 self.last_item = None
1428 1444 self.previous_page = None
1429 1445 self.next_page = None
1430 1446 self.items = []
1431 1447
1432 1448 # This is a subclass of the 'list' type. Initialise the list now.
1433 1449 list.__init__(self, reversed(self.items))
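# Illustrative sketch (not part of the changeset): the slicing arithmetic above
# for a 45-item collection, 20 items per page, on page 1; the reversed() call
# means the newest items are shown first.
item_count, items_per_page, page = 45, 20, 1
first_item = max(0, item_count - page * items_per_page)     # 25
last_item = (item_count - 1) - items_per_page * (page - 1)  # 44
assert (first_item, last_item) == (25, 44)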
1434 1450
1435 1451
1436 1452 def changed_tooltip(nodes):
1437 1453 """
1438 1454 Generates a html string for changed nodes in commit page.
1439 1455 It limits the output to 30 entries
1440 1456
1441 1457 :param nodes: LazyNodesGenerator
1442 1458 """
1443 1459 if nodes:
1444 1460 pref = ': <br/> '
1445 1461 suf = ''
1446 1462 if len(nodes) > 30:
1447 1463 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1448 1464 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1449 1465 for x in nodes[:30]]) + suf)
1450 1466 else:
1451 1467 return ': ' + _('No Files')
1452 1468
1453 1469
1454 1470 def breadcrumb_repo_link(repo):
1455 1471 """
1456 1472 Makes a breadcrumb path link to a repo
1457 1473
1458 1474 ex::
1459 1475 group >> subgroup >> repo
1460 1476
1461 1477 :param repo: a Repository instance
1462 1478 """
1463 1479
1464 1480 path = [
1465 1481 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1466 1482 for group in repo.groups_with_parents
1467 1483 ] + [
1468 1484 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1469 1485 ]
1470 1486
1471 1487 return literal(' &raquo; '.join(path))
1472 1488
1473 1489
1474 1490 def format_byte_size_binary(file_size):
1475 1491 """
1476 1492 Formats file/folder sizes using binary (IEC) units.
1477 1493 """
1478 1494 formatted_size = format_byte_size(file_size, binary=True)
1479 1495 return formatted_size
1480 1496
1481 1497
1482 1498 def fancy_file_stats(stats):
1483 1499 """
1484 1500 Displays a fancy two-colored bar for the number of added/deleted
1485 1501 lines of code in a file
1486 1502
1487 1503 :param stats: two element list of added/deleted lines of code
1488 1504 """
1489 1505 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1490 1506 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1491 1507
1492 1508 def cgen(l_type, a_v, d_v):
1493 1509 mapping = {'tr': 'top-right-rounded-corner-mid',
1494 1510 'tl': 'top-left-rounded-corner-mid',
1495 1511 'br': 'bottom-right-rounded-corner-mid',
1496 1512 'bl': 'bottom-left-rounded-corner-mid'}
1497 1513 map_getter = lambda x: mapping[x]
1498 1514
1499 1515 if l_type == 'a' and d_v:
1500 1516 # case when both added and deleted are present
1501 1517 return ' '.join(map(map_getter, ['tl', 'bl']))
1502 1518
1503 1519 if l_type == 'a' and not d_v:
1504 1520 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1505 1521
1506 1522 if l_type == 'd' and a_v:
1507 1523 return ' '.join(map(map_getter, ['tr', 'br']))
1508 1524
1509 1525 if l_type == 'd' and not a_v:
1510 1526 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1511 1527
1512 1528 a, d = stats['added'], stats['deleted']
1513 1529 width = 100
1514 1530
1515 1531 if stats['binary']: # binary operations like chmod/rename etc
1516 1532 lbl = []
1517 1533 bin_op = 0 # undefined
1518 1534
1519 1535 # prefix with bin for binary files
1520 1536 if BIN_FILENODE in stats['ops']:
1521 1537 lbl += ['bin']
1522 1538
1523 1539 if NEW_FILENODE in stats['ops']:
1524 1540 lbl += [_('new file')]
1525 1541 bin_op = NEW_FILENODE
1526 1542 elif MOD_FILENODE in stats['ops']:
1527 1543 lbl += [_('mod')]
1528 1544 bin_op = MOD_FILENODE
1529 1545 elif DEL_FILENODE in stats['ops']:
1530 1546 lbl += [_('del')]
1531 1547 bin_op = DEL_FILENODE
1532 1548 elif RENAMED_FILENODE in stats['ops']:
1533 1549 lbl += [_('rename')]
1534 1550 bin_op = RENAMED_FILENODE
1535 1551
1536 1552 # chmod can go with other operations, so we add a + to lbl if needed
1537 1553 if CHMOD_FILENODE in stats['ops']:
1538 1554 lbl += [_('chmod')]
1539 1555 if bin_op == 0:
1540 1556 bin_op = CHMOD_FILENODE
1541 1557
1542 1558 lbl = '+'.join(lbl)
1543 1559 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1544 1560 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1545 1561 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1546 1562 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1547 1563
1548 1564 t = stats['added'] + stats['deleted']
1549 1565 unit = float(width) / (t or 1)
1550 1566
1551 1567 # needs > 9% of width to be visible or 0 to be hidden
1552 1568 a_p = max(9, unit * a) if a > 0 else 0
1553 1569 d_p = max(9, unit * d) if d > 0 else 0
1554 1570 p_sum = a_p + d_p
1555 1571
1556 1572 if p_sum > width:
1557 1573 # adjust the percentages to sum to 100% since small values were bumped to 9
1558 1574 if a_p > d_p:
1559 1575 a_p = a_p - (p_sum - width)
1560 1576 else:
1561 1577 d_p = d_p - (p_sum - width)
1562 1578
1563 1579 a_v = a if a > 0 else ''
1564 1580 d_v = d if d > 0 else ''
1565 1581
1566 1582 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1567 1583 cgen('a', a_v, d_v), a_p, a_v
1568 1584 )
1569 1585 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1570 1586 cgen('d', a_v, d_v), d_p, d_v
1571 1587 )
1572 1588 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
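# Illustrative sketch (not part of the changeset): the width calculation above
# for 12 added / 3 deleted lines on a 100px bar; small values are bumped to a
# minimum of 9% so they stay visible.
width, added, deleted = 100, 12, 3
unit = float(width) / (added + deleted)
added_pct = max(9, unit * added)      # 80.0
deleted_pct = max(9, unit * deleted)  # 20.0
assert added_pct + deleted_pct == width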
1573 1589
1574 1590
1575 1591 def urlify_text(text_, safe=True):
1576 1592 """
1577 1593 Extract urls from text and make html links out of them
1578 1594
1579 1595 :param text_:
1580 1596 """
1581 1597
1582 1598 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1583 1599 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1584 1600
1585 1601 def url_func(match_obj):
1586 1602 url_full = match_obj.groups()[0]
1587 1603 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1588 1604 _newtext = url_pat.sub(url_func, text_)
1589 1605 if safe:
1590 1606 return literal(_newtext)
1591 1607 return _newtext
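# Illustrative sketch (not part of the changeset): the URL pattern above
# applied to a plain string.
import re
url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                     r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
text = 'see https://example.com/docs for details'
html = url_pat.sub(lambda m: '<a href="%s">%s</a>' % (m.group(1), m.group(1)), text)
# html == 'see <a href="https://example.com/docs">https://example.com/docs</a> for details'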
1592 1608
1593 1609
1594 1610 def urlify_commits(text_, repository):
1595 1611 """
1596 1612 Extract commit ids from text and make links from them
1597 1613
1598 1614 :param text_:
1599 1615 :param repository: repo name to build the URL with
1600 1616 """
1601 1617 from pylons import url # doh, we need to re-import url to mock it later
1602 1618 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1603 1619
1604 1620 def url_func(match_obj):
1605 1621 commit_id = match_obj.groups()[1]
1606 1622 pref = match_obj.groups()[0]
1607 1623 suf = match_obj.groups()[2]
1608 1624
1609 1625 tmpl = (
1610 1626 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1611 1627 '%(commit_id)s</a>%(suf)s'
1612 1628 )
1613 1629 return tmpl % {
1614 1630 'pref': pref,
1615 1631 'cls': 'revision-link',
1616 1632 'url': url('changeset_home', repo_name=repository,
1617 revision=commit_id),
1633 revision=commit_id, qualified=True),
1618 1634 'commit_id': commit_id,
1619 1635 'suf': suf
1620 1636 }
1621 1637
1622 1638 newtext = URL_PAT.sub(url_func, text_)
1623 1639
1624 1640 return newtext
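# Illustrative sketch (not part of the changeset): the commit id pattern above
# picks up 12-40 character hex strings surrounded by whitespace.
import re
URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
match = URL_PAT.search('reverts deadbeefcafe1234')
assert match.group(2) == 'deadbeefcafe1234'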
1625 1641
1626 1642
1627 def _process_url_func(match_obj, repo_name, uid, entry):
1643 def _process_url_func(match_obj, repo_name, uid, entry,
1644 return_raw_data=False):
1628 1645 pref = ''
1629 1646 if match_obj.group().startswith(' '):
1630 1647 pref = ' '
1631 1648
1632 1649 issue_id = ''.join(match_obj.groups())
1633 1650 tmpl = (
1634 1651 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 1652 '%(issue-prefix)s%(id-repr)s'
1636 1653 '</a>')
1637 1654
1638 1655 (repo_name_cleaned,
1639 1656 parent_group_name) = RepoGroupModel().\
1640 1657 _get_group_name_and_parent(repo_name)
1641 1658
1642 1659 # variables replacement
1643 1660 named_vars = {
1644 1661 'id': issue_id,
1645 1662 'repo': repo_name,
1646 1663 'repo_name': repo_name_cleaned,
1647 1664 'group_name': parent_group_name
1648 1665 }
1649 1666 # named regex variables
1650 1667 named_vars.update(match_obj.groupdict())
1651 1668 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1652 1669
1653 return tmpl % {
1670 data = {
1654 1671 'pref': pref,
1655 1672 'cls': 'issue-tracker-link',
1656 1673 'url': _url,
1657 1674 'id-repr': issue_id,
1658 1675 'issue-prefix': entry['pref'],
1659 1676 'serv': entry['url'],
1660 1677 }
1678 if return_raw_data:
1679 return {
1680 'id': issue_id,
1681 'url': _url
1682 }
1683 return tmpl % data
1661 1684
1662 1685
1663 def process_patterns(text_string, repo_name, config):
1686 def process_patterns(text_string, repo_name, config=None):
1664 1687 repo = None
1665 1688 if repo_name:
1666 1689 # Retrieve the repo so an invalid repo_name does not explode in
1667 1690 # IssueTrackerSettingsModel, but still pass the invalid name further down
1668 1691 repo = Repository.get_by_repo_name(repo_name, cache=True)
1669 1692
1670 1693 settings_model = IssueTrackerSettingsModel(repo=repo)
1671 1694 active_entries = settings_model.get_settings(cache=True)
1672 1695
1696 issues_data = []
1673 1697 newtext = text_string
1674 1698 for uid, entry in active_entries.items():
1675 url_func = partial(
1676 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1677
1678 1699 log.debug('found issue tracker entry with uid %s' % (uid,))
1679 1700
1680 1701 if not (entry['pat'] and entry['url']):
1681 1702 log.debug('skipping due to missing data')
1682 1703 continue
1683 1704
1684 1705 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1685 1706 % (uid, entry['pat'], entry['url'], entry['pref']))
1686 1707
1687 1708 try:
1688 1709 pattern = re.compile(r'%s' % entry['pat'])
1689 1710 except re.error:
1690 1711 log.exception(
1691 1712 'issue tracker pattern: `%s` failed to compile',
1692 1713 entry['pat'])
1693 1714 continue
1694 1715
1716 data_func = partial(
1717 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1718 return_raw_data=True)
1719
1720 for match_obj in pattern.finditer(text_string):
1721 issues_data.append(data_func(match_obj))
1722
1723 url_func = partial(
1724 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1725
1695 1726 newtext = pattern.sub(url_func, newtext)
1696 1727 log.debug('processed prefix:uid `%s`' % (uid,))
1697 1728
1698 return newtext
1729 return newtext, issues_data
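# Illustrative sketch (not part of the changeset, hypothetical tracker entry):
# how one active entry maps '#123' references to URLs; process_patterns now
# also returns the raw id/url pairs collected in issues_data.
import re
import string
entry = {'pat': r'#(?P<issue_id>\d+)',
         'url': 'https://issues.example.com/${issue_id}',
         'pref': '#'}
match = re.compile(entry['pat']).search('closes #123')
named_vars = {'id': ''.join(match.groups())}
named_vars.update(match.groupdict())
issue_url = string.Template(entry['url']).safe_substitute(**named_vars)
assert issue_url == 'https://issues.example.com/123'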
1699 1730
1700 1731
1701 1732 def urlify_commit_message(commit_text, repository=None):
1702 1733 """
1703 1734 Parses the given text message and makes proper links.
1704 1735 Issues are linked to the configured issue tracker, and commit ids become commit links.
1705 1736
1706 1737 :param commit_text:
1707 1738 :param repository:
1708 1739 """
1709 1740 from pylons import url # doh, we need to re-import url to mock it later
1710 from rhodecode import CONFIG
1711 1741
1712 1742 def escaper(string):
1713 1743 return string.replace('<', '&lt;').replace('>', '&gt;')
1714 1744
1715 1745 newtext = escaper(commit_text)
1746
1747 # extract http/https links and make them real urls
1748 newtext = urlify_text(newtext, safe=False)
1749
1716 1750 # urlify commits - extract commit ids and make link out of them, if we have
1717 1751 # the scope of repository present.
1718 1752 if repository:
1719 1753 newtext = urlify_commits(newtext, repository)
1720 1754
1721 # extract http/https links and make them real urls
1722 newtext = urlify_text(newtext, safe=False)
1723
1724 1755 # process issue tracker patterns
1725 newtext = process_patterns(newtext, repository or '', CONFIG)
1756 newtext, issues = process_patterns(newtext, repository or '')
1726 1757
1727 1758 return literal(newtext)
1728 1759
1729 1760
1730 1761 def rst(source, mentions=False):
1731 1762 return literal('<div class="rst-block">%s</div>' %
1732 1763 MarkupRenderer.rst(source, mentions=mentions))
1733 1764
1734 1765
1735 1766 def markdown(source, mentions=False):
1736 1767 return literal('<div class="markdown-block">%s</div>' %
1737 1768 MarkupRenderer.markdown(source, flavored=True,
1738 1769 mentions=mentions))
1739 1770
1740 1771 def renderer_from_filename(filename, exclude=None):
1741 1772 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1742 1773
1743 1774
1744 1775 def render(source, renderer='rst', mentions=False):
1745 1776 if renderer == 'rst':
1746 1777 return rst(source, mentions=mentions)
1747 1778 if renderer == 'markdown':
1748 1779 return markdown(source, mentions=mentions)
1749 1780
1750 1781
1751 1782 def commit_status(repo, commit_id):
1752 1783 return ChangesetStatusModel().get_status(repo, commit_id)
1753 1784
1754 1785
1755 1786 def commit_status_lbl(commit_status):
1756 1787 return dict(ChangesetStatus.STATUSES).get(commit_status)
1757 1788
1758 1789
1759 1790 def commit_time(repo_name, commit_id):
1760 1791 repo = Repository.get_by_repo_name(repo_name)
1761 1792 commit = repo.get_commit(commit_id=commit_id)
1762 1793 return commit.date
1763 1794
1764 1795
1765 1796 def get_permission_name(key):
1766 1797 return dict(Permission.PERMS).get(key)
1767 1798
1768 1799
1769 1800 def journal_filter_help():
1770 1801 return _(
1771 1802 'Example filter terms:\n' +
1772 1803 ' repository:vcs\n' +
1773 1804 ' username:marcin\n' +
1774 1805 ' action:*push*\n' +
1775 1806 ' ip:127.0.0.1\n' +
1776 1807 ' date:20120101\n' +
1777 1808 ' date:[20120101100000 TO 20120102]\n' +
1778 1809 '\n' +
1779 1810 'Generate wildcards using \'*\' character:\n' +
1780 1811 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1781 1812 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1782 1813 '\n' +
1783 1814 'Optional AND / OR operators in queries\n' +
1784 1815 ' "repository:vcs OR repository:test"\n' +
1785 1816 ' "username:test AND repository:test*"\n'
1786 1817 )
1787 1818
1788 1819
1789 1820 def not_mapped_error(repo_name):
1790 1821 flash(_('%s repository is not mapped to db, perhaps'
1791 1822 ' it was created or renamed from the filesystem.'
1792 1823 ' Please run the application again'
1793 1824 ' in order to rescan repositories') % repo_name, category='error')
1794 1825
1795 1826
1796 1827 def ip_range(ip_addr):
1797 1828 from rhodecode.model.db import UserIpMap
1798 1829 s, e = UserIpMap._get_ip_range(ip_addr)
1799 1830 return '%s - %s' % (s, e)
1800 1831
1801 1832
1802 1833 def form(url, method='post', needs_csrf_token=True, **attrs):
1803 1834 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1804 1835 if method.lower() != 'get' and needs_csrf_token:
1805 1836 raise Exception(
1806 1837 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1807 1838 'CSRF token. If the endpoint does not require such token you can ' +
1808 1839 'explicitly set the parameter needs_csrf_token to false.')
1809 1840
1810 1841 return wh_form(url, method=method, **attrs)
1811 1842
1812 1843
1813 1844 def secure_form(url, method="POST", multipart=False, **attrs):
1814 1845 """Start a form tag that points the action to an url. This
1815 1846 form tag will also include the hidden field containing
1816 1847 the auth token.
1817 1848
1818 1849 The url options should be given either as a string, or as a
1819 1850 ``url()`` function. The method for the form defaults to POST.
1820 1851
1821 1852 Options:
1822 1853
1823 1854 ``multipart``
1824 1855 If set to True, the enctype is set to "multipart/form-data".
1825 1856 ``method``
1826 1857 The method to use when submitting the form, usually either
1827 1858 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1828 1859 hidden input with name _method is added to simulate the verb
1829 1860 over POST.
1830 1861
1831 1862 """
1832 1863 from webhelpers.pylonslib.secure_form import insecure_form
1833 1864 from rhodecode.lib.auth import get_csrf_token, csrf_token_key
1834 1865 form = insecure_form(url, method, multipart, **attrs)
1835 1866 token = HTML.div(hidden(csrf_token_key, get_csrf_token()), style="display: none;")
1836 1867 return literal("%s\n%s" % (form, token))
1837 1868
1838 1869 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1839 1870 select_html = select(name, selected, options, **attrs)
1840 1871 select2 = """
1841 1872 <script>
1842 1873 $(document).ready(function() {
1843 1874 $('#%s').select2({
1844 1875 containerCssClass: 'drop-menu',
1845 1876 dropdownCssClass: 'drop-menu-dropdown',
1846 1877 dropdownAutoWidth: true%s
1847 1878 });
1848 1879 });
1849 1880 </script>
1850 1881 """
1851 1882 filter_option = """,
1852 1883 minimumResultsForSearch: -1
1853 1884 """
1854 1885 input_id = attrs.get('id') or name
1855 1886 filter_enabled = "" if enable_filter else filter_option
1856 1887 select_script = literal(select2 % (input_id, filter_enabled))
1857 1888
1858 1889 return literal(select_html+select_script)
1859 1890
1860 1891
1861 1892 def get_visual_attr(tmpl_context_var, attr_name):
1862 1893 """
1863 1894 A safe way to get a variable from visual variable of template context
1864 1895
1865 1896 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1866 1897 :param attr_name: name of the attribute we fetch from the c.visual
1867 1898 """
1868 1899 visual = getattr(tmpl_context_var, 'visual', None)
1869 1900 if not visual:
1870 1901 return
1871 1902 else:
1872 1903 return getattr(visual, attr_name, None)
1873 1904
1874 1905
1875 1906 def get_last_path_part(file_node):
1876 1907 if not file_node.path:
1877 1908 return u''
1878 1909
1879 1910 path = safe_unicode(file_node.path.split('/')[-1])
1880 1911 return u'../' + path
1881 1912
1882 1913
1883 1914 def route_path(*args, **kwds):
1884 1915 """
1885 1916 Wrapper around pyramid's `route_path` function. It is used to generate
1886 1917 URLs from within pylons views or templates. This will be removed when
1887 1918 the pyramid migration is finished.
1888 1919 """
1889 1920 req = get_current_request()
1890 1921 return req.route_path(*args, **kwds)
1891 1922
1892 1923
1893 1924 def resource_path(*args, **kwds):
1894 1925 """
1895 1926 Wrapper around pyramid's `resource_path` function. It is used to generate
1896 1927 URLs from within pylons views or templates. This will be removed when
1897 1928 the pyramid migration is finished.
1898 1929 """
1899 1930 req = get_current_request()
1900 1931 return req.resource_path(*args, **kwds)
@@ -1,260 +1,278 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import logging
23 import urlparse
23 24 import threading
24 25 from BaseHTTPServer import BaseHTTPRequestHandler
25 26 from SocketServer import TCPServer
27 from routes.util import URLGenerator
26 28
27 29 import Pyro4
30 import pylons
31 import rhodecode
28 32
29 33 from rhodecode.lib import hooks_base
30 34 from rhodecode.lib.utils2 import AttributeDict
31 35
32 36
33 37 log = logging.getLogger(__name__)
34 38
35 39
36 40 class HooksHttpHandler(BaseHTTPRequestHandler):
37 41 def do_POST(self):
38 42 method, extras = self._read_request()
39 43 try:
40 44 result = self._call_hook(method, extras)
41 45 except Exception as e:
42 46 result = {
43 47 'exception': e.__class__.__name__,
44 48 'exception_args': e.args
45 49 }
46 50 self._write_response(result)
47 51
48 52 def _read_request(self):
49 53 length = int(self.headers['Content-Length'])
50 54 body = self.rfile.read(length).decode('utf-8')
51 55 data = json.loads(body)
52 56 return data['method'], data['extras']
53 57
54 58 def _write_response(self, result):
55 59 self.send_response(200)
56 60 self.send_header("Content-type", "text/json")
57 61 self.end_headers()
58 62 self.wfile.write(json.dumps(result))
59 63
60 64 def _call_hook(self, method, extras):
61 65 hooks = Hooks()
62 66 result = getattr(hooks, method)(extras)
63 67 return result
64 68
65 69 def log_message(self, format, *args):
66 70 """
67 71 This is an overridden method of BaseHTTPRequestHandler which logs using
68 72 the logging library instead of writing directly to stderr.
69 73 """
70 74
71 75 message = format % args
72 76
73 77 # TODO: mikhail: add different log levels support
74 78 log.debug(
75 79 "%s - - [%s] %s", self.client_address[0],
76 80 self.log_date_time_string(), message)
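# Illustrative sketch (not part of the changeset): a minimal client for the
# JSON-over-HTTP protocol that HooksHttpHandler implements above; hooks_uri is
# the 'host:port' string reported by HttpHooksCallbackDaemon (Python 2 urllib2).
import json
import urllib2

def call_hook(hooks_uri, method, extras):
    payload = json.dumps({'method': method, 'extras': extras})
    request = urllib2.Request('http://' + hooks_uri, payload,
                              {'Content-Type': 'text/json'})
    return json.loads(urllib2.urlopen(request).read())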
77 81
78 82
79 83 class DummyHooksCallbackDaemon(object):
80 84 def __init__(self):
81 85 self.hooks_module = Hooks.__module__
82 86
83 87 def __enter__(self):
84 88 log.debug('Running dummy hooks callback daemon')
85 89 return self
86 90
87 91 def __exit__(self, exc_type, exc_val, exc_tb):
88 92 log.debug('Exiting dummy hooks callback daemon')
89 93
90 94
91 95 class ThreadedHookCallbackDaemon(object):
92 96
93 97 _callback_thread = None
94 98 _daemon = None
95 99 _done = False
96 100
97 101 def __init__(self):
98 102 self._prepare()
99 103
100 104 def __enter__(self):
101 105 self._run()
102 106 return self
103 107
104 108 def __exit__(self, exc_type, exc_val, exc_tb):
105 109 self._stop()
106 110
107 111 def _prepare(self):
108 112 raise NotImplementedError()
109 113
110 114 def _run(self):
111 115 raise NotImplementedError()
112 116
113 117 def _stop(self):
114 118 raise NotImplementedError()
115 119
116 120
117 121 class Pyro4HooksCallbackDaemon(ThreadedHookCallbackDaemon):
118 122 """
119 123 Context manager which will run a callback daemon in a background thread.
120 124 """
121 125
122 126 hooks_uri = None
123 127
124 128 def _prepare(self):
125 129 log.debug("Preparing callback daemon and registering hook object")
126 130 self._daemon = Pyro4.Daemon()
127 131 hooks_interface = Hooks()
128 132 self.hooks_uri = str(self._daemon.register(hooks_interface))
129 133 log.debug("Hooks uri is: %s", self.hooks_uri)
130 134
131 135 def _run(self):
132 136 log.debug("Running event loop of callback daemon in background thread")
133 137 callback_thread = threading.Thread(
134 138 target=self._daemon.requestLoop,
135 139 kwargs={'loopCondition': lambda: not self._done})
136 140 callback_thread.daemon = True
137 141 callback_thread.start()
138 142 self._callback_thread = callback_thread
139 143
140 144 def _stop(self):
141 145 log.debug("Waiting for background thread to finish.")
142 146 self._done = True
143 147 self._callback_thread.join()
144 148 self._daemon.close()
145 149 self._daemon = None
146 150 self._callback_thread = None
147 151
148 152
149 153 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
150 154 """
151 155 Context manager which will run a callback daemon in a background thread.
152 156 """
153 157
154 158 hooks_uri = None
155 159
156 160 IP_ADDRESS = '127.0.0.1'
157 161
158 162 # From Python docs: Polling reduces our responsiveness to a shutdown
159 163 # request and wastes cpu at all other times.
160 164 POLL_INTERVAL = 0.1
161 165
162 166 def _prepare(self):
163 167 log.debug("Preparing callback daemon and registering hook object")
164 168
165 169 self._done = False
166 170 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
167 171 _, port = self._daemon.server_address
168 172 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
169 173
170 174 log.debug("Hooks uri is: %s", self.hooks_uri)
171 175
172 176 def _run(self):
173 177 log.debug("Running event loop of callback daemon in background thread")
174 178 callback_thread = threading.Thread(
175 179 target=self._daemon.serve_forever,
176 180 kwargs={'poll_interval': self.POLL_INTERVAL})
177 181 callback_thread.daemon = True
178 182 callback_thread.start()
179 183 self._callback_thread = callback_thread
180 184
181 185 def _stop(self):
182 186 log.debug("Waiting for background thread to finish.")
183 187 self._daemon.shutdown()
184 188 self._callback_thread.join()
185 189 self._daemon = None
186 190 self._callback_thread = None
187 191
188 192
189 193 def prepare_callback_daemon(extras, protocol=None, use_direct_calls=False):
190 194 callback_daemon = None
191 195 protocol = protocol.lower() if protocol else None
192 196
193 197 if use_direct_calls:
194 198 callback_daemon = DummyHooksCallbackDaemon()
195 199 extras['hooks_module'] = callback_daemon.hooks_module
196 200 else:
197 201 callback_daemon = (
198 202 Pyro4HooksCallbackDaemon()
199 203 if protocol == 'pyro4'
200 204 else HttpHooksCallbackDaemon())
201 205 extras['hooks_uri'] = callback_daemon.hooks_uri
202 206 extras['hooks_protocol'] = protocol
203 207
204 208 return callback_daemon, extras
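# Illustrative sketch (not part of the changeset): typical usage assuming the
# 'http' protocol; the daemons above are context managers, so the callback
# server only runs for the duration of the vcs operation.
extras = {}
callback_daemon, extras = prepare_callback_daemon(
    extras, protocol='http', use_direct_calls=False)
with callback_daemon:
    pass  # run the vcs operation that fires the hooks here
# extras now carries 'hooks_uri' and 'hooks_protocol' for the remote side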
205 209
206 210
207 211 class Hooks(object):
208 212 """
209 213 Exposes the hooks for remote callbacks
210 214 """
211 215
212 216 @Pyro4.callback
213 217 def repo_size(self, extras):
214 218 log.debug("Called repo_size of Hooks object")
215 219 return self._call_hook(hooks_base.repo_size, extras)
216 220
217 221 @Pyro4.callback
218 222 def pre_pull(self, extras):
219 223 log.debug("Called pre_pull of Hooks object")
220 224 return self._call_hook(hooks_base.pre_pull, extras)
221 225
222 226 @Pyro4.callback
223 227 def post_pull(self, extras):
224 228 log.debug("Called post_pull of Hooks object")
225 229 return self._call_hook(hooks_base.post_pull, extras)
226 230
227 231 @Pyro4.callback
228 232 def pre_push(self, extras):
229 233 log.debug("Called pre_push of Hooks object")
230 234 return self._call_hook(hooks_base.pre_push, extras)
231 235
232 236 @Pyro4.callback
233 237 def post_push(self, extras):
234 238 log.debug("Called post_push of Hooks object")
235 239 return self._call_hook(hooks_base.post_push, extras)
236 240
237 241 def _call_hook(self, hook, extras):
238 242 extras = AttributeDict(extras)
243 netloc = urlparse.urlparse(extras.server_url).netloc
244 environ = {
245 'SERVER_NAME': netloc.split(':')[0],
246 'SERVER_PORT': ':' in netloc and netloc.split(':')[1] or '80',
247 'SCRIPT_NAME': '',
248 'PATH_INFO': '/',
249 'HTTP_HOST': 'localhost',
250 'REQUEST_METHOD': 'GET',
251 }
252 pylons_router = URLGenerator(rhodecode.CONFIG['routes.map'], environ)
253 pylons.url._push_object(pylons_router)
239 254
240 255 try:
241 256 result = hook(extras)
242 257 except Exception as error:
243 258 log.exception('Exception when handling hook %s', hook)
244 259 error_args = error.args
245 260 return {
246 261 'status': 128,
247 262 'output': '',
248 263 'exception': type(error).__name__,
249 264 'exception_args': error_args,
250 265 }
266 finally:
267 pylons.url._pop_object()
268
251 269 return {
252 270 'status': result.status,
253 271 'output': result.output,
254 272 }
255 273
256 274 def __enter__(self):
257 275 return self
258 276
259 277 def __exit__(self, exc_type, exc_val, exc_tb):
260 278 pass
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff