events: add serialization .to_dict() to events based on marshmallow
dan
r379:a86e0931 default

The requested changes are too big and content was truncated.

@@ -0,0 +1,69 b''
# Copyright (C) 2016-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

from datetime import datetime
from marshmallow import Schema, fields
from pyramid.threadlocal import get_current_request
from rhodecode.lib.utils2 import AttributeDict


SYSTEM_USER = AttributeDict(dict(
    username='__SYSTEM__'
))


class UserSchema(Schema):
    """
    Marshmallow schema for a user
    """
    username = fields.Str()


class RhodecodeEventSchema(Schema):
    """
    Marshmallow schema for a rhodecode event
    """
    utc_timestamp = fields.DateTime()
    acting_user = fields.Nested(UserSchema)
    acting_ip = fields.Str()


class RhodecodeEvent(object):
    """
    Base event class for all Rhodecode events
    """
    MarshmallowSchema = RhodecodeEventSchema

    def __init__(self):
        self.request = get_current_request()
        self.utc_timestamp = datetime.utcnow()

    @property
    def acting_user(self):
        if self.request:
            return self.request.user.get_instance()
        return SYSTEM_USER

    @property
    def acting_ip(self):
        if self.request:
            return self.request.user.ip_addr
        return '<no ip available>'

    def as_dict(self):
        return self.MarshmallowSchema().dump(self).data
\ No newline at end of file
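
For illustration only (not part of this commit), here is a minimal standalone sketch of how a subclass could pair its own marshmallow schema with an event class and what as_dict() would return. RepoSchema, RepoEventSchema and RepoEvent are hypothetical names, and the dump(...).data call assumes the marshmallow 2.x API used in the module above.

# Illustrative sketch only: RepoSchema/RepoEvent are hypothetical and not part
# of this commit. Assumes marshmallow 2.x, where Schema.dump() returns a
# MarshalResult whose serialized dict is exposed as `.data`.
import collections
from datetime import datetime

from marshmallow import Schema, fields


class RepoSchema(Schema):
    repo_name = fields.Str()


class RepoEventSchema(Schema):
    utc_timestamp = fields.DateTime()
    repo = fields.Nested(RepoSchema)


class RepoEvent(object):
    MarshmallowSchema = RepoEventSchema

    def __init__(self, repo):
        self.utc_timestamp = datetime.utcnow()
        self.repo = repo

    def as_dict(self):
        # Same pattern as RhodecodeEvent.as_dict() above: serialize the event
        # through its class-level schema.
        return self.MarshmallowSchema().dump(self).data


Repo = collections.namedtuple('Repo', 'repo_name')
event = RepoEvent(repo=Repo(repo_name='test-repo'))
print(event.as_dict())
# -> {'utc_timestamp': '2016-...T...', 'repo': {'repo_name': 'test-repo'}}

Keeping the schema on the class (MarshmallowSchema) lets a subclass change what gets serialized by swapping the schema, without overriding as_dict() itself.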
@@ -1,1641 +1,1654 b''
{
  Babel = super.buildPythonPackage {
    name = "Babel-1.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [pytz];
    src = fetchurl {
      url = "https://pypi.python.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
      md5 = "5264ceb02717843cbc9ffce8e6e06bdb";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  Beaker = super.buildPythonPackage {
    name = "Beaker-1.7.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
      md5 = "386be3f7fe427358881eee4622b428b3";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  CProfileV = super.buildPythonPackage {
    name = "CProfileV-1.0.6";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [bottle];
    src = fetchurl {
      url = "https://pypi.python.org/packages/eb/df/983a0b6cfd3ac94abf023f5011cb04f33613ace196e33f53c86cf91850d5/CProfileV-1.0.6.tar.gz";
      md5 = "08c7c242b6e64237bc53c5d13537e03d";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Fabric = super.buildPythonPackage {
    name = "Fabric-1.10.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [paramiko];
    src = fetchurl {
      url = "https://pypi.python.org/packages/e3/5f/b6ebdb5241d5ec9eab582a5c8a01255c1107da396f849e538801d2fe64a5/Fabric-1.10.0.tar.gz";
      md5 = "2cb96473387f0e7aa035210892352f4a";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  FormEncode = super.buildPythonPackage {
    name = "FormEncode-1.2.4";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
      md5 = "6bc17fb9aed8aea198975e888e2077f4";
    };
    meta = {
      license = [ pkgs.lib.licenses.psfl ];
    };
  };
  Jinja2 = super.buildPythonPackage {
    name = "Jinja2-2.7.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [MarkupSafe];
    src = fetchurl {
      url = "https://pypi.python.org/packages/b0/73/eab0bca302d6d6a0b5c402f47ad1760dc9cb2dd14bbc1873ad48db258e4d/Jinja2-2.7.3.tar.gz";
      md5 = "b9dffd2f3b43d673802fe857c8445b1a";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  Mako = super.buildPythonPackage {
    name = "Mako-1.0.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [MarkupSafe];
    src = fetchurl {
      url = "https://pypi.python.org/packages/8e/a4/aa56533ecaa5f22ca92428f74e074d0c9337282933c722391902c8f9e0f8/Mako-1.0.1.tar.gz";
      md5 = "9f0aafd177b039ef67b90ea350497a54";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Markdown = super.buildPythonPackage {
    name = "Markdown-2.6.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/62/8b/83658b5f6c220d5fcde9f9852d46ea54765d734cfbc5a9f4c05bfc36db4d/Markdown-2.6.2.tar.gz";
      md5 = "256d19afcc564dc4ce4c229bb762f7ae";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  MarkupSafe = super.buildPythonPackage {
    name = "MarkupSafe-0.23";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
      md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  MySQL-python = super.buildPythonPackage {
    name = "MySQL-python-1.2.5";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
      md5 = "654f75b302db6ed8dc5a898c625e030c";
    };
    meta = {
      license = [ pkgs.lib.licenses.gpl1 ];
    };
  };
  Paste = super.buildPythonPackage {
    name = "Paste-2.0.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [six];
    src = fetchurl {
      url = "https://pypi.python.org/packages/d5/8d/0f8ac40687b97ff3e07ebd1369be20bdb3f93864d2dc3c2ff542edb4ce50/Paste-2.0.2.tar.gz";
      md5 = "4bfc8a7eaf858f6309d2ac0f40fc951c";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  PasteDeploy = super.buildPythonPackage {
    name = "PasteDeploy-1.5.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
      md5 = "352b7205c78c8de4987578d19431af3b";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  PasteScript = super.buildPythonPackage {
    name = "PasteScript-1.7.5";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [Paste PasteDeploy];
    src = fetchurl {
      url = "https://pypi.python.org/packages/a5/05/fc60efa7c2f17a1dbaeccb2a903a1e90902d92b9d00eebabe3095829d806/PasteScript-1.7.5.tar.gz";
      md5 = "4c72d78dcb6bb993f30536842c16af4d";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Pygments = super.buildPythonPackage {
    name = "Pygments-2.1.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
      md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  Pylons = super.buildPythonPackage {
    name = "Pylons-1.0.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [Routes WebHelpers Beaker Paste PasteDeploy PasteScript FormEncode simplejson decorator nose Mako WebError WebTest Tempita MarkupSafe WebOb];
    src = fetchurl {
      url = "https://pypi.python.org/packages/a2/69/b835a6bad00acbfeed3f33c6e44fa3f936efc998c795bfb15c61a79ecf62/Pylons-1.0.1.tar.gz";
      md5 = "6cb880d75fa81213192142b07a6e4915";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  Pyro4 = super.buildPythonPackage {
    name = "Pyro4-4.41";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [serpent];
    src = fetchurl {
      url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
      md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Routes = super.buildPythonPackage {
    name = "Routes-1.13";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [repoze.lru];
    src = fetchurl {
      url = "https://pypi.python.org/packages/88/d3/259c3b3cde8837eb9441ab5f574a660e8a4acea8f54a078441d4d2acac1c/Routes-1.13.tar.gz";
      md5 = "d527b0ab7dd9172b1275a41f97448783";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  SQLAlchemy = super.buildPythonPackage {
    name = "SQLAlchemy-0.9.9";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/28/f7/1bbfd0d8597e8c358d5e15a166a486ad82fc5579b4e67b6ef7c05b1d182b/SQLAlchemy-0.9.9.tar.gz";
      md5 = "8a10a9bd13ed3336ef7333ac2cc679ff";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Sphinx = super.buildPythonPackage {
    name = "Sphinx-1.2.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [Pygments docutils Jinja2];
    src = fetchurl {
      url = "https://pypi.python.org/packages/0a/50/34017e6efcd372893a416aba14b84a1a149fc7074537b0e9cb6ca7b7abe9/Sphinx-1.2.2.tar.gz";
      md5 = "3dc73ccaa8d0bfb2d62fb671b1f7e8a4";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  Tempita = super.buildPythonPackage {
    name = "Tempita-0.5.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
      md5 = "4c2f17bb9d481821c41b6fbee904cea1";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  URLObject = super.buildPythonPackage {
    name = "URLObject-2.4.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/cb/b6/e25e58500f9caef85d664bec71ec67c116897bfebf8622c32cb75d1ca199/URLObject-2.4.0.tar.gz";
      md5 = "2ed819738a9f0a3051f31dc9924e3065";
    };
    meta = {
      license = [ ];
    };
  };
  WebError = super.buildPythonPackage {
    name = "WebError-0.10.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [WebOb Tempita Pygments Paste];
    src = fetchurl {
      url = "https://pypi.python.org/packages/35/76/e7e5c2ce7e9c7f31b54c1ff295a495886d1279a002557d74dd8957346a79/WebError-0.10.3.tar.gz";
      md5 = "84b9990b0baae6fd440b1e60cdd06f9a";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  WebHelpers = super.buildPythonPackage {
    name = "WebHelpers-1.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [MarkupSafe];
    src = fetchurl {
      url = "https://pypi.python.org/packages/ee/68/4d07672821d514184357f1552f2dad923324f597e722de3b016ca4f7844f/WebHelpers-1.3.tar.gz";
      md5 = "32749ffadfc40fea51075a7def32588b";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  WebHelpers2 = super.buildPythonPackage {
    name = "WebHelpers2-2.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [MarkupSafe six];
    src = fetchurl {
      url = "https://pypi.python.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
      md5 = "0f6b68d70c12ee0aed48c00b24da13d3";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  WebOb = super.buildPythonPackage {
    name = "WebOb-1.3.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
      md5 = "20918251c5726956ba8fef22d1556177";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  WebTest = super.buildPythonPackage {
    name = "WebTest-1.4.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [WebOb];
    src = fetchurl {
      url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
      md5 = "631ce728bed92c681a4020a36adbc353";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  Whoosh = super.buildPythonPackage {
    name = "Whoosh-2.7.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/1c/dc/2f0231ff3875ded36df8c1ab851451e51a237dc0e5a86d3d96036158da94/Whoosh-2.7.0.zip";
      md5 = "7abfd970f16fadc7311960f3fa0bc7a9";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
    };
  };
  alembic = super.buildPythonPackage {
    name = "alembic-0.8.4";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [SQLAlchemy Mako python-editor];
    src = fetchurl {
      url = "https://pypi.python.org/packages/ca/7e/299b4499b5c75e5a38c5845145ad24755bebfb8eec07a2e1c366b7181eeb/alembic-0.8.4.tar.gz";
      md5 = "5f95d8ee62b443f9b37eb5bee76c582d";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  amqplib = super.buildPythonPackage {
    name = "amqplib-1.0.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz";
      md5 = "5c92f17fbedd99b2b4a836d4352d1e2f";
    };
    meta = {
      license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
    };
  };
  anyjson = super.buildPythonPackage {
    name = "anyjson-0.3.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz";
      md5 = "2ea28d6ec311aeeebaf993cb3008b27c";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  appenlight-client = super.buildPythonPackage {
    name = "appenlight-client-0.6.14";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [WebOb requests];
    src = fetchurl {
      url = "https://pypi.python.org/packages/4d/e0/23fee3ebada8143f707e65c06bcb82992040ee64ea8355e044ed55ebf0c1/appenlight_client-0.6.14.tar.gz";
      md5 = "578c69b09f4356d898fff1199b98a95c";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal { fullName = "DFSG approved"; } ];
    };
  };
  authomatic = super.buildPythonPackage {
    name = "authomatic-0.1.0.post1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/08/1a/8a930461e604c2d5a7a871e1ac59fa82ccf994c32e807230c8d2fb07815a/Authomatic-0.1.0.post1.tar.gz";
      md5 = "be3f3ce08747d776aae6d6cc8dcb49a9";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  backport-ipaddress = super.buildPythonPackage {
    name = "backport-ipaddress-0.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/d3/30/54c6dab05a4dec44db25ff309f1fbb6b7a8bde3f2bade38bb9da67bbab8f/backport_ipaddress-0.1.tar.gz";
      md5 = "9c1f45f4361f71b124d7293a60006c05";
    };
    meta = {
      license = [ pkgs.lib.licenses.psfl ];
    };
  };
  bottle = super.buildPythonPackage {
    name = "bottle-0.12.8";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/52/df/e4a408f3a7af396d186d4ecd3b389dd764f0f943b4fa8d257bfe7b49d343/bottle-0.12.8.tar.gz";
      md5 = "13132c0a8f607bf860810a6ee9064c5b";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  bumpversion = super.buildPythonPackage {
    name = "bumpversion-0.5.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
      md5 = "c66a3492eafcf5ad4b024be9fca29820";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  celery = super.buildPythonPackage {
    name = "celery-2.2.10";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [python-dateutil anyjson kombu pyparsing];
    src = fetchurl {
      url = "https://pypi.python.org/packages/b1/64/860fd50e45844c83442e7953effcddeff66b2851d90b2d784f7201c111b8/celery-2.2.10.tar.gz";
      md5 = "898bc87e54f278055b561316ba73e222";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  click = super.buildPythonPackage {
    name = "click-5.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/b7/34/a496632c4fb6c1ee76efedf77bb8d28b29363d839953d95095b12defe791/click-5.1.tar.gz";
      md5 = "9c5323008cccfe232a8b161fc8196d41";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  colander = super.buildPythonPackage {
    name = "colander-1.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [translationstring iso8601];
    src = fetchurl {
      url = "https://pypi.python.org/packages/14/23/c9ceba07a6a1dc0eefbb215fc0dc64aabc2b22ee756bc0f0c13278fa0887/colander-1.2.tar.gz";
      md5 = "83db21b07936a0726e588dae1914b9ed";
    };
    meta = {
      license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
    };
  };
  configobj = super.buildPythonPackage {
    name = "configobj-5.0.6";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [six];
    src = fetchurl {
      url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
      md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  cov-core = super.buildPythonPackage {
    name = "cov-core-1.15.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [coverage];
    src = fetchurl {
      url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
      md5 = "f519d4cb4c4e52856afb14af52919fe6";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  coverage = super.buildPythonPackage {
    name = "coverage-3.7.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
      md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  cssselect = super.buildPythonPackage {
    name = "cssselect-0.9.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/aa/e5/9ee1460d485b94a6d55732eb7ad5b6c084caf73dd6f9cb0bb7d2a78fafe8/cssselect-0.9.1.tar.gz";
      md5 = "c74f45966277dc7a0f768b9b0f3522ac";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  decorator = super.buildPythonPackage {
    name = "decorator-3.4.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/35/3a/42566eb7a2cbac774399871af04e11d7ae3fc2579e7dae85213b8d1d1c57/decorator-3.4.2.tar.gz";
      md5 = "9e0536870d2b83ae27d58dbf22582f4d";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  docutils = super.buildPythonPackage {
    name = "docutils-0.12";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/37/38/ceda70135b9144d84884ae2fc5886c6baac4edea39550f28bcd144c1234d/docutils-0.12.tar.gz";
      md5 = "4622263b62c5c771c03502afa3157768";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
    };
  };
  dogpile.cache = super.buildPythonPackage {
    name = "dogpile.cache-0.6.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [dogpile.core];
    src = fetchurl {
      url = "https://pypi.python.org/packages/f6/a0/6f2142c58c6588d17c734265b103ae1cd0741e1681dd9483a63f22033375/dogpile.cache-0.6.1.tar.gz";
      md5 = "35d7fb30f22bbd0685763d894dd079a9";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  dogpile.core = super.buildPythonPackage {
    name = "dogpile.core-0.4.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
      md5 = "01cb19f52bba3e95c9b560f39341f045";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  dulwich = super.buildPythonPackage {
    name = "dulwich-0.12.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/6f/04/fbe561b6d45c0ec758330d5b7f5ba4b6cb4f1ca1ab49859d2fc16320da75/dulwich-0.12.0.tar.gz";
      md5 = "f3a8a12bd9f9dd8c233e18f3d49436fa";
    };
    meta = {
      license = [ pkgs.lib.licenses.gpl2Plus ];
    };
  };
  ecdsa = super.buildPythonPackage {
    name = "ecdsa-0.11";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/6c/3f/92fe5dcdcaa7bd117be21e5520c9a54375112b66ec000d209e9e9519fad1/ecdsa-0.11.tar.gz";
      md5 = "8ef586fe4dbb156697d756900cb41d7c";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  elasticsearch = super.buildPythonPackage {
    name = "elasticsearch-2.3.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [urllib3];
    src = fetchurl {
      url = "https://pypi.python.org/packages/10/35/5fd52c5f0b0ee405ed4b5195e8bce44c5e041787680dc7b94b8071cac600/elasticsearch-2.3.0.tar.gz";
      md5 = "2550f3b51629cf1ef9636608af92c340";
    };
    meta = {
      license = [ pkgs.lib.licenses.asl20 ];
    };
  };
  elasticsearch-dsl = super.buildPythonPackage {
    name = "elasticsearch-dsl-2.0.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [six python-dateutil elasticsearch];
    src = fetchurl {
      url = "https://pypi.python.org/packages/4e/5d/e788ae8dbe2ff4d13426db0a027533386a5c276c77a2654dc0e2007ce04a/elasticsearch-dsl-2.0.0.tar.gz";
      md5 = "4cdfec81bb35383dd3b7d02d7dc5ee68";
    };
    meta = {
      license = [ pkgs.lib.licenses.asl20 ];
    };
  };
  flake8 = super.buildPythonPackage {
    name = "flake8-2.4.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [pyflakes pep8 mccabe];
    src = fetchurl {
      url = "https://pypi.python.org/packages/8f/b5/9a73c66c7dba273bac8758398f060c008a25f3e84531063b42503b5d0a95/flake8-2.4.1.tar.gz";
      md5 = "ed45d3db81a3b7c88bd63c6e37ca1d65";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  future = super.buildPythonPackage {
    name = "future-0.14.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
      md5 = "e94079b0bd1fc054929e8769fc0f6083";
    };
    meta = {
      license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
    };
  };
  futures = super.buildPythonPackage {
    name = "futures-3.0.2";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
      md5 = "42aaf1e4de48d6e871d77dc1f9d96d5a";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  gnureadline = super.buildPythonPackage {
    name = "gnureadline-6.3.3";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/3a/ee/2c3f568b0a74974791ac590ec742ef6133e2fbd287a074ba72a53fa5e97c/gnureadline-6.3.3.tar.gz";
      md5 = "c4af83c9a3fbeac8f2da9b5a7c60e51c";
    };
    meta = {
      license = [ pkgs.lib.licenses.gpl1 ];
    };
  };
  gprof2dot = super.buildPythonPackage {
    name = "gprof2dot-2015.12.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/b9/34/7bf93c1952d40fa5c95ad963f4d8344b61ef58558632402eca18e6c14127/gprof2dot-2015.12.1.tar.gz";
      md5 = "e23bf4e2f94db032750c193384b4165b";
    };
    meta = {
      license = [ { fullName = "LGPL"; } ];
    };
  };
  gunicorn = super.buildPythonPackage {
    name = "gunicorn-19.6.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
      md5 = "338e5e8a83ea0f0625f768dba4597530";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  infrae.cache = super.buildPythonPackage {
    name = "infrae.cache-1.0.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [Beaker repoze.lru];
    src = fetchurl {
      url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
      md5 = "b09076a766747e6ed2a755cc62088e32";
    };
    meta = {
      license = [ pkgs.lib.licenses.zpt21 ];
    };
  };
  invoke = super.buildPythonPackage {
    name = "invoke-0.13.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
      md5 = "c0d1ed4bfb34eaab551662d8cfee6540";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  ipdb = super.buildPythonPackage {
    name = "ipdb-0.8";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [ipython];
    src = fetchurl {
      url = "https://pypi.python.org/packages/f0/25/d7dd430ced6cd8dc242a933c8682b5dbf32eb4011d82f87e34209e5ec845/ipdb-0.8.zip";
      md5 = "96dca0712efa01aa5eaf6b22071dd3ed";
    };
    meta = {
      license = [ pkgs.lib.licenses.gpl1 ];
    };
  };
  ipython = super.buildPythonPackage {
    name = "ipython-3.1.0";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/06/91/120c0835254c120af89f066afaabf81289bc2726c1fc3ca0555df6882f58/ipython-3.1.0.tar.gz";
      md5 = "a749d90c16068687b0ec45a27e72ef8f";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  iso8601 = super.buildPythonPackage {
    name = "iso8601-0.1.11";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/c0/75/c9209ee4d1b5975eb8c2cba4428bde6b61bd55664a98290dd015cdb18e98/iso8601-0.1.11.tar.gz";
      md5 = "b06d11cd14a64096f907086044f0fe38";
    };
    meta = {
      license = [ pkgs.lib.licenses.mit ];
    };
  };
  itsdangerous = super.buildPythonPackage {
    name = "itsdangerous-0.24";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [];
    src = fetchurl {
      url = "https://pypi.python.org/packages/dc/b4/a60bcdba945c00f6d608d8975131ab3f25b22f2bcfe1dab221165194b2d4/itsdangerous-0.24.tar.gz";
      md5 = "a3d55aa79369aef5345c036a8a26307f";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
    };
  };
  kombu = super.buildPythonPackage {
    name = "kombu-1.5.1";
    buildInputs = with self; [];
    doCheck = false;
    propagatedBuildInputs = with self; [anyjson amqplib];
    src = fetchurl {
      url = "https://pypi.python.org/packages/19/53/74bf2a624644b45f0850a638752514fc10a8e1cbd738f10804951a6df3f5/kombu-1.5.1.tar.gz";
      md5 = "50662f3c7e9395b3d0721fb75d100b63";
    };
    meta = {
      license = [ pkgs.lib.licenses.bsdOriginal ];
818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 };
819 };
820 };
820 };
821 lxml = super.buildPythonPackage {
821 lxml = super.buildPythonPackage {
822 name = "lxml-3.4.4";
822 name = "lxml-3.4.4";
823 buildInputs = with self; [];
823 buildInputs = with self; [];
824 doCheck = false;
824 doCheck = false;
825 propagatedBuildInputs = with self; [];
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
826 src = fetchurl {
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
827 url = "https://pypi.python.org/packages/63/c7/4f2a2a4ad6c6fa99b14be6b3c1cece9142e2d915aa7c43c908677afc8fa4/lxml-3.4.4.tar.gz";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
828 md5 = "a9a65972afc173ec7a39c585f4eea69c";
829 };
829 };
830 meta = {
830 meta = {
831 license = [ pkgs.lib.licenses.bsdOriginal ];
831 license = [ pkgs.lib.licenses.bsdOriginal ];
832 };
832 };
833 };
833 };
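# marshmallow (pinned below at 2.8.0) is a Python (de)serialization library providing
# the Schema/fields API; it is also listed in rhodecode-enterprise-ce's
# propagatedBuildInputs further down in this file.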
834 marshmallow = super.buildPythonPackage {
835 name = "marshmallow-2.8.0";
836 buildInputs = with self; [];
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/4f/64/9393d77847d86981c84b88bbea627d30ff71b5ab1402636b366f73737817/marshmallow-2.8.0.tar.gz";
841 md5 = "204513fc123a3d9bdd7b63b9747f02e6";
842 };
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
845 };
846 };
834 mccabe = super.buildPythonPackage {
847 mccabe = super.buildPythonPackage {
835 name = "mccabe-0.3";
848 name = "mccabe-0.3";
836 buildInputs = with self; [];
849 buildInputs = with self; [];
837 doCheck = false;
850 doCheck = false;
838 propagatedBuildInputs = with self; [];
851 propagatedBuildInputs = with self; [];
839 src = fetchurl {
852 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
853 url = "https://pypi.python.org/packages/c9/2e/75231479e11a906b64ac43bad9d0bb534d00080b18bdca8db9da46e1faf7/mccabe-0.3.tar.gz";
841 md5 = "81640948ff226f8c12b3277059489157";
854 md5 = "81640948ff226f8c12b3277059489157";
842 };
855 };
843 meta = {
856 meta = {
844 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
857 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
845 };
858 };
846 };
859 };
847 meld3 = super.buildPythonPackage {
860 meld3 = super.buildPythonPackage {
848 name = "meld3-1.0.2";
861 name = "meld3-1.0.2";
849 buildInputs = with self; [];
862 buildInputs = with self; [];
850 doCheck = false;
863 doCheck = false;
851 propagatedBuildInputs = with self; [];
864 propagatedBuildInputs = with self; [];
852 src = fetchurl {
865 src = fetchurl {
853 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
866 url = "https://pypi.python.org/packages/45/a0/317c6422b26c12fe0161e936fc35f36552069ba8e6f7ecbd99bbffe32a5f/meld3-1.0.2.tar.gz";
854 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
867 md5 = "3ccc78cd79cffd63a751ad7684c02c91";
855 };
868 };
856 meta = {
869 meta = {
857 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
858 };
871 };
859 };
872 };
860 mock = super.buildPythonPackage {
873 mock = super.buildPythonPackage {
861 name = "mock-1.0.1";
874 name = "mock-1.0.1";
862 buildInputs = with self; [];
875 buildInputs = with self; [];
863 doCheck = false;
876 doCheck = false;
864 propagatedBuildInputs = with self; [];
877 propagatedBuildInputs = with self; [];
865 src = fetchurl {
878 src = fetchurl {
866 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
879 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
867 md5 = "869f08d003c289a97c1a6610faf5e913";
880 md5 = "869f08d003c289a97c1a6610faf5e913";
868 };
881 };
869 meta = {
882 meta = {
870 license = [ pkgs.lib.licenses.bsdOriginal ];
883 license = [ pkgs.lib.licenses.bsdOriginal ];
871 };
884 };
872 };
885 };
873 msgpack-python = super.buildPythonPackage {
886 msgpack-python = super.buildPythonPackage {
874 name = "msgpack-python-0.4.6";
887 name = "msgpack-python-0.4.6";
875 buildInputs = with self; [];
888 buildInputs = with self; [];
876 doCheck = false;
889 doCheck = false;
877 propagatedBuildInputs = with self; [];
890 propagatedBuildInputs = with self; [];
878 src = fetchurl {
891 src = fetchurl {
879 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
892 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
880 md5 = "8b317669314cf1bc881716cccdaccb30";
893 md5 = "8b317669314cf1bc881716cccdaccb30";
881 };
894 };
882 meta = {
895 meta = {
883 license = [ pkgs.lib.licenses.asl20 ];
896 license = [ pkgs.lib.licenses.asl20 ];
884 };
897 };
885 };
898 };
886 nose = super.buildPythonPackage {
899 nose = super.buildPythonPackage {
887 name = "nose-1.3.6";
900 name = "nose-1.3.6";
888 buildInputs = with self; [];
901 buildInputs = with self; [];
889 doCheck = false;
902 doCheck = false;
890 propagatedBuildInputs = with self; [];
903 propagatedBuildInputs = with self; [];
891 src = fetchurl {
904 src = fetchurl {
892 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
905 url = "https://pypi.python.org/packages/70/c7/469e68148d17a0d3db5ed49150242fd70a74a8147b8f3f8b87776e028d99/nose-1.3.6.tar.gz";
893 md5 = "0ca546d81ca8309080fc80cb389e7a16";
906 md5 = "0ca546d81ca8309080fc80cb389e7a16";
894 };
907 };
895 meta = {
908 meta = {
896 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
909 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "GNU LGPL"; } ];
897 };
910 };
898 };
911 };
899 objgraph = super.buildPythonPackage {
912 objgraph = super.buildPythonPackage {
900 name = "objgraph-2.0.0";
913 name = "objgraph-2.0.0";
901 buildInputs = with self; [];
914 buildInputs = with self; [];
902 doCheck = false;
915 doCheck = false;
903 propagatedBuildInputs = with self; [];
916 propagatedBuildInputs = with self; [];
904 src = fetchurl {
917 src = fetchurl {
905 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
918 url = "https://pypi.python.org/packages/d7/33/ace750b59247496ed769b170586c5def7202683f3d98e737b75b767ff29e/objgraph-2.0.0.tar.gz";
906 md5 = "25b0d5e5adc74aa63ead15699614159c";
919 md5 = "25b0d5e5adc74aa63ead15699614159c";
907 };
920 };
908 meta = {
921 meta = {
909 license = [ pkgs.lib.licenses.mit ];
922 license = [ pkgs.lib.licenses.mit ];
910 };
923 };
911 };
924 };
912 packaging = super.buildPythonPackage {
925 packaging = super.buildPythonPackage {
913 name = "packaging-15.2";
926 name = "packaging-15.2";
914 buildInputs = with self; [];
927 buildInputs = with self; [];
915 doCheck = false;
928 doCheck = false;
916 propagatedBuildInputs = with self; [];
929 propagatedBuildInputs = with self; [];
917 src = fetchurl {
930 src = fetchurl {
918 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
931 url = "https://pypi.python.org/packages/24/c4/185da1304f07047dc9e0c46c31db75c0351bd73458ac3efad7da3dbcfbe1/packaging-15.2.tar.gz";
919 md5 = "c16093476f6ced42128bf610e5db3784";
932 md5 = "c16093476f6ced42128bf610e5db3784";
920 };
933 };
921 meta = {
934 meta = {
922 license = [ pkgs.lib.licenses.asl20 ];
935 license = [ pkgs.lib.licenses.asl20 ];
923 };
936 };
924 };
937 };
925 paramiko = super.buildPythonPackage {
938 paramiko = super.buildPythonPackage {
926 name = "paramiko-1.15.1";
939 name = "paramiko-1.15.1";
927 buildInputs = with self; [];
940 buildInputs = with self; [];
928 doCheck = false;
941 doCheck = false;
929 propagatedBuildInputs = with self; [pycrypto ecdsa];
942 propagatedBuildInputs = with self; [pycrypto ecdsa];
930 src = fetchurl {
943 src = fetchurl {
931 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
944 url = "https://pypi.python.org/packages/04/2b/a22d2a560c1951abbbf95a0628e245945565f70dc082d9e784666887222c/paramiko-1.15.1.tar.gz";
932 md5 = "48c274c3f9b1282932567b21f6acf3b5";
945 md5 = "48c274c3f9b1282932567b21f6acf3b5";
933 };
946 };
934 meta = {
947 meta = {
935 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
948 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
936 };
949 };
937 };
950 };
938 pep8 = super.buildPythonPackage {
951 pep8 = super.buildPythonPackage {
939 name = "pep8-1.5.7";
952 name = "pep8-1.5.7";
940 buildInputs = with self; [];
953 buildInputs = with self; [];
941 doCheck = false;
954 doCheck = false;
942 propagatedBuildInputs = with self; [];
955 propagatedBuildInputs = with self; [];
943 src = fetchurl {
956 src = fetchurl {
944 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
957 url = "https://pypi.python.org/packages/8b/de/259f5e735897ada1683489dd514b2a1c91aaa74e5e6b68f80acf128a6368/pep8-1.5.7.tar.gz";
945 md5 = "f6adbdd69365ecca20513c709f9b7c93";
958 md5 = "f6adbdd69365ecca20513c709f9b7c93";
946 };
959 };
947 meta = {
960 meta = {
948 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
961 license = [ { fullName = "Expat license"; } pkgs.lib.licenses.mit ];
949 };
962 };
950 };
963 };
951 psutil = super.buildPythonPackage {
964 psutil = super.buildPythonPackage {
952 name = "psutil-2.2.1";
965 name = "psutil-2.2.1";
953 buildInputs = with self; [];
966 buildInputs = with self; [];
954 doCheck = false;
967 doCheck = false;
955 propagatedBuildInputs = with self; [];
968 propagatedBuildInputs = with self; [];
956 src = fetchurl {
969 src = fetchurl {
957 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
970 url = "https://pypi.python.org/packages/df/47/ee54ef14dd40f8ce831a7581001a5096494dc99fe71586260ca6b531fe86/psutil-2.2.1.tar.gz";
958 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
971 md5 = "1a2b58cd9e3a53528bb6148f0c4d5244";
959 };
972 };
960 meta = {
973 meta = {
961 license = [ pkgs.lib.licenses.bsdOriginal ];
974 license = [ pkgs.lib.licenses.bsdOriginal ];
962 };
975 };
963 };
976 };
964 psycopg2 = super.buildPythonPackage {
977 psycopg2 = super.buildPythonPackage {
965 name = "psycopg2-2.6.1";
978 name = "psycopg2-2.6.1";
966 buildInputs = with self; [];
979 buildInputs = with self; [];
967 doCheck = false;
980 doCheck = false;
968 propagatedBuildInputs = with self; [];
981 propagatedBuildInputs = with self; [];
969 src = fetchurl {
982 src = fetchurl {
970 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
983 url = "https://pypi.python.org/packages/86/fd/cc8315be63a41fe000cce20482a917e874cdc1151e62cb0141f5e55f711e/psycopg2-2.6.1.tar.gz";
971 md5 = "842b44f8c95517ed5b792081a2370da1";
984 md5 = "842b44f8c95517ed5b792081a2370da1";
972 };
985 };
973 meta = {
986 meta = {
974 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
987 license = [ pkgs.lib.licenses.zpt21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
975 };
988 };
976 };
989 };
977 py = super.buildPythonPackage {
990 py = super.buildPythonPackage {
978 name = "py-1.4.29";
991 name = "py-1.4.29";
979 buildInputs = with self; [];
992 buildInputs = with self; [];
980 doCheck = false;
993 doCheck = false;
981 propagatedBuildInputs = with self; [];
994 propagatedBuildInputs = with self; [];
982 src = fetchurl {
995 src = fetchurl {
983 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
996 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
984 md5 = "c28e0accba523a29b35a48bb703fb96c";
997 md5 = "c28e0accba523a29b35a48bb703fb96c";
985 };
998 };
986 meta = {
999 meta = {
987 license = [ pkgs.lib.licenses.mit ];
1000 license = [ pkgs.lib.licenses.mit ];
988 };
1001 };
989 };
1002 };
990 py-bcrypt = super.buildPythonPackage {
1003 py-bcrypt = super.buildPythonPackage {
991 name = "py-bcrypt-0.4";
1004 name = "py-bcrypt-0.4";
992 buildInputs = with self; [];
1005 buildInputs = with self; [];
993 doCheck = false;
1006 doCheck = false;
994 propagatedBuildInputs = with self; [];
1007 propagatedBuildInputs = with self; [];
995 src = fetchurl {
1008 src = fetchurl {
996 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1009 url = "https://pypi.python.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
997 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
1010 md5 = "dd8b367d6b716a2ea2e72392525f4e36";
998 };
1011 };
999 meta = {
1012 meta = {
1000 license = [ pkgs.lib.licenses.bsdOriginal ];
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1001 };
1014 };
1002 };
1015 };
1003 py-gfm = super.buildPythonPackage {
1016 py-gfm = super.buildPythonPackage {
1004 name = "py-gfm-0.1.3";
1017 name = "py-gfm-0.1.3";
1005 buildInputs = with self; [];
1018 buildInputs = with self; [];
1006 doCheck = false;
1019 doCheck = false;
1007 propagatedBuildInputs = with self; [setuptools Markdown];
1020 propagatedBuildInputs = with self; [setuptools Markdown];
1008 src = fetchurl {
1021 src = fetchurl {
1009 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1022 url = "https://pypi.python.org/packages/12/e4/6b3d8678da04f97d7490d8264d8de51c2dc9fb91209ccee9c515c95e14c5/py-gfm-0.1.3.tar.gz";
1010 md5 = "e588d9e69640a241b97e2c59c22527a6";
1023 md5 = "e588d9e69640a241b97e2c59c22527a6";
1011 };
1024 };
1012 meta = {
1025 meta = {
1013 license = [ pkgs.lib.licenses.bsdOriginal ];
1026 license = [ pkgs.lib.licenses.bsdOriginal ];
1014 };
1027 };
1015 };
1028 };
1016 pycrypto = super.buildPythonPackage {
1029 pycrypto = super.buildPythonPackage {
1017 name = "pycrypto-2.6.1";
1030 name = "pycrypto-2.6.1";
1018 buildInputs = with self; [];
1031 buildInputs = with self; [];
1019 doCheck = false;
1032 doCheck = false;
1020 propagatedBuildInputs = with self; [];
1033 propagatedBuildInputs = with self; [];
1021 src = fetchurl {
1034 src = fetchurl {
1022 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1035 url = "https://pypi.python.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1023 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1036 md5 = "55a61a054aa66812daf5161a0d5d7eda";
1024 };
1037 };
1025 meta = {
1038 meta = {
1026 license = [ pkgs.lib.licenses.publicDomain ];
1039 license = [ pkgs.lib.licenses.publicDomain ];
1027 };
1040 };
1028 };
1041 };
1029 pycurl = super.buildPythonPackage {
1042 pycurl = super.buildPythonPackage {
1030 name = "pycurl-7.19.5";
1043 name = "pycurl-7.19.5";
1031 buildInputs = with self; [];
1044 buildInputs = with self; [];
1032 doCheck = false;
1045 doCheck = false;
1033 propagatedBuildInputs = with self; [];
1046 propagatedBuildInputs = with self; [];
1034 src = fetchurl {
1047 src = fetchurl {
1035 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1048 url = "https://pypi.python.org/packages/6c/48/13bad289ef6f4869b1d8fc11ae54de8cfb3cc4a2eb9f7419c506f763be46/pycurl-7.19.5.tar.gz";
1036 md5 = "47b4eac84118e2606658122104e62072";
1049 md5 = "47b4eac84118e2606658122104e62072";
1037 };
1050 };
1038 meta = {
1051 meta = {
1039 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1052 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1040 };
1053 };
1041 };
1054 };
1042 pyflakes = super.buildPythonPackage {
1055 pyflakes = super.buildPythonPackage {
1043 name = "pyflakes-0.8.1";
1056 name = "pyflakes-0.8.1";
1044 buildInputs = with self; [];
1057 buildInputs = with self; [];
1045 doCheck = false;
1058 doCheck = false;
1046 propagatedBuildInputs = with self; [];
1059 propagatedBuildInputs = with self; [];
1047 src = fetchurl {
1060 src = fetchurl {
1048 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1061 url = "https://pypi.python.org/packages/75/22/a90ec0252f4f87f3ffb6336504de71fe16a49d69c4538dae2f12b9360a38/pyflakes-0.8.1.tar.gz";
1049 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1062 md5 = "905fe91ad14b912807e8fdc2ac2e2c23";
1050 };
1063 };
1051 meta = {
1064 meta = {
1052 license = [ pkgs.lib.licenses.mit ];
1065 license = [ pkgs.lib.licenses.mit ];
1053 };
1066 };
1054 };
1067 };
1055 pyparsing = super.buildPythonPackage {
1068 pyparsing = super.buildPythonPackage {
1056 name = "pyparsing-1.5.7";
1069 name = "pyparsing-1.5.7";
1057 buildInputs = with self; [];
1070 buildInputs = with self; [];
1058 doCheck = false;
1071 doCheck = false;
1059 propagatedBuildInputs = with self; [];
1072 propagatedBuildInputs = with self; [];
1060 src = fetchurl {
1073 src = fetchurl {
1061 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1074 url = "https://pypi.python.org/packages/2e/26/e8fb5b4256a5f5036be7ce115ef8db8d06bc537becfbdc46c6af008314ee/pyparsing-1.5.7.zip";
1062 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1075 md5 = "b86854857a368d6ccb4d5b6e76d0637f";
1063 };
1076 };
1064 meta = {
1077 meta = {
1065 license = [ pkgs.lib.licenses.mit ];
1078 license = [ pkgs.lib.licenses.mit ];
1066 };
1079 };
1067 };
1080 };
1068 pyramid = super.buildPythonPackage {
1081 pyramid = super.buildPythonPackage {
1069 name = "pyramid-1.6.1";
1082 name = "pyramid-1.6.1";
1070 buildInputs = with self; [];
1083 buildInputs = with self; [];
1071 doCheck = false;
1084 doCheck = false;
1072 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1085 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
1073 src = fetchurl {
1086 src = fetchurl {
1074 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1087 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
1075 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1088 md5 = "b18688ff3cc33efdbb098a35b45dd122";
1076 };
1089 };
1077 meta = {
1090 meta = {
1078 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1091 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1079 };
1092 };
1080 };
1093 };
1081 pyramid-beaker = super.buildPythonPackage {
1094 pyramid-beaker = super.buildPythonPackage {
1082 name = "pyramid-beaker-0.8";
1095 name = "pyramid-beaker-0.8";
1083 buildInputs = with self; [];
1096 buildInputs = with self; [];
1084 doCheck = false;
1097 doCheck = false;
1085 propagatedBuildInputs = with self; [pyramid Beaker];
1098 propagatedBuildInputs = with self; [pyramid Beaker];
1086 src = fetchurl {
1099 src = fetchurl {
1087 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1100 url = "https://pypi.python.org/packages/d9/6e/b85426e00fd3d57f4545f74e1c3828552d8700f13ededeef9233f7bca8be/pyramid_beaker-0.8.tar.gz";
1088 md5 = "22f14be31b06549f80890e2c63a93834";
1101 md5 = "22f14be31b06549f80890e2c63a93834";
1089 };
1102 };
1090 meta = {
1103 meta = {
1091 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1104 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1092 };
1105 };
1093 };
1106 };
1094 pyramid-debugtoolbar = super.buildPythonPackage {
1107 pyramid-debugtoolbar = super.buildPythonPackage {
1095 name = "pyramid-debugtoolbar-2.4.2";
1108 name = "pyramid-debugtoolbar-2.4.2";
1096 buildInputs = with self; [];
1109 buildInputs = with self; [];
1097 doCheck = false;
1110 doCheck = false;
1098 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1111 propagatedBuildInputs = with self; [pyramid pyramid-mako repoze.lru Pygments];
1099 src = fetchurl {
1112 src = fetchurl {
1100 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1113 url = "https://pypi.python.org/packages/89/00/ed5426ee41ed747ba3ffd30e8230841a6878286ea67d480b1444d24f06a2/pyramid_debugtoolbar-2.4.2.tar.gz";
1101 md5 = "073ea67086cc4bd5decc3a000853642d";
1114 md5 = "073ea67086cc4bd5decc3a000853642d";
1102 };
1115 };
1103 meta = {
1116 meta = {
1104 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1117 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1105 };
1118 };
1106 };
1119 };
1107 pyramid-jinja2 = super.buildPythonPackage {
1120 pyramid-jinja2 = super.buildPythonPackage {
1108 name = "pyramid-jinja2-2.5";
1121 name = "pyramid-jinja2-2.5";
1109 buildInputs = with self; [];
1122 buildInputs = with self; [];
1110 doCheck = false;
1123 doCheck = false;
1111 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1124 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
1112 src = fetchurl {
1125 src = fetchurl {
1113 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1126 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
1114 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1127 md5 = "07cb6547204ac5e6f0b22a954ccee928";
1115 };
1128 };
1116 meta = {
1129 meta = {
1117 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1118 };
1131 };
1119 };
1132 };
1120 pyramid-mako = super.buildPythonPackage {
1133 pyramid-mako = super.buildPythonPackage {
1121 name = "pyramid-mako-1.0.2";
1134 name = "pyramid-mako-1.0.2";
1122 buildInputs = with self; [];
1135 buildInputs = with self; [];
1123 doCheck = false;
1136 doCheck = false;
1124 propagatedBuildInputs = with self; [pyramid Mako];
1137 propagatedBuildInputs = with self; [pyramid Mako];
1125 src = fetchurl {
1138 src = fetchurl {
1126 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1139 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
1127 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1140 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
1128 };
1141 };
1129 meta = {
1142 meta = {
1130 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1143 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1131 };
1144 };
1132 };
1145 };
1133 pysqlite = super.buildPythonPackage {
1146 pysqlite = super.buildPythonPackage {
1134 name = "pysqlite-2.6.3";
1147 name = "pysqlite-2.6.3";
1135 buildInputs = with self; [];
1148 buildInputs = with self; [];
1136 doCheck = false;
1149 doCheck = false;
1137 propagatedBuildInputs = with self; [];
1150 propagatedBuildInputs = with self; [];
1138 src = fetchurl {
1151 src = fetchurl {
1139 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1152 url = "https://pypi.python.org/packages/5c/a6/1c429cd4c8069cf4bfbd0eb4d592b3f4042155a8202df83d7e9b93aa3dc2/pysqlite-2.6.3.tar.gz";
1140 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1153 md5 = "7ff1cedee74646b50117acff87aa1cfa";
1141 };
1154 };
1142 meta = {
1155 meta = {
1143 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1156 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1144 };
1157 };
1145 };
1158 };
1146 pytest = super.buildPythonPackage {
1159 pytest = super.buildPythonPackage {
1147 name = "pytest-2.8.5";
1160 name = "pytest-2.8.5";
1148 buildInputs = with self; [];
1161 buildInputs = with self; [];
1149 doCheck = false;
1162 doCheck = false;
1150 propagatedBuildInputs = with self; [py];
1163 propagatedBuildInputs = with self; [py];
1151 src = fetchurl {
1164 src = fetchurl {
1152 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1165 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
1153 md5 = "8493b06f700862f1294298d6c1b715a9";
1166 md5 = "8493b06f700862f1294298d6c1b715a9";
1154 };
1167 };
1155 meta = {
1168 meta = {
1156 license = [ pkgs.lib.licenses.mit ];
1169 license = [ pkgs.lib.licenses.mit ];
1157 };
1170 };
1158 };
1171 };
1159 pytest-catchlog = super.buildPythonPackage {
1172 pytest-catchlog = super.buildPythonPackage {
1160 name = "pytest-catchlog-1.2.2";
1173 name = "pytest-catchlog-1.2.2";
1161 buildInputs = with self; [];
1174 buildInputs = with self; [];
1162 doCheck = false;
1175 doCheck = false;
1163 propagatedBuildInputs = with self; [py pytest];
1176 propagatedBuildInputs = with self; [py pytest];
1164 src = fetchurl {
1177 src = fetchurl {
1165 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1178 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
1166 md5 = "09d890c54c7456c818102b7ff8c182c8";
1179 md5 = "09d890c54c7456c818102b7ff8c182c8";
1167 };
1180 };
1168 meta = {
1181 meta = {
1169 license = [ pkgs.lib.licenses.mit ];
1182 license = [ pkgs.lib.licenses.mit ];
1170 };
1183 };
1171 };
1184 };
1172 pytest-cov = super.buildPythonPackage {
1185 pytest-cov = super.buildPythonPackage {
1173 name = "pytest-cov-1.8.1";
1186 name = "pytest-cov-1.8.1";
1174 buildInputs = with self; [];
1187 buildInputs = with self; [];
1175 doCheck = false;
1188 doCheck = false;
1176 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1189 propagatedBuildInputs = with self; [py pytest coverage cov-core];
1177 src = fetchurl {
1190 src = fetchurl {
1178 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1191 url = "https://pypi.python.org/packages/11/4b/b04646e97f1721878eb21e9f779102d84dd044d324382263b1770a3e4838/pytest-cov-1.8.1.tar.gz";
1179 md5 = "76c778afa2494088270348be42d759fc";
1192 md5 = "76c778afa2494088270348be42d759fc";
1180 };
1193 };
1181 meta = {
1194 meta = {
1182 license = [ pkgs.lib.licenses.mit ];
1195 license = [ pkgs.lib.licenses.mit ];
1183 };
1196 };
1184 };
1197 };
1185 pytest-profiling = super.buildPythonPackage {
1198 pytest-profiling = super.buildPythonPackage {
1186 name = "pytest-profiling-1.0.1";
1199 name = "pytest-profiling-1.0.1";
1187 buildInputs = with self; [];
1200 buildInputs = with self; [];
1188 doCheck = false;
1201 doCheck = false;
1189 propagatedBuildInputs = with self; [six pytest gprof2dot];
1202 propagatedBuildInputs = with self; [six pytest gprof2dot];
1190 src = fetchurl {
1203 src = fetchurl {
1191 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1204 url = "https://pypi.python.org/packages/d8/67/8ffab73406e22870e07fa4dc8dce1d7689b26dba8efd00161c9b6fc01ec0/pytest-profiling-1.0.1.tar.gz";
1192 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1205 md5 = "354404eb5b3fd4dc5eb7fffbb3d9b68b";
1193 };
1206 };
1194 meta = {
1207 meta = {
1195 license = [ pkgs.lib.licenses.mit ];
1208 license = [ pkgs.lib.licenses.mit ];
1196 };
1209 };
1197 };
1210 };
1198 pytest-runner = super.buildPythonPackage {
1211 pytest-runner = super.buildPythonPackage {
1199 name = "pytest-runner-2.7.1";
1212 name = "pytest-runner-2.7.1";
1200 buildInputs = with self; [];
1213 buildInputs = with self; [];
1201 doCheck = false;
1214 doCheck = false;
1202 propagatedBuildInputs = with self; [];
1215 propagatedBuildInputs = with self; [];
1203 src = fetchurl {
1216 src = fetchurl {
1204 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1217 url = "https://pypi.python.org/packages/99/6b/c4ff4418d3424d4475b7af60724fd4a5cdd91ed8e489dc9443281f0052bc/pytest-runner-2.7.1.tar.gz";
1205 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1218 md5 = "e56f0bc8d79a6bd91772b44ef4215c7e";
1206 };
1219 };
1207 meta = {
1220 meta = {
1208 license = [ pkgs.lib.licenses.mit ];
1221 license = [ pkgs.lib.licenses.mit ];
1209 };
1222 };
1210 };
1223 };
1211 pytest-timeout = super.buildPythonPackage {
1224 pytest-timeout = super.buildPythonPackage {
1212 name = "pytest-timeout-0.4";
1225 name = "pytest-timeout-0.4";
1213 buildInputs = with self; [];
1226 buildInputs = with self; [];
1214 doCheck = false;
1227 doCheck = false;
1215 propagatedBuildInputs = with self; [pytest];
1228 propagatedBuildInputs = with self; [pytest];
1216 src = fetchurl {
1229 src = fetchurl {
1217 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1230 url = "https://pypi.python.org/packages/24/48/5f6bd4b8026a26e1dd427243d560a29a0f1b24a5c7cffca4bf049a7bb65b/pytest-timeout-0.4.tar.gz";
1218 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1231 md5 = "03b28aff69cbbfb959ed35ade5fde262";
1219 };
1232 };
1220 meta = {
1233 meta = {
1221 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1234 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1222 };
1235 };
1223 };
1236 };
1224 python-dateutil = super.buildPythonPackage {
1237 python-dateutil = super.buildPythonPackage {
1225 name = "python-dateutil-1.5";
1238 name = "python-dateutil-1.5";
1226 buildInputs = with self; [];
1239 buildInputs = with self; [];
1227 doCheck = false;
1240 doCheck = false;
1228 propagatedBuildInputs = with self; [];
1241 propagatedBuildInputs = with self; [];
1229 src = fetchurl {
1242 src = fetchurl {
1230 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1243 url = "https://pypi.python.org/packages/b4/7c/df59c89a753eb33c7c44e1dd42de0e9bc2ccdd5a4d576e0bfad97cc280cb/python-dateutil-1.5.tar.gz";
1231 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1244 md5 = "0dcb1de5e5cad69490a3b6ab63f0cfa5";
1232 };
1245 };
1233 meta = {
1246 meta = {
1234 license = [ pkgs.lib.licenses.psfl ];
1247 license = [ pkgs.lib.licenses.psfl ];
1235 };
1248 };
1236 };
1249 };
1237 python-editor = super.buildPythonPackage {
1250 python-editor = super.buildPythonPackage {
1238 name = "python-editor-1.0.1";
1251 name = "python-editor-1.0.1";
1239 buildInputs = with self; [];
1252 buildInputs = with self; [];
1240 doCheck = false;
1253 doCheck = false;
1241 propagatedBuildInputs = with self; [];
1254 propagatedBuildInputs = with self; [];
1242 src = fetchurl {
1255 src = fetchurl {
1243 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1256 url = "https://pypi.python.org/packages/2b/c0/df7b87d5cf016f82eab3b05cd35f53287c1178ad8c42bfb6fa61b89b22f6/python-editor-1.0.1.tar.gz";
1244 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1257 md5 = "e1fa63535b40e022fa4fd646fd8b511a";
1245 };
1258 };
1246 meta = {
1259 meta = {
1247 license = [ pkgs.lib.licenses.asl20 ];
1260 license = [ pkgs.lib.licenses.asl20 ];
1248 };
1261 };
1249 };
1262 };
1250 python-ldap = super.buildPythonPackage {
1263 python-ldap = super.buildPythonPackage {
1251 name = "python-ldap-2.4.19";
1264 name = "python-ldap-2.4.19";
1252 buildInputs = with self; [];
1265 buildInputs = with self; [];
1253 doCheck = false;
1266 doCheck = false;
1254 propagatedBuildInputs = with self; [setuptools];
1267 propagatedBuildInputs = with self; [setuptools];
1255 src = fetchurl {
1268 src = fetchurl {
1256 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1269 url = "https://pypi.python.org/packages/42/81/1b64838c82e64f14d4e246ff00b52e650a35c012551b891ada2b85d40737/python-ldap-2.4.19.tar.gz";
1257 md5 = "b941bf31d09739492aa19ef679e94ae3";
1270 md5 = "b941bf31d09739492aa19ef679e94ae3";
1258 };
1271 };
1259 meta = {
1272 meta = {
1260 license = [ pkgs.lib.licenses.psfl ];
1273 license = [ pkgs.lib.licenses.psfl ];
1261 };
1274 };
1262 };
1275 };
1263 python-memcached = super.buildPythonPackage {
1276 python-memcached = super.buildPythonPackage {
1264 name = "python-memcached-1.57";
1277 name = "python-memcached-1.57";
1265 buildInputs = with self; [];
1278 buildInputs = with self; [];
1266 doCheck = false;
1279 doCheck = false;
1267 propagatedBuildInputs = with self; [six];
1280 propagatedBuildInputs = with self; [six];
1268 src = fetchurl {
1281 src = fetchurl {
1269 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1282 url = "https://pypi.python.org/packages/52/9d/eebc0dcbc5c7c66840ad207dfc1baa376dadb74912484bff73819cce01e6/python-memcached-1.57.tar.gz";
1270 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1283 md5 = "de21f64b42b2d961f3d4ad7beb5468a1";
1271 };
1284 };
1272 meta = {
1285 meta = {
1273 license = [ pkgs.lib.licenses.psfl ];
1286 license = [ pkgs.lib.licenses.psfl ];
1274 };
1287 };
1275 };
1288 };
1276 python-pam = super.buildPythonPackage {
1289 python-pam = super.buildPythonPackage {
1277 name = "python-pam-1.8.2";
1290 name = "python-pam-1.8.2";
1278 buildInputs = with self; [];
1291 buildInputs = with self; [];
1279 doCheck = false;
1292 doCheck = false;
1280 propagatedBuildInputs = with self; [];
1293 propagatedBuildInputs = with self; [];
1281 src = fetchurl {
1294 src = fetchurl {
1282 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1295 url = "https://pypi.python.org/packages/de/8c/f8f5d38b4f26893af267ea0b39023d4951705ab0413a39e0cf7cf4900505/python-pam-1.8.2.tar.gz";
1283 md5 = "db71b6b999246fb05d78ecfbe166629d";
1296 md5 = "db71b6b999246fb05d78ecfbe166629d";
1284 };
1297 };
1285 meta = {
1298 meta = {
1286 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1299 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1287 };
1300 };
1288 };
1301 };
1289 pytz = super.buildPythonPackage {
1302 pytz = super.buildPythonPackage {
1290 name = "pytz-2015.4";
1303 name = "pytz-2015.4";
1291 buildInputs = with self; [];
1304 buildInputs = with self; [];
1292 doCheck = false;
1305 doCheck = false;
1293 propagatedBuildInputs = with self; [];
1306 propagatedBuildInputs = with self; [];
1294 src = fetchurl {
1307 src = fetchurl {
1295 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1308 url = "https://pypi.python.org/packages/7e/1a/f43b5c92df7b156822030fed151327ea096bcf417e45acc23bd1df43472f/pytz-2015.4.zip";
1296 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1309 md5 = "233f2a2b370d03f9b5911700cc9ebf3c";
1297 };
1310 };
1298 meta = {
1311 meta = {
1299 license = [ pkgs.lib.licenses.mit ];
1312 license = [ pkgs.lib.licenses.mit ];
1300 };
1313 };
1301 };
1314 };
1302 pyzmq = super.buildPythonPackage {
1315 pyzmq = super.buildPythonPackage {
1303 name = "pyzmq-14.6.0";
1316 name = "pyzmq-14.6.0";
1304 buildInputs = with self; [];
1317 buildInputs = with self; [];
1305 doCheck = false;
1318 doCheck = false;
1306 propagatedBuildInputs = with self; [];
1319 propagatedBuildInputs = with self; [];
1307 src = fetchurl {
1320 src = fetchurl {
1308 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1321 url = "https://pypi.python.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1309 md5 = "395b5de95a931afa5b14c9349a5b8024";
1322 md5 = "395b5de95a931afa5b14c9349a5b8024";
1310 };
1323 };
1311 meta = {
1324 meta = {
1312 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1325 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1313 };
1326 };
1314 };
1327 };
1315 recaptcha-client = super.buildPythonPackage {
1328 recaptcha-client = super.buildPythonPackage {
1316 name = "recaptcha-client-1.0.6";
1329 name = "recaptcha-client-1.0.6";
1317 buildInputs = with self; [];
1330 buildInputs = with self; [];
1318 doCheck = false;
1331 doCheck = false;
1319 propagatedBuildInputs = with self; [];
1332 propagatedBuildInputs = with self; [];
1320 src = fetchurl {
1333 src = fetchurl {
1321 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1334 url = "https://pypi.python.org/packages/0a/ea/5f2fbbfd894bdac1c68ef8d92019066cfcf9fbff5fe3d728d2b5c25c8db4/recaptcha-client-1.0.6.tar.gz";
1322 md5 = "74228180f7e1fb76c4d7089160b0d919";
1335 md5 = "74228180f7e1fb76c4d7089160b0d919";
1323 };
1336 };
1324 meta = {
1337 meta = {
1325 license = [ { fullName = "MIT/X11"; } ];
1338 license = [ { fullName = "MIT/X11"; } ];
1326 };
1339 };
1327 };
1340 };
1328 repoze.lru = super.buildPythonPackage {
1341 repoze.lru = super.buildPythonPackage {
1329 name = "repoze.lru-0.6";
1342 name = "repoze.lru-0.6";
1330 buildInputs = with self; [];
1343 buildInputs = with self; [];
1331 doCheck = false;
1344 doCheck = false;
1332 propagatedBuildInputs = with self; [];
1345 propagatedBuildInputs = with self; [];
1333 src = fetchurl {
1346 src = fetchurl {
1334 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1347 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
1335 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1348 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
1336 };
1349 };
1337 meta = {
1350 meta = {
1338 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1351 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1339 };
1352 };
1340 };
1353 };
1341 requests = super.buildPythonPackage {
1354 requests = super.buildPythonPackage {
1342 name = "requests-2.9.1";
1355 name = "requests-2.9.1";
1343 buildInputs = with self; [];
1356 buildInputs = with self; [];
1344 doCheck = false;
1357 doCheck = false;
1345 propagatedBuildInputs = with self; [];
1358 propagatedBuildInputs = with self; [];
1346 src = fetchurl {
1359 src = fetchurl {
1347 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1360 url = "https://pypi.python.org/packages/f9/6d/07c44fb1ebe04d069459a189e7dab9e4abfe9432adcd4477367c25332748/requests-2.9.1.tar.gz";
1348 md5 = "0b7f480d19012ec52bab78292efd976d";
1361 md5 = "0b7f480d19012ec52bab78292efd976d";
1349 };
1362 };
1350 meta = {
1363 meta = {
1351 license = [ pkgs.lib.licenses.asl20 ];
1364 license = [ pkgs.lib.licenses.asl20 ];
1352 };
1365 };
1353 };
1366 };
1354 rhodecode-enterprise-ce = super.buildPythonPackage {
1367 rhodecode-enterprise-ce = super.buildPythonPackage {
1355 name = "rhodecode-enterprise-ce-4.3.0";
1368 name = "rhodecode-enterprise-ce-4.3.0";
1356 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1369 buildInputs = with self; [WebTest configobj cssselect flake8 lxml mock pytest pytest-cov pytest-runner];
1357 doCheck = true;
1370 doCheck = true;
1358 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
1371 propagatedBuildInputs = with self; [Babel Beaker FormEncode Mako Markdown MarkupSafe MySQL-python Paste PasteDeploy PasteScript Pygments Pylons Pyro4 Routes SQLAlchemy Tempita URLObject WebError WebHelpers WebHelpers2 WebOb WebTest Whoosh alembic amqplib anyjson appenlight-client authomatic backport-ipaddress celery colander decorator docutils gunicorn infrae.cache ipython iso8601 kombu marshmallow msgpack-python packaging psycopg2 py-gfm pycrypto pycurl pyparsing pyramid pyramid-debugtoolbar pyramid-mako pyramid-beaker pysqlite python-dateutil python-ldap python-memcached python-pam recaptcha-client repoze.lru requests simplejson waitress zope.cachedescriptors dogpile.cache dogpile.core psutil py-bcrypt];
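# Note: the two propagatedBuildInputs lines above are identical except that the second
# one also lists marshmallow among rhodecode-enterprise-ce's dependencies.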
1359 src = ./.;
1372 src = ./.;
1360 meta = {
1373 meta = {
1361 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1374 license = [ { fullName = "AGPLv3, and Commercial License"; } ];
1362 };
1375 };
1363 };
1376 };
1364 rhodecode-tools = super.buildPythonPackage {
1377 rhodecode-tools = super.buildPythonPackage {
1365 name = "rhodecode-tools-0.8.3";
1378 name = "rhodecode-tools-0.8.3";
1366 buildInputs = with self; [];
1379 buildInputs = with self; [];
1367 doCheck = false;
1380 doCheck = false;
1368 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1381 propagatedBuildInputs = with self; [click future six Mako MarkupSafe requests Whoosh elasticsearch elasticsearch-dsl];
1369 src = fetchurl {
1382 src = fetchurl {
1370 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1383 url = "https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip";
1371 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1384 md5 = "9acdfd71b8ddf4056057065f37ab9ccb";
1372 };
1385 };
1373 meta = {
1386 meta = {
1374 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1387 license = [ { fullName = "AGPLv3 and Proprietary"; } ];
1375 };
1388 };
1376 };
1389 };
1377 serpent = super.buildPythonPackage {
1390 serpent = super.buildPythonPackage {
1378 name = "serpent-1.12";
1391 name = "serpent-1.12";
1379 buildInputs = with self; [];
1392 buildInputs = with self; [];
1380 doCheck = false;
1393 doCheck = false;
1381 propagatedBuildInputs = with self; [];
1394 propagatedBuildInputs = with self; [];
1382 src = fetchurl {
1395 src = fetchurl {
1383 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1396 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
1384 md5 = "05869ac7b062828b34f8f927f0457b65";
1397 md5 = "05869ac7b062828b34f8f927f0457b65";
1385 };
1398 };
1386 meta = {
1399 meta = {
1387 license = [ pkgs.lib.licenses.mit ];
1400 license = [ pkgs.lib.licenses.mit ];
1388 };
1401 };
1389 };
1402 };
1390 setproctitle = super.buildPythonPackage {
1403 setproctitle = super.buildPythonPackage {
1391 name = "setproctitle-1.1.8";
1404 name = "setproctitle-1.1.8";
1392 buildInputs = with self; [];
1405 buildInputs = with self; [];
1393 doCheck = false;
1406 doCheck = false;
1394 propagatedBuildInputs = with self; [];
1407 propagatedBuildInputs = with self; [];
1395 src = fetchurl {
1408 src = fetchurl {
1396 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1409 url = "https://pypi.python.org/packages/33/c3/ad367a4f4f1ca90468863ae727ac62f6edb558fc09a003d344a02cfc6ea6/setproctitle-1.1.8.tar.gz";
1397 md5 = "728f4c8c6031bbe56083a48594027edd";
1410 md5 = "728f4c8c6031bbe56083a48594027edd";
1398 };
1411 };
1399 meta = {
1412 meta = {
1400 license = [ pkgs.lib.licenses.bsdOriginal ];
1413 license = [ pkgs.lib.licenses.bsdOriginal ];
1401 };
1414 };
1402 };
1415 };
1403 setuptools = super.buildPythonPackage {
1416 setuptools = super.buildPythonPackage {
1404 name = "setuptools-20.8.1";
1417 name = "setuptools-20.8.1";
1405 buildInputs = with self; [];
1418 buildInputs = with self; [];
1406 doCheck = false;
1419 doCheck = false;
1407 propagatedBuildInputs = with self; [];
1420 propagatedBuildInputs = with self; [];
1408 src = fetchurl {
1421 src = fetchurl {
1409 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1422 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
1410 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1423 md5 = "fe58a5cac0df20bb83942b252a4b0543";
1411 };
1424 };
1412 meta = {
1425 meta = {
1413 license = [ pkgs.lib.licenses.mit ];
1426 license = [ pkgs.lib.licenses.mit ];
1414 };
1427 };
1415 };
1428 };
1416 setuptools-scm = super.buildPythonPackage {
1429 setuptools-scm = super.buildPythonPackage {
1417 name = "setuptools-scm-1.11.0";
1430 name = "setuptools-scm-1.11.0";
1418 buildInputs = with self; [];
1431 buildInputs = with self; [];
1419 doCheck = false;
1432 doCheck = false;
1420 propagatedBuildInputs = with self; [];
1433 propagatedBuildInputs = with self; [];
1421 src = fetchurl {
1434 src = fetchurl {
1422 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1435 url = "https://pypi.python.org/packages/cd/5f/e3a038292358058d83d764a47d09114aa5a8003ed4529518f9e580f1a94f/setuptools_scm-1.11.0.tar.gz";
1423 md5 = "4c5c896ba52e134bbc3507bac6400087";
1436 md5 = "4c5c896ba52e134bbc3507bac6400087";
1424 };
1437 };
1425 meta = {
1438 meta = {
1426 license = [ pkgs.lib.licenses.mit ];
1439 license = [ pkgs.lib.licenses.mit ];
1427 };
1440 };
1428 };
1441 };
1429 simplejson = super.buildPythonPackage {
1442 simplejson = super.buildPythonPackage {
1430 name = "simplejson-3.7.2";
1443 name = "simplejson-3.7.2";
1431 buildInputs = with self; [];
1444 buildInputs = with self; [];
1432 doCheck = false;
1445 doCheck = false;
1433 propagatedBuildInputs = with self; [];
1446 propagatedBuildInputs = with self; [];
1434 src = fetchurl {
1447 src = fetchurl {
1435 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1448 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
1436 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1449 md5 = "a5fc7d05d4cb38492285553def5d4b46";
1437 };
1450 };
1438 meta = {
1451 meta = {
1439 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1452 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
1440 };
1453 };
1441 };
1454 };
1442 six = super.buildPythonPackage {
1455 six = super.buildPythonPackage {
1443 name = "six-1.9.0";
1456 name = "six-1.9.0";
1444 buildInputs = with self; [];
1457 buildInputs = with self; [];
1445 doCheck = false;
1458 doCheck = false;
1446 propagatedBuildInputs = with self; [];
1459 propagatedBuildInputs = with self; [];
1447 src = fetchurl {
1460 src = fetchurl {
1448 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1461 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
1449 md5 = "476881ef4012262dfc8adc645ee786c4";
1462 md5 = "476881ef4012262dfc8adc645ee786c4";
1450 };
1463 };
1451 meta = {
1464 meta = {
1452 license = [ pkgs.lib.licenses.mit ];
1465 license = [ pkgs.lib.licenses.mit ];
1453 };
1466 };
1454 };
1467 };
1455 subprocess32 = super.buildPythonPackage {
1468 subprocess32 = super.buildPythonPackage {
1456 name = "subprocess32-3.2.6";
1469 name = "subprocess32-3.2.6";
1457 buildInputs = with self; [];
1470 buildInputs = with self; [];
1458 doCheck = false;
1471 doCheck = false;
1459 propagatedBuildInputs = with self; [];
1472 propagatedBuildInputs = with self; [];
1460 src = fetchurl {
1473 src = fetchurl {
1461 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1474 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
1462 md5 = "754c5ab9f533e764f931136974b618f1";
1475 md5 = "754c5ab9f533e764f931136974b618f1";
1463 };
1476 };
1464 meta = {
1477 meta = {
1465 license = [ pkgs.lib.licenses.psfl ];
1478 license = [ pkgs.lib.licenses.psfl ];
1466 };
1479 };
1467 };
1480 };
1468 supervisor = super.buildPythonPackage {
1481 supervisor = super.buildPythonPackage {
1469 name = "supervisor-3.3.0";
1482 name = "supervisor-3.3.0";
1470 buildInputs = with self; [];
1483 buildInputs = with self; [];
1471 doCheck = false;
1484 doCheck = false;
1472 propagatedBuildInputs = with self; [meld3];
1485 propagatedBuildInputs = with self; [meld3];
1473 src = fetchurl {
1486 src = fetchurl {
1474 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1487 url = "https://pypi.python.org/packages/44/80/d28047d120bfcc8158b4e41127706731ee6a3419c661e0a858fb0e7c4b2d/supervisor-3.3.0.tar.gz";
1475 md5 = "46bac00378d1eddb616752b990c67416";
1488 md5 = "46bac00378d1eddb616752b990c67416";
1476 };
1489 };
1477 meta = {
1490 meta = {
1478 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1491 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1479 };
1492 };
1480 };
1493 };
1481 transifex-client = super.buildPythonPackage {
1494 transifex-client = super.buildPythonPackage {
1482 name = "transifex-client-0.10";
1495 name = "transifex-client-0.10";
1483 buildInputs = with self; [];
1496 buildInputs = with self; [];
1484 doCheck = false;
1497 doCheck = false;
1485 propagatedBuildInputs = with self; [];
1498 propagatedBuildInputs = with self; [];
1486 src = fetchurl {
1499 src = fetchurl {
1487 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1500 url = "https://pypi.python.org/packages/f3/4e/7b925192aee656fb3e04fa6381c8b3dc40198047c3b4a356f6cfd642c809/transifex-client-0.10.tar.gz";
1488 md5 = "5549538d84b8eede6b254cd81ae024fa";
1501 md5 = "5549538d84b8eede6b254cd81ae024fa";
1489 };
1502 };
1490 meta = {
1503 meta = {
1491 license = [ pkgs.lib.licenses.gpl2 ];
1504 license = [ pkgs.lib.licenses.gpl2 ];
1492 };
1505 };
1493 };
1506 };
1494 translationstring = super.buildPythonPackage {
1507 translationstring = super.buildPythonPackage {
1495 name = "translationstring-1.3";
1508 name = "translationstring-1.3";
1496 buildInputs = with self; [];
1509 buildInputs = with self; [];
1497 doCheck = false;
1510 doCheck = false;
1498 propagatedBuildInputs = with self; [];
1511 propagatedBuildInputs = with self; [];
1499 src = fetchurl {
1512 src = fetchurl {
1500 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1513 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
1501 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1514 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
1502 };
1515 };
1503 meta = {
1516 meta = {
1504 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1517 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
1505 };
1518 };
1506 };
1519 };
1507 trollius = super.buildPythonPackage {
1520 trollius = super.buildPythonPackage {
1508 name = "trollius-1.0.4";
1521 name = "trollius-1.0.4";
1509 buildInputs = with self; [];
1522 buildInputs = with self; [];
1510 doCheck = false;
1523 doCheck = false;
1511 propagatedBuildInputs = with self; [futures];
1524 propagatedBuildInputs = with self; [futures];
1512 src = fetchurl {
1525 src = fetchurl {
1513 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1526 url = "https://pypi.python.org/packages/aa/e6/4141db437f55e6ee7a3fb69663239e3fde7841a811b4bef293145ad6c836/trollius-1.0.4.tar.gz";
1514 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1527 md5 = "3631a464d49d0cbfd30ab2918ef2b783";
1515 };
1528 };
1516 meta = {
1529 meta = {
1517 license = [ pkgs.lib.licenses.asl20 ];
1530 license = [ pkgs.lib.licenses.asl20 ];
1518 };
1531 };
1519 };
1532 };
1520 uWSGI = super.buildPythonPackage {
1533 uWSGI = super.buildPythonPackage {
1521 name = "uWSGI-2.0.11.2";
1534 name = "uWSGI-2.0.11.2";
1522 buildInputs = with self; [];
1535 buildInputs = with self; [];
1523 doCheck = false;
1536 doCheck = false;
1524 propagatedBuildInputs = with self; [];
1537 propagatedBuildInputs = with self; [];
1525 src = fetchurl {
1538 src = fetchurl {
1526 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1539 url = "https://pypi.python.org/packages/9b/78/918db0cfab0546afa580c1e565209c49aaf1476bbfe491314eadbe47c556/uwsgi-2.0.11.2.tar.gz";
1527 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1540 md5 = "1f02dcbee7f6f61de4b1fd68350cf16f";
1528 };
1541 };
1529 meta = {
1542 meta = {
1530 license = [ pkgs.lib.licenses.gpl2 ];
1543 license = [ pkgs.lib.licenses.gpl2 ];
1531 };
1544 };
1532 };
1545 };
1533 urllib3 = super.buildPythonPackage {
1546 urllib3 = super.buildPythonPackage {
1534 name = "urllib3-1.16";
1547 name = "urllib3-1.16";
1535 buildInputs = with self; [];
1548 buildInputs = with self; [];
1536 doCheck = false;
1549 doCheck = false;
1537 propagatedBuildInputs = with self; [];
1550 propagatedBuildInputs = with self; [];
1538 src = fetchurl {
1551 src = fetchurl {
1539 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1552 url = "https://pypi.python.org/packages/3b/f0/e763169124e3f5db0926bc3dbfcd580a105f9ca44cf5d8e6c7a803c9f6b5/urllib3-1.16.tar.gz";
1540 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1553 md5 = "fcaab1c5385c57deeb7053d3d7d81d59";
1541 };
1554 };
1542 meta = {
1555 meta = {
1543 license = [ pkgs.lib.licenses.mit ];
1556 license = [ pkgs.lib.licenses.mit ];
1544 };
1557 };
1545 };
1558 };
1546 venusian = super.buildPythonPackage {
1559 venusian = super.buildPythonPackage {
1547 name = "venusian-1.0";
1560 name = "venusian-1.0";
1548 buildInputs = with self; [];
1561 buildInputs = with self; [];
1549 doCheck = false;
1562 doCheck = false;
1550 propagatedBuildInputs = with self; [];
1563 propagatedBuildInputs = with self; [];
1551 src = fetchurl {
1564 src = fetchurl {
1552 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1565 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
1553 md5 = "dccf2eafb7113759d60c86faf5538756";
1566 md5 = "dccf2eafb7113759d60c86faf5538756";
1554 };
1567 };
1555 meta = {
1568 meta = {
1556 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1569 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1557 };
1570 };
1558 };
1571 };
1559 waitress = super.buildPythonPackage {
1572 waitress = super.buildPythonPackage {
1560 name = "waitress-0.8.9";
1573 name = "waitress-0.8.9";
1561 buildInputs = with self; [];
1574 buildInputs = with self; [];
1562 doCheck = false;
1575 doCheck = false;
1563 propagatedBuildInputs = with self; [setuptools];
1576 propagatedBuildInputs = with self; [setuptools];
1564 src = fetchurl {
1577 src = fetchurl {
1565 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1578 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
1566 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1579 md5 = "da3f2e62b3676be5dd630703a68e2a04";
1567 };
1580 };
1568 meta = {
1581 meta = {
1569 license = [ pkgs.lib.licenses.zpt21 ];
1582 license = [ pkgs.lib.licenses.zpt21 ];
1570 };
1583 };
1571 };
1584 };
1572 wsgiref = super.buildPythonPackage {
1585 wsgiref = super.buildPythonPackage {
1573 name = "wsgiref-0.1.2";
1586 name = "wsgiref-0.1.2";
1574 buildInputs = with self; [];
1587 buildInputs = with self; [];
1575 doCheck = false;
1588 doCheck = false;
1576 propagatedBuildInputs = with self; [];
1589 propagatedBuildInputs = with self; [];
1577 src = fetchurl {
1590 src = fetchurl {
1578 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1591 url = "https://pypi.python.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
1579 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1592 md5 = "29b146e6ebd0f9fb119fe321f7bcf6cb";
1580 };
1593 };
1581 meta = {
1594 meta = {
1582 license = [ { fullName = "PSF or ZPL"; } ];
1595 license = [ { fullName = "PSF or ZPL"; } ];
1583 };
1596 };
1584 };
1597 };
1585 zope.cachedescriptors = super.buildPythonPackage {
1598 zope.cachedescriptors = super.buildPythonPackage {
1586 name = "zope.cachedescriptors-4.0.0";
1599 name = "zope.cachedescriptors-4.0.0";
1587 buildInputs = with self; [];
1600 buildInputs = with self; [];
1588 doCheck = false;
1601 doCheck = false;
1589 propagatedBuildInputs = with self; [setuptools];
1602 propagatedBuildInputs = with self; [setuptools];
1590 src = fetchurl {
1603 src = fetchurl {
1591 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1604 url = "https://pypi.python.org/packages/40/33/694b6644c37f28553f4b9f20b3c3a20fb709a22574dff20b5bdffb09ecd5/zope.cachedescriptors-4.0.0.tar.gz";
1592 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1605 md5 = "8d308de8c936792c8e758058fcb7d0f0";
1593 };
1606 };
1594 meta = {
1607 meta = {
1595 license = [ pkgs.lib.licenses.zpt21 ];
1608 license = [ pkgs.lib.licenses.zpt21 ];
1596 };
1609 };
1597 };
1610 };
1598 zope.deprecation = super.buildPythonPackage {
1611 zope.deprecation = super.buildPythonPackage {
1599 name = "zope.deprecation-4.1.2";
1612 name = "zope.deprecation-4.1.2";
1600 buildInputs = with self; [];
1613 buildInputs = with self; [];
1601 doCheck = false;
1614 doCheck = false;
1602 propagatedBuildInputs = with self; [setuptools];
1615 propagatedBuildInputs = with self; [setuptools];
1603 src = fetchurl {
1616 src = fetchurl {
1604 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1617 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
1605 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1618 md5 = "e9a663ded58f4f9f7881beb56cae2782";
1606 };
1619 };
1607 meta = {
1620 meta = {
1608 license = [ pkgs.lib.licenses.zpt21 ];
1621 license = [ pkgs.lib.licenses.zpt21 ];
1609 };
1622 };
1610 };
1623 };
1611 zope.event = super.buildPythonPackage {
1624 zope.event = super.buildPythonPackage {
1612 name = "zope.event-4.0.3";
1625 name = "zope.event-4.0.3";
1613 buildInputs = with self; [];
1626 buildInputs = with self; [];
1614 doCheck = false;
1627 doCheck = false;
1615 propagatedBuildInputs = with self; [setuptools];
1628 propagatedBuildInputs = with self; [setuptools];
1616 src = fetchurl {
1629 src = fetchurl {
1617 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1630 url = "https://pypi.python.org/packages/c1/29/91ba884d7d6d96691df592e9e9c2bfa57a47040ec1ff47eff18c85137152/zope.event-4.0.3.tar.gz";
1618 md5 = "9a3780916332b18b8b85f522bcc3e249";
1631 md5 = "9a3780916332b18b8b85f522bcc3e249";
1619 };
1632 };
1620 meta = {
1633 meta = {
1621 license = [ pkgs.lib.licenses.zpt21 ];
1634 license = [ pkgs.lib.licenses.zpt21 ];
1622 };
1635 };
1623 };
1636 };
1624 zope.interface = super.buildPythonPackage {
1637 zope.interface = super.buildPythonPackage {
1625 name = "zope.interface-4.1.3";
1638 name = "zope.interface-4.1.3";
1626 buildInputs = with self; [];
1639 buildInputs = with self; [];
1627 doCheck = false;
1640 doCheck = false;
1628 propagatedBuildInputs = with self; [setuptools];
1641 propagatedBuildInputs = with self; [setuptools];
1629 src = fetchurl {
1642 src = fetchurl {
1630 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1643 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
1631 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1644 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
1632 };
1645 };
1633 meta = {
1646 meta = {
1634 license = [ pkgs.lib.licenses.zpt21 ];
1647 license = [ pkgs.lib.licenses.zpt21 ];
1635 };
1648 };
1636 };
1649 };
1637
1650
1638 ### Test requirements
1651 ### Test requirements
1639
1652
1640
1653
1641 }
1654 }
@@ -1,151 +1,152 @@
1 Babel==1.3
1 Babel==1.3
2 Beaker==1.7.0
2 Beaker==1.7.0
3 CProfileV==1.0.6
3 CProfileV==1.0.6
4 Fabric==1.10.0
4 Fabric==1.10.0
5 FormEncode==1.2.4
5 FormEncode==1.2.4
6 Jinja2==2.7.3
6 Jinja2==2.7.3
7 Mako==1.0.1
7 Mako==1.0.1
8 Markdown==2.6.2
8 Markdown==2.6.2
9 MarkupSafe==0.23
9 MarkupSafe==0.23
10 MySQL-python==1.2.5
10 MySQL-python==1.2.5
11 Paste==2.0.2
11 Paste==2.0.2
12 PasteDeploy==1.5.2
12 PasteDeploy==1.5.2
13 PasteScript==1.7.5
13 PasteScript==1.7.5
14 Pygments==2.1.3
14 Pygments==2.1.3
15
15
16 # TODO: This version is not available on PyPI
16 # TODO: This version is not available on PyPI
17 # Pylons==1.0.2.dev20160108
17 # Pylons==1.0.2.dev20160108
18 Pylons==1.0.1
18 Pylons==1.0.1
19
19
20 # TODO: This version is not available, but newer ones are
20 # TODO: This version is not available, but newer ones are
21 # Pyro4==4.35
21 # Pyro4==4.35
22 Pyro4==4.41
22 Pyro4==4.41
23
23
24 # TODO: This should probably not be in here
24 # TODO: This should probably not be in here
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
25 # -e hg+https://johbo@code.rhodecode.com/johbo/rhodecode-fork@3a454bd1f17c0b2b2a951cf2b111e0320d7942a9#egg=RhodeCodeEnterprise-dev
26
26
27 # TODO: This is not really a dependency, we should add it only
27 # TODO: This is not really a dependency, we should add it only
28 # into the development environment, since there it is useful.
28 # into the development environment, since there it is useful.
29 # RhodeCodeVCSServer==3.9.0
29 # RhodeCodeVCSServer==3.9.0
30
30
31 Routes==1.13
31 Routes==1.13
32 SQLAlchemy==0.9.9
32 SQLAlchemy==0.9.9
33 Sphinx==1.2.2
33 Sphinx==1.2.2
34 Tempita==0.5.2
34 Tempita==0.5.2
35 URLObject==2.4.0
35 URLObject==2.4.0
36 WebError==0.10.3
36 WebError==0.10.3
37
37
38 # TODO: This is modified by us, needs a better integration. For now
38 # TODO: This is modified by us, needs a better integration. For now
39 # using the latest version before.
39 # using the latest version before.
40 # WebHelpers==1.3.dev20150807
40 # WebHelpers==1.3.dev20150807
41 WebHelpers==1.3
41 WebHelpers==1.3
42
42
43 WebHelpers2==2.0
43 WebHelpers2==2.0
44 WebOb==1.3.1
44 WebOb==1.3.1
45 WebTest==1.4.3
45 WebTest==1.4.3
46 Whoosh==2.7.0
46 Whoosh==2.7.0
47 alembic==0.8.4
47 alembic==0.8.4
48 amqplib==1.0.2
48 amqplib==1.0.2
49 anyjson==0.3.3
49 anyjson==0.3.3
50 appenlight-client==0.6.14
50 appenlight-client==0.6.14
51 authomatic==0.1.0.post1;
51 authomatic==0.1.0.post1;
52 backport-ipaddress==0.1
52 backport-ipaddress==0.1
53 bottle==0.12.8
53 bottle==0.12.8
54 bumpversion==0.5.3
54 bumpversion==0.5.3
55 celery==2.2.10
55 celery==2.2.10
56 click==5.1
56 click==5.1
57 colander==1.2
57 colander==1.2
58 configobj==5.0.6
58 configobj==5.0.6
59 cov-core==1.15.0
59 cov-core==1.15.0
60 coverage==3.7.1
60 coverage==3.7.1
61 cssselect==0.9.1
61 cssselect==0.9.1
62 decorator==3.4.2
62 decorator==3.4.2
63 docutils==0.12
63 docutils==0.12
64 dogpile.cache==0.6.1
64 dogpile.cache==0.6.1
65 dogpile.core==0.4.1
65 dogpile.core==0.4.1
66 dulwich==0.12.0
66 dulwich==0.12.0
67 ecdsa==0.11
67 ecdsa==0.11
68 flake8==2.4.1
68 flake8==2.4.1
69 future==0.14.3
69 future==0.14.3
70 futures==3.0.2
70 futures==3.0.2
71 gprof2dot==2015.12.1
71 gprof2dot==2015.12.1
72 gunicorn==19.6.0
72 gunicorn==19.6.0
73
73
74 # TODO: Needs subvertpy and blows up without Subversion headers,
74 # TODO: Needs subvertpy and blows up without Subversion headers,
75 # actually we should not need this for Enterprise at all.
75 # actually we should not need this for Enterprise at all.
76 # hgsubversion==1.8.2
76 # hgsubversion==1.8.2
77
77
78 gnureadline==6.3.3
78 gnureadline==6.3.3
79 infrae.cache==1.0.1
79 infrae.cache==1.0.1
80 invoke==0.13.0
80 invoke==0.13.0
81 ipdb==0.8
81 ipdb==0.8
82 ipython==3.1.0
82 ipython==3.1.0
83 iso8601==0.1.11
83 iso8601==0.1.11
84 itsdangerous==0.24
84 itsdangerous==0.24
85 kombu==1.5.1
85 kombu==1.5.1
86 lxml==3.4.4
86 lxml==3.4.4
87 marshmallow==2.8.0
87 mccabe==0.3
88 mccabe==0.3
88 meld3==1.0.2
89 meld3==1.0.2
89 mock==1.0.1
90 mock==1.0.1
90 msgpack-python==0.4.6
91 msgpack-python==0.4.6
91 nose==1.3.6
92 nose==1.3.6
92 objgraph==2.0.0
93 objgraph==2.0.0
93 packaging==15.2
94 packaging==15.2
94 paramiko==1.15.1
95 paramiko==1.15.1
95 pep8==1.5.7
96 pep8==1.5.7
96 psutil==2.2.1
97 psutil==2.2.1
97 psycopg2==2.6.1
98 psycopg2==2.6.1
98 py==1.4.29
99 py==1.4.29
99 py-bcrypt==0.4
100 py-bcrypt==0.4
100 py-gfm==0.1.3
101 py-gfm==0.1.3
101 pycrypto==2.6.1
102 pycrypto==2.6.1
102 pycurl==7.19.5
103 pycurl==7.19.5
103 pyflakes==0.8.1
104 pyflakes==0.8.1
104 pyparsing==1.5.7
105 pyparsing==1.5.7
105 pyramid==1.6.1
106 pyramid==1.6.1
106 pyramid-beaker==0.8
107 pyramid-beaker==0.8
107 pyramid-debugtoolbar==2.4.2
108 pyramid-debugtoolbar==2.4.2
108 pyramid-jinja2==2.5
109 pyramid-jinja2==2.5
109 pyramid-mako==1.0.2
110 pyramid-mako==1.0.2
110 pysqlite==2.6.3
111 pysqlite==2.6.3
111 pytest==2.8.5
112 pytest==2.8.5
112 pytest-runner==2.7.1
113 pytest-runner==2.7.1
113 pytest-catchlog==1.2.2
114 pytest-catchlog==1.2.2
114 pytest-cov==1.8.1
115 pytest-cov==1.8.1
115 pytest-profiling==1.0.1
116 pytest-profiling==1.0.1
116 pytest-timeout==0.4
117 pytest-timeout==0.4
117 python-dateutil==1.5
118 python-dateutil==1.5
118 python-ldap==2.4.19
119 python-ldap==2.4.19
119 python-memcached==1.57
120 python-memcached==1.57
120 python-pam==1.8.2
121 python-pam==1.8.2
121 pytz==2015.4
122 pytz==2015.4
122 pyzmq==14.6.0
123 pyzmq==14.6.0
123
124
124 # TODO: This is not available in public
125 # TODO: This is not available in public
125 # rc-testdata==0.2.0
126 # rc-testdata==0.2.0
126
127
127 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128 https://code.rhodecode.com/rhodecode-tools-ce/archive/v0.8.3.zip#md5=9acdfd71b8ddf4056057065f37ab9ccb
128
129
129
130
130 recaptcha-client==1.0.6
131 recaptcha-client==1.0.6
131 repoze.lru==0.6
132 repoze.lru==0.6
132 requests==2.9.1
133 requests==2.9.1
133 serpent==1.12
134 serpent==1.12
134 setproctitle==1.1.8
135 setproctitle==1.1.8
135 setuptools==20.8.1
136 setuptools==20.8.1
136 setuptools-scm==1.11.0
137 setuptools-scm==1.11.0
137 simplejson==3.7.2
138 simplejson==3.7.2
138 six==1.9.0
139 six==1.9.0
139 subprocess32==3.2.6
140 subprocess32==3.2.6
140 supervisor==3.3.0
141 supervisor==3.3.0
141 transifex-client==0.10
142 transifex-client==0.10
142 translationstring==1.3
143 translationstring==1.3
143 trollius==1.0.4
144 trollius==1.0.4
144 uWSGI==2.0.11.2
145 uWSGI==2.0.11.2
145 venusian==1.0
146 venusian==1.0
146 waitress==0.8.9
147 waitress==0.8.9
147 wsgiref==0.1.2
148 wsgiref==0.1.2
148 zope.cachedescriptors==4.0.0
149 zope.cachedescriptors==4.0.0
149 zope.deprecation==4.1.2
150 zope.deprecation==4.1.2
150 zope.event==4.0.3
151 zope.event==4.0.3
151 zope.interface==4.1.3
152 zope.interface==4.1.3
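The marshmallow==2.8.0 pin added above brings in the schema-based serialization library. As a minimal, hedged sketch of the 2.x API it exposes (the EventSchema/Event names and fields below are illustrative only, not taken from this codebase):

    from datetime import datetime
    from marshmallow import Schema, fields

    class EventSchema(Schema):
        # illustrative fields; a real schema would mirror the object being dumped
        name = fields.Str()
        created_on = fields.DateTime()

    class Event(object):
        def __init__(self, name):
            self.name = name
            self.created_on = datetime.utcnow()

    # in the marshmallow 2.x line, dump() returns a MarshalResult; .data is the plain dict
    event_dict = EventSchema().dump(Event('repo-push')).data
    print(event_dict)  # e.g. {'name': 'repo-push', 'created_on': '2016-07-01T12:00:00+00:00'}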
@@ -1,158 +1,163 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixture import Fixture
29
29
30
30
31 fixture = Fixture()
31 fixture = Fixture()
32
32
33 UPDATE_REPO_NAME = 'api_update_me'
33 UPDATE_REPO_NAME = 'api_update_me'
34
34
35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
35 class SAME_AS_UPDATES(object): """ Constant used for tests below """
36
36
37 @pytest.mark.usefixtures("testuser_api", "app")
37 @pytest.mark.usefixtures("testuser_api", "app")
38 class TestApiUpdateRepo(object):
38 class TestApiUpdateRepo(object):
39
39
40 @pytest.mark.parametrize("updates, expected", [
40 @pytest.mark.parametrize("updates, expected", [
41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
41 ({'owner': TEST_USER_REGULAR_LOGIN}, SAME_AS_UPDATES),
42 ({'description': 'new description'}, SAME_AS_UPDATES),
42 ({'description': 'new description'}, SAME_AS_UPDATES),
43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
43 ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES),
44 ({'clone_uri': None}, {'clone_uri': ''}),
44 ({'clone_uri': None}, {'clone_uri': ''}),
45 ({'clone_uri': ''}, {'clone_uri': ''}),
45 ({'clone_uri': ''}, {'clone_uri': ''}),
46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
46 ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}),
47 ({'enable_statistics': True}, SAME_AS_UPDATES),
47 ({'enable_statistics': True}, SAME_AS_UPDATES),
48 ({'enable_locking': True}, SAME_AS_UPDATES),
48 ({'enable_locking': True}, SAME_AS_UPDATES),
49 ({'enable_downloads': True}, SAME_AS_UPDATES),
49 ({'enable_downloads': True}, SAME_AS_UPDATES),
50 ({'name': 'new_repo_name'}, {'repo_name': 'new_repo_name'}),
51 ({'group': 'test_group_for_update'},
52 {'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME}),
50 ({'name': 'new_repo_name'}, {
51 'repo_name': 'new_repo_name',
52 'url': 'http://test.example.com:80/new_repo_name',
53 }),
54 ({'group': 'test_group_for_update'}, {
55 'repo_name': 'test_group_for_update/%s' % UPDATE_REPO_NAME,
56 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME
57 }),
53 ])
58 ])
54 def test_api_update_repo(self, updates, expected, backend):
59 def test_api_update_repo(self, updates, expected, backend):
55 repo_name = UPDATE_REPO_NAME
60 repo_name = UPDATE_REPO_NAME
56 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
61 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
57 if updates.get('group'):
62 if updates.get('group'):
58 fixture.create_repo_group(updates['group'])
63 fixture.create_repo_group(updates['group'])
59
64
60 expected_api_data = repo.get_api_data(include_secrets=True)
65 expected_api_data = repo.get_api_data(include_secrets=True)
61 if expected is SAME_AS_UPDATES:
66 if expected is SAME_AS_UPDATES:
62 expected_api_data.update(updates)
67 expected_api_data.update(updates)
63 else:
68 else:
64 expected_api_data.update(expected)
69 expected_api_data.update(expected)
65
70
66
71
67 id_, params = build_data(
72 id_, params = build_data(
68 self.apikey, 'update_repo', repoid=repo_name, **updates)
73 self.apikey, 'update_repo', repoid=repo_name, **updates)
69 response = api_call(self.app, params)
74 response = api_call(self.app, params)
70
75
71 if updates.get('name'):
76 if updates.get('name'):
72 repo_name = updates['name']
77 repo_name = updates['name']
73 if updates.get('group'):
78 if updates.get('group'):
74 repo_name = '/'.join([updates['group'], repo_name])
79 repo_name = '/'.join([updates['group'], repo_name])
75
80
76 try:
81 try:
77 expected = {
82 expected = {
78 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
83 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
79 'repository': jsonify(expected_api_data)
84 'repository': jsonify(expected_api_data)
80 }
85 }
81 assert_ok(id_, expected, given=response.body)
86 assert_ok(id_, expected, given=response.body)
82 finally:
87 finally:
83 fixture.destroy_repo(repo_name)
88 fixture.destroy_repo(repo_name)
84 if updates.get('group'):
89 if updates.get('group'):
85 fixture.destroy_repo_group(updates['group'])
90 fixture.destroy_repo_group(updates['group'])
86
91
87 def test_api_update_repo_fork_of_field(self, backend):
92 def test_api_update_repo_fork_of_field(self, backend):
88 master_repo = backend.create_repo()
93 master_repo = backend.create_repo()
89 repo = backend.create_repo()
94 repo = backend.create_repo()
90 updates = {
95 updates = {
91 'fork_of': master_repo.repo_name
96 'fork_of': master_repo.repo_name
92 }
97 }
93 expected_api_data = repo.get_api_data(include_secrets=True)
98 expected_api_data = repo.get_api_data(include_secrets=True)
94 expected_api_data.update(updates)
99 expected_api_data.update(updates)
95
100
96 id_, params = build_data(
101 id_, params = build_data(
97 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
102 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
98 response = api_call(self.app, params)
103 response = api_call(self.app, params)
99 expected = {
104 expected = {
100 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
105 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
101 'repository': jsonify(expected_api_data)
106 'repository': jsonify(expected_api_data)
102 }
107 }
103 assert_ok(id_, expected, given=response.body)
108 assert_ok(id_, expected, given=response.body)
104 result = response.json['result']['repository']
109 result = response.json['result']['repository']
105 assert result['fork_of'] == master_repo.repo_name
110 assert result['fork_of'] == master_repo.repo_name
106
111
107 def test_api_update_repo_fork_of_not_found(self, backend):
112 def test_api_update_repo_fork_of_not_found(self, backend):
108 master_repo_name = 'fake-parent-repo'
113 master_repo_name = 'fake-parent-repo'
109 repo = backend.create_repo()
114 repo = backend.create_repo()
110 updates = {
115 updates = {
111 'fork_of': master_repo_name
116 'fork_of': master_repo_name
112 }
117 }
113 id_, params = build_data(
118 id_, params = build_data(
114 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
119 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
115 response = api_call(self.app, params)
120 response = api_call(self.app, params)
116 expected = 'repository `{}` does not exist'.format(master_repo_name)
121 expected = 'repository `{}` does not exist'.format(master_repo_name)
117 assert_error(id_, expected, given=response.body)
122 assert_error(id_, expected, given=response.body)
118
123
119 def test_api_update_repo_with_repo_group_not_existing(self):
124 def test_api_update_repo_with_repo_group_not_existing(self):
120 repo_name = 'admin_owned'
125 repo_name = 'admin_owned'
121 fixture.create_repo(repo_name)
126 fixture.create_repo(repo_name)
122 updates = {'group': 'test_group_for_update'}
127 updates = {'group': 'test_group_for_update'}
123 id_, params = build_data(
128 id_, params = build_data(
124 self.apikey, 'update_repo', repoid=repo_name, **updates)
129 self.apikey, 'update_repo', repoid=repo_name, **updates)
125 response = api_call(self.app, params)
130 response = api_call(self.app, params)
126 try:
131 try:
127 expected = 'repository group `%s` does not exist' % (
132 expected = 'repository group `%s` does not exist' % (
128 updates['group'],)
133 updates['group'],)
129 assert_error(id_, expected, given=response.body)
134 assert_error(id_, expected, given=response.body)
130 finally:
135 finally:
131 fixture.destroy_repo(repo_name)
136 fixture.destroy_repo(repo_name)
132
137
133 def test_api_update_repo_regular_user_not_allowed(self):
138 def test_api_update_repo_regular_user_not_allowed(self):
134 repo_name = 'admin_owned'
139 repo_name = 'admin_owned'
135 fixture.create_repo(repo_name)
140 fixture.create_repo(repo_name)
136 updates = {'active': False}
141 updates = {'active': False}
137 id_, params = build_data(
142 id_, params = build_data(
138 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
143 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
139 response = api_call(self.app, params)
144 response = api_call(self.app, params)
140 try:
145 try:
141 expected = 'repository `%s` does not exist' % (repo_name,)
146 expected = 'repository `%s` does not exist' % (repo_name,)
142 assert_error(id_, expected, given=response.body)
147 assert_error(id_, expected, given=response.body)
143 finally:
148 finally:
144 fixture.destroy_repo(repo_name)
149 fixture.destroy_repo(repo_name)
145
150
146 @mock.patch.object(RepoModel, 'update', crash)
151 @mock.patch.object(RepoModel, 'update', crash)
147 def test_api_update_repo_exception_occurred(self, backend):
152 def test_api_update_repo_exception_occurred(self, backend):
148 repo_name = UPDATE_REPO_NAME
153 repo_name = UPDATE_REPO_NAME
149 fixture.create_repo(repo_name, repo_type=backend.alias)
154 fixture.create_repo(repo_name, repo_type=backend.alias)
150 id_, params = build_data(
155 id_, params = build_data(
151 self.apikey, 'update_repo', repoid=repo_name,
156 self.apikey, 'update_repo', repoid=repo_name,
152 owner=TEST_USER_ADMIN_LOGIN,)
157 owner=TEST_USER_ADMIN_LOGIN,)
153 response = api_call(self.app, params)
158 response = api_call(self.app, params)
154 try:
159 try:
155 expected = 'failed to update repo `%s`' % (repo_name,)
160 expected = 'failed to update repo `%s`' % (repo_name,)
156 assert_error(id_, expected, given=response.body)
161 assert_error(id_, expected, given=response.body)
157 finally:
162 finally:
158 fixture.destroy_repo(repo_name)
163 fixture.destroy_repo(repo_name)
@@ -1,846 +1,847 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 pull requests controller for rhodecode for initializing pull requests
22 pull requests controller for rhodecode for initializing pull requests
23 """
23 """
24
24
25 import formencode
25 import formencode
26 import logging
26 import logging
27
27
28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
28 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
29 from pylons import request, tmpl_context as c, url
29 from pylons import request, tmpl_context as c, url
30 from pylons.controllers.util import redirect
30 from pylons.controllers.util import redirect
31 from pylons.i18n.translation import _
31 from pylons.i18n.translation import _
32 from sqlalchemy.sql import func
32 from sqlalchemy.sql import func
33 from sqlalchemy.sql.expression import or_
33 from sqlalchemy.sql.expression import or_
34
34
35 from rhodecode.lib import auth, diffs, helpers as h
35 from rhodecode.lib import auth, diffs, helpers as h
36 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.base import (
37 from rhodecode.lib.base import (
38 BaseRepoController, render, vcs_operation_context)
38 BaseRepoController, render, vcs_operation_context)
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
40 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
41 HasAcceptedRepoType, XHRRequired)
41 HasAcceptedRepoType, XHRRequired)
42 from rhodecode.lib.utils import jsonify
42 from rhodecode.lib.utils import jsonify
43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
43 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
46 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError)
47 from rhodecode.lib.diffs import LimitedDiffContainer
47 from rhodecode.lib.diffs import LimitedDiffContainer
48 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.comment import ChangesetCommentsModel
49 from rhodecode.model.comment import ChangesetCommentsModel
50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
50 from rhodecode.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
51 Repository
51 Repository
52 from rhodecode.model.forms import PullRequestForm
52 from rhodecode.model.forms import PullRequestForm
53 from rhodecode.model.meta import Session
53 from rhodecode.model.meta import Session
54 from rhodecode.model.pull_request import PullRequestModel
54 from rhodecode.model.pull_request import PullRequestModel
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 class PullrequestsController(BaseRepoController):
59 class PullrequestsController(BaseRepoController):
60 def __before__(self):
60 def __before__(self):
61 super(PullrequestsController, self).__before__()
61 super(PullrequestsController, self).__before__()
62
62
63 def _load_compare_data(self, pull_request, enable_comments=True):
63 def _load_compare_data(self, pull_request, enable_comments=True):
64 """
64 """
65 Load context data needed for generating compare diff
65 Load context data needed for generating compare diff
66
66
67 :param pull_request: object related to the request
67 :param pull_request: object related to the request
68 :param enable_comments: flag to determine if comments are included
68 :param enable_comments: flag to determine if comments are included
69 """
69 """
70 source_repo = pull_request.source_repo
70 source_repo = pull_request.source_repo
71 source_ref_id = pull_request.source_ref_parts.commit_id
71 source_ref_id = pull_request.source_ref_parts.commit_id
72
72
73 target_repo = pull_request.target_repo
73 target_repo = pull_request.target_repo
74 target_ref_id = pull_request.target_ref_parts.commit_id
74 target_ref_id = pull_request.target_ref_parts.commit_id
75
75
76 # despite opening commits for bookmarks/branches/tags, we always
76 # despite opening commits for bookmarks/branches/tags, we always
77 # convert this to rev to prevent changes after bookmark or branch change
77 # convert this to rev to prevent changes after bookmark or branch change
78 c.source_ref_type = 'rev'
78 c.source_ref_type = 'rev'
79 c.source_ref = source_ref_id
79 c.source_ref = source_ref_id
80
80
81 c.target_ref_type = 'rev'
81 c.target_ref_type = 'rev'
82 c.target_ref = target_ref_id
82 c.target_ref = target_ref_id
83
83
84 c.source_repo = source_repo
84 c.source_repo = source_repo
85 c.target_repo = target_repo
85 c.target_repo = target_repo
86
86
87 c.fulldiff = bool(request.GET.get('fulldiff'))
87 c.fulldiff = bool(request.GET.get('fulldiff'))
88
88
89 # diff_limit is the old behavior, will cut off the whole diff
89 # diff_limit is the old behavior, will cut off the whole diff
90 # if the limit is applied otherwise will just hide the
90 # if the limit is applied otherwise will just hide the
91 # big files from the front-end
91 # big files from the front-end
92 diff_limit = self.cut_off_limit_diff
92 diff_limit = self.cut_off_limit_diff
93 file_limit = self.cut_off_limit_file
93 file_limit = self.cut_off_limit_file
94
94
95 pre_load = ["author", "branch", "date", "message"]
95 pre_load = ["author", "branch", "date", "message"]
96
96
97 c.commit_ranges = []
97 c.commit_ranges = []
98 source_commit = EmptyCommit()
98 source_commit = EmptyCommit()
99 target_commit = EmptyCommit()
99 target_commit = EmptyCommit()
100 c.missing_requirements = False
100 c.missing_requirements = False
101 try:
101 try:
102 c.commit_ranges = [
102 c.commit_ranges = [
103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
103 source_repo.get_commit(commit_id=rev, pre_load=pre_load)
104 for rev in pull_request.revisions]
104 for rev in pull_request.revisions]
105
105
106 c.statuses = source_repo.statuses(
106 c.statuses = source_repo.statuses(
107 [x.raw_id for x in c.commit_ranges])
107 [x.raw_id for x in c.commit_ranges])
108
108
109 target_commit = source_repo.get_commit(
109 target_commit = source_repo.get_commit(
110 commit_id=safe_str(target_ref_id))
110 commit_id=safe_str(target_ref_id))
111 source_commit = source_repo.get_commit(
111 source_commit = source_repo.get_commit(
112 commit_id=safe_str(source_ref_id))
112 commit_id=safe_str(source_ref_id))
113 except RepositoryRequirementError:
113 except RepositoryRequirementError:
114 c.missing_requirements = True
114 c.missing_requirements = True
115
115
116 c.missing_commits = False
116 c.missing_commits = False
117 if (c.missing_requirements or
117 if (c.missing_requirements or
118 isinstance(source_commit, EmptyCommit) or
118 isinstance(source_commit, EmptyCommit) or
119 source_commit == target_commit):
119 source_commit == target_commit):
120 _parsed = []
120 _parsed = []
121 c.missing_commits = True
121 c.missing_commits = True
122 else:
122 else:
123 vcs_diff = PullRequestModel().get_diff(pull_request)
123 vcs_diff = PullRequestModel().get_diff(pull_request)
124 diff_processor = diffs.DiffProcessor(
124 diff_processor = diffs.DiffProcessor(
125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
125 vcs_diff, format='gitdiff', diff_limit=diff_limit,
126 file_limit=file_limit, show_full_diff=c.fulldiff)
126 file_limit=file_limit, show_full_diff=c.fulldiff)
127 _parsed = diff_processor.prepare()
127 _parsed = diff_processor.prepare()
128
128
129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
129 c.limited_diff = isinstance(_parsed, LimitedDiffContainer)
130
130
131 c.files = []
131 c.files = []
132 c.changes = {}
132 c.changes = {}
133 c.lines_added = 0
133 c.lines_added = 0
134 c.lines_deleted = 0
134 c.lines_deleted = 0
135 c.included_files = []
135 c.included_files = []
136 c.deleted_files = []
136 c.deleted_files = []
137
137
138 for f in _parsed:
138 for f in _parsed:
139 st = f['stats']
139 st = f['stats']
140 c.lines_added += st['added']
140 c.lines_added += st['added']
141 c.lines_deleted += st['deleted']
141 c.lines_deleted += st['deleted']
142
142
143 fid = h.FID('', f['filename'])
143 fid = h.FID('', f['filename'])
144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
144 c.files.append([fid, f['operation'], f['filename'], f['stats']])
145 c.included_files.append(f['filename'])
145 c.included_files.append(f['filename'])
146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
146 html_diff = diff_processor.as_html(enable_comments=enable_comments,
147 parsed_lines=[f])
147 parsed_lines=[f])
148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
148 c.changes[fid] = [f['operation'], f['filename'], html_diff, f]
149
149
150 def _extract_ordering(self, request):
150 def _extract_ordering(self, request):
151 column_index = safe_int(request.GET.get('order[0][column]'))
151 column_index = safe_int(request.GET.get('order[0][column]'))
152 order_dir = request.GET.get('order[0][dir]', 'desc')
152 order_dir = request.GET.get('order[0][dir]', 'desc')
153 order_by = request.GET.get(
153 order_by = request.GET.get(
154 'columns[%s][data][sort]' % column_index, 'name_raw')
154 'columns[%s][data][sort]' % column_index, 'name_raw')
155 return order_by, order_dir
155 return order_by, order_dir
156
156
157 @LoginRequired()
157 @LoginRequired()
158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
158 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
159 'repository.admin')
159 'repository.admin')
160 @HasAcceptedRepoType('git', 'hg')
160 @HasAcceptedRepoType('git', 'hg')
161 def show_all(self, repo_name):
161 def show_all(self, repo_name):
162 # filter types
162 # filter types
163 c.active = 'open'
163 c.active = 'open'
164 c.source = str2bool(request.GET.get('source'))
164 c.source = str2bool(request.GET.get('source'))
165 c.closed = str2bool(request.GET.get('closed'))
165 c.closed = str2bool(request.GET.get('closed'))
166 c.my = str2bool(request.GET.get('my'))
166 c.my = str2bool(request.GET.get('my'))
167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
167 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
168 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
169 c.repo_name = repo_name
169 c.repo_name = repo_name
170
170
171 opened_by = None
171 opened_by = None
172 if c.my:
172 if c.my:
173 c.active = 'my'
173 c.active = 'my'
174 opened_by = [c.rhodecode_user.user_id]
174 opened_by = [c.rhodecode_user.user_id]
175
175
176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
176 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
177 if c.closed:
177 if c.closed:
178 c.active = 'closed'
178 c.active = 'closed'
179 statuses = [PullRequest.STATUS_CLOSED]
179 statuses = [PullRequest.STATUS_CLOSED]
180
180
181 if c.awaiting_review and not c.source:
181 if c.awaiting_review and not c.source:
182 c.active = 'awaiting'
182 c.active = 'awaiting'
183 if c.source and not c.awaiting_review:
183 if c.source and not c.awaiting_review:
184 c.active = 'source'
184 c.active = 'source'
185 if c.awaiting_my_review:
185 if c.awaiting_my_review:
186 c.active = 'awaiting_my'
186 c.active = 'awaiting_my'
187
187
188 data = self._get_pull_requests_list(
188 data = self._get_pull_requests_list(
189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
189 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
190 if not request.is_xhr:
190 if not request.is_xhr:
191 c.data = json.dumps(data['data'])
191 c.data = json.dumps(data['data'])
192 c.records_total = data['recordsTotal']
192 c.records_total = data['recordsTotal']
193 return render('/pullrequests/pullrequests.html')
193 return render('/pullrequests/pullrequests.html')
194 else:
194 else:
195 return json.dumps(data)
195 return json.dumps(data)
196
196
197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
197 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
198 # pagination
198 # pagination
199 start = safe_int(request.GET.get('start'), 0)
199 start = safe_int(request.GET.get('start'), 0)
200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
200 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
201 order_by, order_dir = self._extract_ordering(request)
201 order_by, order_dir = self._extract_ordering(request)
202
202
203 if c.awaiting_review:
203 if c.awaiting_review:
204 pull_requests = PullRequestModel().get_awaiting_review(
204 pull_requests = PullRequestModel().get_awaiting_review(
205 repo_name, source=c.source, opened_by=opened_by,
205 repo_name, source=c.source, opened_by=opened_by,
206 statuses=statuses, offset=start, length=length,
206 statuses=statuses, offset=start, length=length,
207 order_by=order_by, order_dir=order_dir)
207 order_by=order_by, order_dir=order_dir)
208 pull_requests_total_count = PullRequestModel(
208 pull_requests_total_count = PullRequestModel(
209 ).count_awaiting_review(
209 ).count_awaiting_review(
210 repo_name, source=c.source, statuses=statuses,
210 repo_name, source=c.source, statuses=statuses,
211 opened_by=opened_by)
211 opened_by=opened_by)
212 elif c.awaiting_my_review:
212 elif c.awaiting_my_review:
213 pull_requests = PullRequestModel().get_awaiting_my_review(
213 pull_requests = PullRequestModel().get_awaiting_my_review(
214 repo_name, source=c.source, opened_by=opened_by,
214 repo_name, source=c.source, opened_by=opened_by,
215 user_id=c.rhodecode_user.user_id, statuses=statuses,
215 user_id=c.rhodecode_user.user_id, statuses=statuses,
216 offset=start, length=length, order_by=order_by,
216 offset=start, length=length, order_by=order_by,
217 order_dir=order_dir)
217 order_dir=order_dir)
218 pull_requests_total_count = PullRequestModel(
218 pull_requests_total_count = PullRequestModel(
219 ).count_awaiting_my_review(
219 ).count_awaiting_my_review(
220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
220 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
221 statuses=statuses, opened_by=opened_by)
221 statuses=statuses, opened_by=opened_by)
222 else:
222 else:
223 pull_requests = PullRequestModel().get_all(
223 pull_requests = PullRequestModel().get_all(
224 repo_name, source=c.source, opened_by=opened_by,
224 repo_name, source=c.source, opened_by=opened_by,
225 statuses=statuses, offset=start, length=length,
225 statuses=statuses, offset=start, length=length,
226 order_by=order_by, order_dir=order_dir)
226 order_by=order_by, order_dir=order_dir)
227 pull_requests_total_count = PullRequestModel().count_all(
227 pull_requests_total_count = PullRequestModel().count_all(
228 repo_name, source=c.source, statuses=statuses,
228 repo_name, source=c.source, statuses=statuses,
229 opened_by=opened_by)
229 opened_by=opened_by)
230
230
231 from rhodecode.lib.utils import PartialRenderer
231 from rhodecode.lib.utils import PartialRenderer
232 _render = PartialRenderer('data_table/_dt_elements.html')
232 _render = PartialRenderer('data_table/_dt_elements.html')
233 data = []
233 data = []
234 for pr in pull_requests:
234 for pr in pull_requests:
235 comments = ChangesetCommentsModel().get_all_comments(
235 comments = ChangesetCommentsModel().get_all_comments(
236 c.rhodecode_db_repo.repo_id, pull_request=pr)
236 c.rhodecode_db_repo.repo_id, pull_request=pr)
237
237
238 data.append({
238 data.append({
239 'name': _render('pullrequest_name',
239 'name': _render('pullrequest_name',
240 pr.pull_request_id, pr.target_repo.repo_name),
240 pr.pull_request_id, pr.target_repo.repo_name),
241 'name_raw': pr.pull_request_id,
241 'name_raw': pr.pull_request_id,
242 'status': _render('pullrequest_status',
242 'status': _render('pullrequest_status',
243 pr.calculated_review_status()),
243 pr.calculated_review_status()),
244 'title': _render(
244 'title': _render(
245 'pullrequest_title', pr.title, pr.description),
245 'pullrequest_title', pr.title, pr.description),
246 'description': h.escape(pr.description),
246 'description': h.escape(pr.description),
247 'updated_on': _render('pullrequest_updated_on',
247 'updated_on': _render('pullrequest_updated_on',
248 h.datetime_to_time(pr.updated_on)),
248 h.datetime_to_time(pr.updated_on)),
249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
249 'updated_on_raw': h.datetime_to_time(pr.updated_on),
250 'created_on': _render('pullrequest_updated_on',
250 'created_on': _render('pullrequest_updated_on',
251 h.datetime_to_time(pr.created_on)),
251 h.datetime_to_time(pr.created_on)),
252 'created_on_raw': h.datetime_to_time(pr.created_on),
252 'created_on_raw': h.datetime_to_time(pr.created_on),
253 'author': _render('pullrequest_author',
253 'author': _render('pullrequest_author',
254 pr.author.full_contact, ),
254 pr.author.full_contact, ),
255 'author_raw': pr.author.full_name,
255 'author_raw': pr.author.full_name,
256 'comments': _render('pullrequest_comments', len(comments)),
256 'comments': _render('pullrequest_comments', len(comments)),
257 'comments_raw': len(comments),
257 'comments_raw': len(comments),
258 'closed': pr.is_closed(),
258 'closed': pr.is_closed(),
259 })
259 })
260 # json used to render the grid
260 # json used to render the grid
261 data = ({
261 data = ({
262 'data': data,
262 'data': data,
263 'recordsTotal': pull_requests_total_count,
263 'recordsTotal': pull_requests_total_count,
264 'recordsFiltered': pull_requests_total_count,
264 'recordsFiltered': pull_requests_total_count,
265 })
265 })
266 return data
266 return data
267
267
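The dictionary assembled at the end of _get_pull_requests_list above follows the usual server-side-processing envelope consumed by DataTables-style grids: a data array of row objects plus recordsTotal/recordsFiltered counters. A tiny illustrative payload (all values invented):

    import json

    grid_payload = {
        'data': [
            {'name_raw': 42, 'author_raw': 'Jane Doe', 'comments_raw': 3, 'closed': False},
        ],
        'recordsTotal': 1,      # total rows matching the status/ownership filters
        'recordsFiltered': 1,   # same here, since no extra text search is applied
    }
    print(json.dumps(grid_payload))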
268 @LoginRequired()
268 @LoginRequired()
269 @NotAnonymous()
269 @NotAnonymous()
270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
270 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
271 'repository.admin')
271 'repository.admin')
272 @HasAcceptedRepoType('git', 'hg')
272 @HasAcceptedRepoType('git', 'hg')
273 def index(self):
273 def index(self):
274 source_repo = c.rhodecode_db_repo
274 source_repo = c.rhodecode_db_repo
275
275
276 try:
276 try:
277 source_repo.scm_instance().get_commit()
277 source_repo.scm_instance().get_commit()
278 except EmptyRepositoryError:
278 except EmptyRepositoryError:
279 h.flash(h.literal(_('There are no commits yet')),
279 h.flash(h.literal(_('There are no commits yet')),
280 category='warning')
280 category='warning')
281 redirect(url('summary_home', repo_name=source_repo.repo_name))
281 redirect(url('summary_home', repo_name=source_repo.repo_name))
282
282
283 commit_id = request.GET.get('commit')
283 commit_id = request.GET.get('commit')
284 branch_ref = request.GET.get('branch')
284 branch_ref = request.GET.get('branch')
285 bookmark_ref = request.GET.get('bookmark')
285 bookmark_ref = request.GET.get('bookmark')
286
286
287 try:
287 try:
288 source_repo_data = PullRequestModel().generate_repo_data(
288 source_repo_data = PullRequestModel().generate_repo_data(
289 source_repo, commit_id=commit_id,
289 source_repo, commit_id=commit_id,
290 branch=branch_ref, bookmark=bookmark_ref)
290 branch=branch_ref, bookmark=bookmark_ref)
291 except CommitDoesNotExistError as e:
291 except CommitDoesNotExistError as e:
292 log.exception(e)
292 log.exception(e)
293 h.flash(_('Commit does not exist'), 'error')
293 h.flash(_('Commit does not exist'), 'error')
294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
294 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
295
295
296 default_target_repo = source_repo
296 default_target_repo = source_repo
297 if (source_repo.parent and
297 if (source_repo.parent and
298 not source_repo.parent.scm_instance().is_empty()):
298 not source_repo.parent.scm_instance().is_empty()):
299 # change default if we have a parent repo
299 # change default if we have a parent repo
300 default_target_repo = source_repo.parent
300 default_target_repo = source_repo.parent
301
301
302 target_repo_data = PullRequestModel().generate_repo_data(
302 target_repo_data = PullRequestModel().generate_repo_data(
303 default_target_repo)
303 default_target_repo)
304
304
305 selected_source_ref = source_repo_data['refs']['selected_ref']
305 selected_source_ref = source_repo_data['refs']['selected_ref']
306
306
307 title_source_ref = selected_source_ref.split(':', 2)[1]
307 title_source_ref = selected_source_ref.split(':', 2)[1]
308 c.default_title = PullRequestModel().generate_pullrequest_title(
308 c.default_title = PullRequestModel().generate_pullrequest_title(
309 source=source_repo.repo_name,
309 source=source_repo.repo_name,
310 source_ref=title_source_ref,
310 source_ref=title_source_ref,
311 target=default_target_repo.repo_name
311 target=default_target_repo.repo_name
312 )
312 )
313
313
314 c.default_repo_data = {
314 c.default_repo_data = {
315 'source_repo_name': source_repo.repo_name,
315 'source_repo_name': source_repo.repo_name,
316 'source_refs_json': json.dumps(source_repo_data),
316 'source_refs_json': json.dumps(source_repo_data),
317 'target_repo_name': default_target_repo.repo_name,
317 'target_repo_name': default_target_repo.repo_name,
318 'target_refs_json': json.dumps(target_repo_data),
318 'target_refs_json': json.dumps(target_repo_data),
319 }
319 }
320 c.default_source_ref = selected_source_ref
320 c.default_source_ref = selected_source_ref
321
321
322 return render('/pullrequests/pullrequest.html')
322 return render('/pullrequests/pullrequest.html')
323
323
324 @LoginRequired()
324 @LoginRequired()
325 @NotAnonymous()
325 @NotAnonymous()
326 @XHRRequired()
326 @XHRRequired()
327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
327 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
328 'repository.admin')
328 'repository.admin')
329 @jsonify
329 @jsonify
330 def get_repo_refs(self, repo_name, target_repo_name):
330 def get_repo_refs(self, repo_name, target_repo_name):
331 repo = Repository.get_by_repo_name(target_repo_name)
331 repo = Repository.get_by_repo_name(target_repo_name)
332 if not repo:
332 if not repo:
333 raise HTTPNotFound
333 raise HTTPNotFound
334 return PullRequestModel().generate_repo_data(repo)
334 return PullRequestModel().generate_repo_data(repo)
335
335
336 @LoginRequired()
336 @LoginRequired()
337 @NotAnonymous()
337 @NotAnonymous()
338 @XHRRequired()
338 @XHRRequired()
339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
339 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
340 'repository.admin')
340 'repository.admin')
341 @jsonify
341 @jsonify
342 def get_repo_destinations(self, repo_name):
342 def get_repo_destinations(self, repo_name):
343 repo = Repository.get_by_repo_name(repo_name)
343 repo = Repository.get_by_repo_name(repo_name)
344 if not repo:
344 if not repo:
345 raise HTTPNotFound
345 raise HTTPNotFound
346 filter_query = request.GET.get('query')
346 filter_query = request.GET.get('query')
347
347
348 query = Repository.query() \
348 query = Repository.query() \
349 .order_by(func.length(Repository.repo_name)) \
349 .order_by(func.length(Repository.repo_name)) \
350 .filter(or_(
350 .filter(or_(
351 Repository.repo_name == repo.repo_name,
351 Repository.repo_name == repo.repo_name,
352 Repository.fork_id == repo.repo_id))
352 Repository.fork_id == repo.repo_id))
353
353
354 if filter_query:
354 if filter_query:
355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
355 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
356 query = query.filter(
356 query = query.filter(
357 Repository.repo_name.ilike(ilike_expression))
357 Repository.repo_name.ilike(ilike_expression))
358
358
359 add_parent = False
359 add_parent = False
360 if repo.parent:
360 if repo.parent:
361 if filter_query in repo.parent.repo_name:
361 if filter_query in repo.parent.repo_name:
362 if not repo.parent.scm_instance().is_empty():
362 if not repo.parent.scm_instance().is_empty():
363 add_parent = True
363 add_parent = True
364
364
365 limit = 20 - 1 if add_parent else 20
365 limit = 20 - 1 if add_parent else 20
366 all_repos = query.limit(limit).all()
366 all_repos = query.limit(limit).all()
367 if add_parent:
367 if add_parent:
368 all_repos += [repo.parent]
368 all_repos += [repo.parent]
369
369
370 repos = []
370 repos = []
371 for obj in self.scm_model.get_repos(all_repos):
371 for obj in self.scm_model.get_repos(all_repos):
372 repos.append({
372 repos.append({
373 'id': obj['name'],
373 'id': obj['name'],
374 'text': obj['name'],
374 'text': obj['name'],
375 'type': 'repo',
375 'type': 'repo',
376 'obj': obj['dbrepo']
376 'obj': obj['dbrepo']
377 })
377 })
378
378
379 data = {
379 data = {
380 'more': False,
380 'more': False,
381 'results': [{
381 'results': [{
382 'text': _('Repositories'),
382 'text': _('Repositories'),
383 'children': repos
383 'children': repos
384 }] if repos else []
384 }] if repos else []
385 }
385 }
386 return data
386 return data
387
387
388 @LoginRequired()
388 @LoginRequired()
389 @NotAnonymous()
389 @NotAnonymous()
390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
390 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
391 'repository.admin')
391 'repository.admin')
392 @HasAcceptedRepoType('git', 'hg')
392 @HasAcceptedRepoType('git', 'hg')
393 @auth.CSRFRequired()
393 @auth.CSRFRequired()
394 def create(self, repo_name):
394 def create(self, repo_name):
395 repo = Repository.get_by_repo_name(repo_name)
395 repo = Repository.get_by_repo_name(repo_name)
396 if not repo:
396 if not repo:
397 raise HTTPNotFound
397 raise HTTPNotFound
398
398
399 try:
399 try:
400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
400 _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
401 except formencode.Invalid as errors:
401 except formencode.Invalid as errors:
402 if errors.error_dict.get('revisions'):
402 if errors.error_dict.get('revisions'):
403 msg = 'Revisions: %s' % errors.error_dict['revisions']
403 msg = 'Revisions: %s' % errors.error_dict['revisions']
404 elif errors.error_dict.get('pullrequest_title'):
404 elif errors.error_dict.get('pullrequest_title'):
405 msg = _('Pull request requires a title with min. 3 chars')
405 msg = _('Pull request requires a title with min. 3 chars')
406 else:
406 else:
407 msg = _('Error creating pull request: {}').format(errors)
407 msg = _('Error creating pull request: {}').format(errors)
408 log.exception(msg)
408 log.exception(msg)
409 h.flash(msg, 'error')
409 h.flash(msg, 'error')
410
410
411 # would rather just go back to form ...
411 # would rather just go back to form ...
412 return redirect(url('pullrequest_home', repo_name=repo_name))
412 return redirect(url('pullrequest_home', repo_name=repo_name))
413
413
414 source_repo = _form['source_repo']
414 source_repo = _form['source_repo']
415 source_ref = _form['source_ref']
415 source_ref = _form['source_ref']
416 target_repo = _form['target_repo']
416 target_repo = _form['target_repo']
417 target_ref = _form['target_ref']
417 target_ref = _form['target_ref']
418 commit_ids = _form['revisions'][::-1]
418 commit_ids = _form['revisions'][::-1]
419 reviewers = _form['review_members']
419 reviewers = _form['review_members']
420
420
421 # find the ancestor for this pr
421 # find the ancestor for this pr
422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
422 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
423 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
424
424
425 source_scm = source_db_repo.scm_instance()
425 source_scm = source_db_repo.scm_instance()
426 target_scm = target_db_repo.scm_instance()
426 target_scm = target_db_repo.scm_instance()
427
427
428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
428 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
429 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
430
430
431 ancestor = source_scm.get_common_ancestor(
431 ancestor = source_scm.get_common_ancestor(
432 source_commit.raw_id, target_commit.raw_id, target_scm)
432 source_commit.raw_id, target_commit.raw_id, target_scm)
433
433
434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
434 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
435 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
436
436
437 pullrequest_title = _form['pullrequest_title']
437 pullrequest_title = _form['pullrequest_title']
438 title_source_ref = source_ref.split(':', 2)[1]
438 title_source_ref = source_ref.split(':', 2)[1]
439 if not pullrequest_title:
439 if not pullrequest_title:
440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
440 pullrequest_title = PullRequestModel().generate_pullrequest_title(
441 source=source_repo,
441 source=source_repo,
442 source_ref=title_source_ref,
442 source_ref=title_source_ref,
443 target=target_repo
443 target=target_repo
444 )
444 )
445
445
446 description = _form['pullrequest_desc']
446 description = _form['pullrequest_desc']
447 try:
447 try:
448 pull_request = PullRequestModel().create(
448 pull_request = PullRequestModel().create(
449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
449 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
450 target_ref, commit_ids, reviewers, pullrequest_title,
450 target_ref, commit_ids, reviewers, pullrequest_title,
451 description
451 description
452 )
452 )
453 Session().commit()
453 Session().commit()
454 h.flash(_('Successfully opened new pull request'),
454 h.flash(_('Successfully opened new pull request'),
455 category='success')
455 category='success')
456 except Exception as e:
456 except Exception as e:
457 raise
457 msg = _('Error occurred during sending pull request')
458 msg = _('Error occurred during sending pull request')
458 log.exception(msg)
459 log.exception(msg)
459 h.flash(msg, category='error')
460 h.flash(msg, category='error')
460 return redirect(url('pullrequest_home', repo_name=repo_name))
461 return redirect(url('pullrequest_home', repo_name=repo_name))
461
462
462 return redirect(url('pullrequest_show', repo_name=target_repo,
463 return redirect(url('pullrequest_show', repo_name=target_repo,
463 pull_request_id=pull_request.pull_request_id))
464 pull_request_id=pull_request.pull_request_id))
464
465
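The form handling above implies that source_ref and target_ref arrive in a '<type>:<name>:<commit_id>' notation (the code splits on ':' to obtain the commit id, the name and the type). A hedged illustration of such a payload follows; the field names mirror the keys read from _form, and the concrete values are invented.

    # Hypothetical payload for creating a pull request; the ref notation
    # '<type>:<name>:<commit_id>' is inferred from the splits above, and the
    # concrete values are invented.
    example_create_payload = {
        'source_repo': 'group/my-fork',
        'source_ref': 'branch:feature-x:1234567890abcdef',
        'target_repo': 'group/upstream',
        'target_ref': 'branch:default:fedcba0987654321',
        'revisions': ['1234567890abcdef'],
        'review_members': ['2', '3'],
        'pullrequest_title': 'Add feature X',
        'pullrequest_desc': 'Implements feature X.',
    }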
465 @LoginRequired()
466 @LoginRequired()
466 @NotAnonymous()
467 @NotAnonymous()
467 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 'repository.admin')
469 'repository.admin')
469 @auth.CSRFRequired()
470 @auth.CSRFRequired()
470 @jsonify
471 @jsonify
471 def update(self, repo_name, pull_request_id):
472 def update(self, repo_name, pull_request_id):
472 pull_request_id = safe_int(pull_request_id)
473 pull_request_id = safe_int(pull_request_id)
473 pull_request = PullRequest.get_or_404(pull_request_id)
474 pull_request = PullRequest.get_or_404(pull_request_id)
474 # only owner or admin can update it
475 # only owner or admin can update it
475 allowed_to_update = PullRequestModel().check_user_update(
476 allowed_to_update = PullRequestModel().check_user_update(
476 pull_request, c.rhodecode_user)
477 pull_request, c.rhodecode_user)
477 if allowed_to_update:
478 if allowed_to_update:
478 if 'reviewers_ids' in request.POST:
479 if 'reviewers_ids' in request.POST:
479 self._update_reviewers(pull_request_id)
480 self._update_reviewers(pull_request_id)
480 elif str2bool(request.POST.get('update_commits', 'false')):
481 elif str2bool(request.POST.get('update_commits', 'false')):
481 self._update_commits(pull_request)
482 self._update_commits(pull_request)
482 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 elif str2bool(request.POST.get('close_pull_request', 'false')):
483 self._reject_close(pull_request)
484 self._reject_close(pull_request)
484 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 elif str2bool(request.POST.get('edit_pull_request', 'false')):
485 self._edit_pull_request(pull_request)
486 self._edit_pull_request(pull_request)
486 else:
487 else:
487 raise HTTPBadRequest()
488 raise HTTPBadRequest()
488 return True
489 return True
489 raise HTTPForbidden()
490 raise HTTPForbidden()
490
491
491 def _edit_pull_request(self, pull_request):
492 def _edit_pull_request(self, pull_request):
492 try:
493 try:
493 PullRequestModel().edit(
494 PullRequestModel().edit(
494 pull_request, request.POST.get('title'),
495 pull_request, request.POST.get('title'),
495 request.POST.get('description'))
496 request.POST.get('description'))
496 except ValueError:
497 except ValueError:
497 msg = _(u'Cannot update closed pull requests.')
498 msg = _(u'Cannot update closed pull requests.')
498 h.flash(msg, category='error')
499 h.flash(msg, category='error')
499 return
500 return
500 else:
501 else:
501 Session().commit()
502 Session().commit()
502
503
503 msg = _(u'Pull request title & description updated.')
504 msg = _(u'Pull request title & description updated.')
504 h.flash(msg, category='success')
505 h.flash(msg, category='success')
505 return
506 return
506
507
507 def _update_commits(self, pull_request):
508 def _update_commits(self, pull_request):
508 try:
509 try:
509 if PullRequestModel().has_valid_update_type(pull_request):
510 if PullRequestModel().has_valid_update_type(pull_request):
510 updated_version, changes = PullRequestModel().update_commits(
511 updated_version, changes = PullRequestModel().update_commits(
511 pull_request)
512 pull_request)
512 if updated_version:
513 if updated_version:
513 msg = _(
514 msg = _(
514 u'Pull request updated to "{source_commit_id}" with '
515 u'Pull request updated to "{source_commit_id}" with '
515 u'{count_added} added, {count_removed} removed '
516 u'{count_added} added, {count_removed} removed '
516 u'commits.'
517 u'commits.'
517 ).format(
518 ).format(
518 source_commit_id=pull_request.source_ref_parts.commit_id,
519 source_commit_id=pull_request.source_ref_parts.commit_id,
519 count_added=len(changes.added),
520 count_added=len(changes.added),
520 count_removed=len(changes.removed))
521 count_removed=len(changes.removed))
521 h.flash(msg, category='success')
522 h.flash(msg, category='success')
522 else:
523 else:
523 h.flash(_("Nothing changed in pull request."),
524 h.flash(_("Nothing changed in pull request."),
524 category='warning')
525 category='warning')
525 else:
526 else:
526 msg = _(
527 msg = _(
527 u"Skipping update of pull request due to reference "
528 u"Skipping update of pull request due to reference "
528 u"type: {reference_type}"
529 u"type: {reference_type}"
529 ).format(reference_type=pull_request.source_ref_parts.type)
530 ).format(reference_type=pull_request.source_ref_parts.type)
530 h.flash(msg, category='warning')
531 h.flash(msg, category='warning')
531 except CommitDoesNotExistError:
532 except CommitDoesNotExistError:
532 h.flash(
533 h.flash(
533 _(u'Update failed due to missing commits.'), category='error')
534 _(u'Update failed due to missing commits.'), category='error')
534
535
535 @auth.CSRFRequired()
536 @auth.CSRFRequired()
536 @LoginRequired()
537 @LoginRequired()
537 @NotAnonymous()
538 @NotAnonymous()
538 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
539 'repository.admin')
540 'repository.admin')
540 def merge(self, repo_name, pull_request_id):
541 def merge(self, repo_name, pull_request_id):
541 """
542 """
542 POST /{repo_name}/pull-request/{pull_request_id}
543 POST /{repo_name}/pull-request/{pull_request_id}
543
544
544 Merge will perform a server-side merge of the specified
545 Merge will perform a server-side merge of the specified
545 pull request, if the pull request is approved and mergeable.
546 pull request, if the pull request is approved and mergeable.
546 After successful merging, the pull request is automatically
547 After successful merging, the pull request is automatically
547 closed, with a relevant comment.
548 closed, with a relevant comment.
548 """
549 """
549 pull_request_id = safe_int(pull_request_id)
550 pull_request_id = safe_int(pull_request_id)
550 pull_request = PullRequest.get_or_404(pull_request_id)
551 pull_request = PullRequest.get_or_404(pull_request_id)
551 user = c.rhodecode_user
552 user = c.rhodecode_user
552
553
553 if self._meets_merge_pre_conditions(pull_request, user):
554 if self._meets_merge_pre_conditions(pull_request, user):
554 log.debug("Pre-conditions checked, trying to merge.")
555 log.debug("Pre-conditions checked, trying to merge.")
555 extras = vcs_operation_context(
556 extras = vcs_operation_context(
556 request.environ, repo_name=pull_request.target_repo.repo_name,
557 request.environ, repo_name=pull_request.target_repo.repo_name,
557 username=user.username, action='push',
558 username=user.username, action='push',
558 scm=pull_request.target_repo.repo_type)
559 scm=pull_request.target_repo.repo_type)
559 self._merge_pull_request(pull_request, user, extras)
560 self._merge_pull_request(pull_request, user, extras)
560
561
561 return redirect(url(
562 return redirect(url(
562 'pullrequest_show',
563 'pullrequest_show',
563 repo_name=pull_request.target_repo.repo_name,
564 repo_name=pull_request.target_repo.repo_name,
564 pull_request_id=pull_request.pull_request_id))
565 pull_request_id=pull_request.pull_request_id))
565
566
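As the docstring above notes, merging is driven by a POST to /{repo_name}/pull-request/{pull_request_id}. A minimal client-side sketch follows; it assumes an already authenticated session cookie and that the CSRF token travels in a 'csrf_token' form field, neither of which is confirmed by this changeset.

    # Minimal sketch of driving the merge endpoint from a script; the cookie
    # handling and the CSRF field name are assumptions.
    import requests

    def merge_pull_request(base_url, repo_name, pull_request_id,
                           session_cookies, csrf_token):
        url = '%s/%s/pull-request/%s' % (base_url, repo_name, pull_request_id)
        response = requests.post(
            url,
            data={'csrf_token': csrf_token},
            cookies=session_cookies,
            allow_redirects=False)  # the controller redirects to pullrequest_show
        return response.status_code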
566 def _meets_merge_pre_conditions(self, pull_request, user):
567 def _meets_merge_pre_conditions(self, pull_request, user):
567 if not PullRequestModel().check_user_merge(pull_request, user):
568 if not PullRequestModel().check_user_merge(pull_request, user):
568 raise HTTPForbidden()
569 raise HTTPForbidden()
569
570
570 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 merge_status, msg = PullRequestModel().merge_status(pull_request)
571 if not merge_status:
572 if not merge_status:
572 log.debug("Cannot merge, not mergeable.")
573 log.debug("Cannot merge, not mergeable.")
573 h.flash(msg, category='error')
574 h.flash(msg, category='error')
574 return False
575 return False
575
576
576 if (pull_request.calculated_review_status()
577 if (pull_request.calculated_review_status()
577 is not ChangesetStatus.STATUS_APPROVED):
578 is not ChangesetStatus.STATUS_APPROVED):
578 log.debug("Cannot merge, approval is pending.")
579 log.debug("Cannot merge, approval is pending.")
579 msg = _('Pull request reviewer approval is pending.')
580 msg = _('Pull request reviewer approval is pending.')
580 h.flash(msg, category='error')
581 h.flash(msg, category='error')
581 return False
582 return False
582 return True
583 return True
583
584
584 def _merge_pull_request(self, pull_request, user, extras):
585 def _merge_pull_request(self, pull_request, user, extras):
585 merge_resp = PullRequestModel().merge(
586 merge_resp = PullRequestModel().merge(
586 pull_request, user, extras=extras)
587 pull_request, user, extras=extras)
587
588
588 if merge_resp.executed:
589 if merge_resp.executed:
589 log.debug("The merge was successful, closing the pull request.")
590 log.debug("The merge was successful, closing the pull request.")
590 PullRequestModel().close_pull_request(
591 PullRequestModel().close_pull_request(
591 pull_request.pull_request_id, user)
592 pull_request.pull_request_id, user)
592 Session().commit()
593 Session().commit()
593 msg = _('Pull request was successfully merged and closed.')
594 msg = _('Pull request was successfully merged and closed.')
594 h.flash(msg, category='success')
595 h.flash(msg, category='success')
595 else:
596 else:
596 log.debug(
597 log.debug(
597 "The merge was not successful. Merge response: %s",
598 "The merge was not successful. Merge response: %s",
598 merge_resp)
599 merge_resp)
599 msg = PullRequestModel().merge_status_message(
600 msg = PullRequestModel().merge_status_message(
600 merge_resp.failure_reason)
601 merge_resp.failure_reason)
601 h.flash(msg, category='error')
602 h.flash(msg, category='error')
602
603
603 def _update_reviewers(self, pull_request_id):
604 def _update_reviewers(self, pull_request_id):
604 reviewers_ids = map(int, filter(
605 reviewers_ids = map(int, filter(
605 lambda v: v not in [None, ''],
606 lambda v: v not in [None, ''],
606 request.POST.get('reviewers_ids', '').split(',')))
607 request.POST.get('reviewers_ids', '').split(',')))
607 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
608 Session().commit()
609 Session().commit()
609
610
610 def _reject_close(self, pull_request):
611 def _reject_close(self, pull_request):
611 if pull_request.is_closed():
612 if pull_request.is_closed():
612 raise HTTPForbidden()
613 raise HTTPForbidden()
613
614
614 PullRequestModel().close_pull_request_with_comment(
615 PullRequestModel().close_pull_request_with_comment(
615 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
616 Session().commit()
617 Session().commit()
617
618
618 @LoginRequired()
619 @LoginRequired()
619 @NotAnonymous()
620 @NotAnonymous()
620 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
621 'repository.admin')
622 'repository.admin')
622 @auth.CSRFRequired()
623 @auth.CSRFRequired()
623 @jsonify
624 @jsonify
624 def delete(self, repo_name, pull_request_id):
625 def delete(self, repo_name, pull_request_id):
625 pull_request_id = safe_int(pull_request_id)
626 pull_request_id = safe_int(pull_request_id)
626 pull_request = PullRequest.get_or_404(pull_request_id)
627 pull_request = PullRequest.get_or_404(pull_request_id)
627 # only owner can delete it !
628 # only owner can delete it !
628 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 if pull_request.author.user_id == c.rhodecode_user.user_id:
629 PullRequestModel().delete(pull_request)
630 PullRequestModel().delete(pull_request)
630 Session().commit()
631 Session().commit()
631 h.flash(_('Successfully deleted pull request'),
632 h.flash(_('Successfully deleted pull request'),
632 category='success')
633 category='success')
633 return redirect(url('my_account_pullrequests'))
634 return redirect(url('my_account_pullrequests'))
634 raise HTTPForbidden()
635 raise HTTPForbidden()
635
636
636 @LoginRequired()
637 @LoginRequired()
637 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
638 'repository.admin')
639 'repository.admin')
639 def show(self, repo_name, pull_request_id):
640 def show(self, repo_name, pull_request_id):
640 pull_request_id = safe_int(pull_request_id)
641 pull_request_id = safe_int(pull_request_id)
641 c.pull_request = PullRequest.get_or_404(pull_request_id)
642 c.pull_request = PullRequest.get_or_404(pull_request_id)
642
643
643 # the pull request's repo_name is the repo it was opened against,
644 # the pull request's repo_name is the repo it was opened against,
644 # i.e. the target_repo must match
645 # i.e. the target_repo must match
645 if repo_name != c.pull_request.target_repo.repo_name:
646 if repo_name != c.pull_request.target_repo.repo_name:
646 raise HTTPNotFound
647 raise HTTPNotFound
647
648
648 c.allowed_to_change_status = PullRequestModel(). \
649 c.allowed_to_change_status = PullRequestModel(). \
649 check_user_change_status(c.pull_request, c.rhodecode_user)
650 check_user_change_status(c.pull_request, c.rhodecode_user)
650 c.allowed_to_update = PullRequestModel().check_user_update(
651 c.allowed_to_update = PullRequestModel().check_user_update(
651 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
652 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 c.allowed_to_merge = PullRequestModel().check_user_merge(
653 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654 c.pull_request, c.rhodecode_user) and not c.pull_request.is_closed()
654
655
655 cc_model = ChangesetCommentsModel()
656 cc_model = ChangesetCommentsModel()
656
657
657 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658 c.pull_request_reviewers = c.pull_request.reviewers_statuses()
658
659
659 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 c.pull_request_review_status = c.pull_request.calculated_review_status()
660 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 c.pr_merge_status, c.pr_merge_msg = PullRequestModel().merge_status(
661 c.pull_request)
662 c.pull_request)
662 c.approval_msg = None
663 c.approval_msg = None
663 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 if c.pull_request_review_status != ChangesetStatus.STATUS_APPROVED:
664 c.approval_msg = _('Reviewer approval is pending.')
665 c.approval_msg = _('Reviewer approval is pending.')
665 c.pr_merge_status = False
666 c.pr_merge_status = False
666 # load compare data into template context
667 # load compare data into template context
667 enable_comments = not c.pull_request.is_closed()
668 enable_comments = not c.pull_request.is_closed()
668 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669 self._load_compare_data(c.pull_request, enable_comments=enable_comments)
669
670
670 # This is a hack to properly display links: when creating a PR, the
671 # This is a hack to properly display links: when creating a PR, the
671 # compare view and others use a different notation, and
672 # compare view and others use a different notation, and
672 # compare_commits.html renders links based on the target_repo.
673 # compare_commits.html renders links based on the target_repo.
673 # We need to swap that here to generate the links properly on the HTML side.
674 # We need to swap that here to generate the links properly on the HTML side.
674 c.target_repo = c.source_repo
675 c.target_repo = c.source_repo
675
676
676 # inline comments
677 # inline comments
677 c.inline_cnt = 0
678 c.inline_cnt = 0
678 c.inline_comments = cc_model.get_inline_comments(
679 c.inline_comments = cc_model.get_inline_comments(
679 c.rhodecode_db_repo.repo_id,
680 c.rhodecode_db_repo.repo_id,
680 pull_request=pull_request_id).items()
681 pull_request=pull_request_id).items()
681 # count inline comments
682 # count inline comments
682 for __, lines in c.inline_comments:
683 for __, lines in c.inline_comments:
683 for comments in lines.values():
684 for comments in lines.values():
684 c.inline_cnt += len(comments)
685 c.inline_cnt += len(comments)
685
686
686 # outdated comments
687 # outdated comments
687 c.outdated_cnt = 0
688 c.outdated_cnt = 0
688 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 if ChangesetCommentsModel.use_outdated_comments(c.pull_request):
689 c.outdated_comments = cc_model.get_outdated_comments(
690 c.outdated_comments = cc_model.get_outdated_comments(
690 c.rhodecode_db_repo.repo_id,
691 c.rhodecode_db_repo.repo_id,
691 pull_request=c.pull_request)
692 pull_request=c.pull_request)
692 # Count outdated comments and check for deleted files
693 # Count outdated comments and check for deleted files
693 for file_name, lines in c.outdated_comments.iteritems():
694 for file_name, lines in c.outdated_comments.iteritems():
694 for comments in lines.values():
695 for comments in lines.values():
695 c.outdated_cnt += len(comments)
696 c.outdated_cnt += len(comments)
696 if file_name not in c.included_files:
697 if file_name not in c.included_files:
697 c.deleted_files.append(file_name)
698 c.deleted_files.append(file_name)
698 else:
699 else:
699 c.outdated_comments = {}
700 c.outdated_comments = {}
700
701
701 # comments
702 # comments
702 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 c.comments = cc_model.get_comments(c.rhodecode_db_repo.repo_id,
703 pull_request=pull_request_id)
704 pull_request=pull_request_id)
704
705
705 if c.allowed_to_update:
706 if c.allowed_to_update:
706 force_close = ('forced_closed', _('Close Pull Request'))
707 force_close = ('forced_closed', _('Close Pull Request'))
707 statuses = ChangesetStatus.STATUSES + [force_close]
708 statuses = ChangesetStatus.STATUSES + [force_close]
708 else:
709 else:
709 statuses = ChangesetStatus.STATUSES
710 statuses = ChangesetStatus.STATUSES
710 c.commit_statuses = statuses
711 c.commit_statuses = statuses
711
712
712 c.ancestor = None # TODO: add ancestor here
713 c.ancestor = None # TODO: add ancestor here
713
714
714 return render('/pullrequests/pullrequest_show.html')
715 return render('/pullrequests/pullrequest_show.html')
715
716
716 @LoginRequired()
717 @LoginRequired()
717 @NotAnonymous()
718 @NotAnonymous()
718 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
719 'repository.admin')
720 'repository.admin')
720 @auth.CSRFRequired()
721 @auth.CSRFRequired()
721 @jsonify
722 @jsonify
722 def comment(self, repo_name, pull_request_id):
723 def comment(self, repo_name, pull_request_id):
723 pull_request_id = safe_int(pull_request_id)
724 pull_request_id = safe_int(pull_request_id)
724 pull_request = PullRequest.get_or_404(pull_request_id)
725 pull_request = PullRequest.get_or_404(pull_request_id)
725 if pull_request.is_closed():
726 if pull_request.is_closed():
726 raise HTTPForbidden()
727 raise HTTPForbidden()
727
728
728 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 # TODO: johbo: Re-think this bit, "approved_closed" does not exist
729 # as a changeset status, still we want to send it in one value.
730 # as a changeset status, still we want to send it in one value.
730 status = request.POST.get('changeset_status', None)
731 status = request.POST.get('changeset_status', None)
731 text = request.POST.get('text')
732 text = request.POST.get('text')
732 if status and '_closed' in status:
733 if status and '_closed' in status:
733 close_pr = True
734 close_pr = True
734 status = status.replace('_closed', '')
735 status = status.replace('_closed', '')
735 else:
736 else:
736 close_pr = False
737 close_pr = False
737
738
738 forced = (status == 'forced')
739 forced = (status == 'forced')
739 if forced:
740 if forced:
740 status = 'rejected'
741 status = 'rejected'
741
742
742 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 allowed_to_change_status = PullRequestModel().check_user_change_status(
743 pull_request, c.rhodecode_user)
744 pull_request, c.rhodecode_user)
744
745
745 if status and allowed_to_change_status:
746 if status and allowed_to_change_status:
746 message = (_('Status change %(transition_icon)s %(status)s')
747 message = (_('Status change %(transition_icon)s %(status)s')
747 % {'transition_icon': '>',
748 % {'transition_icon': '>',
748 'status': ChangesetStatus.get_status_lbl(status)})
749 'status': ChangesetStatus.get_status_lbl(status)})
749 if close_pr:
750 if close_pr:
750 message = _('Closing with') + ' ' + message
751 message = _('Closing with') + ' ' + message
751 text = text or message
752 text = text or message
752 comm = ChangesetCommentsModel().create(
753 comm = ChangesetCommentsModel().create(
753 text=text,
754 text=text,
754 repo=c.rhodecode_db_repo.repo_id,
755 repo=c.rhodecode_db_repo.repo_id,
755 user=c.rhodecode_user.user_id,
756 user=c.rhodecode_user.user_id,
756 pull_request=pull_request_id,
757 pull_request=pull_request_id,
757 f_path=request.POST.get('f_path'),
758 f_path=request.POST.get('f_path'),
758 line_no=request.POST.get('line'),
759 line_no=request.POST.get('line'),
759 status_change=(ChangesetStatus.get_status_lbl(status)
760 status_change=(ChangesetStatus.get_status_lbl(status)
760 if status and allowed_to_change_status else None),
761 if status and allowed_to_change_status else None),
761 closing_pr=close_pr
762 closing_pr=close_pr
762 )
763 )
763
764
764 if allowed_to_change_status:
765 if allowed_to_change_status:
765 old_calculated_status = pull_request.calculated_review_status()
766 old_calculated_status = pull_request.calculated_review_status()
766 # get status if set !
767 # get status if set !
767 if status:
768 if status:
768 ChangesetStatusModel().set_status(
769 ChangesetStatusModel().set_status(
769 c.rhodecode_db_repo.repo_id,
770 c.rhodecode_db_repo.repo_id,
770 status,
771 status,
771 c.rhodecode_user.user_id,
772 c.rhodecode_user.user_id,
772 comm,
773 comm,
773 pull_request=pull_request_id
774 pull_request=pull_request_id
774 )
775 )
775
776
776 Session().flush()
777 Session().flush()
777 # we now calculate the status of pull request, and based on that
778 # we now calculate the status of pull request, and based on that
778 # calculation we set the commits status
779 # calculation we set the commits status
779 calculated_status = pull_request.calculated_review_status()
780 calculated_status = pull_request.calculated_review_status()
780 if old_calculated_status != calculated_status:
781 if old_calculated_status != calculated_status:
781 PullRequestModel()._trigger_pull_request_hook(
782 PullRequestModel()._trigger_pull_request_hook(
782 pull_request, c.rhodecode_user, 'review_status_change')
783 pull_request, c.rhodecode_user, 'review_status_change')
783
784
784 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 calculated_status_lbl = ChangesetStatus.get_status_lbl(
785 calculated_status)
786 calculated_status)
786
787
787 if close_pr:
788 if close_pr:
788 status_completed = (
789 status_completed = (
789 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 calculated_status in [ChangesetStatus.STATUS_APPROVED,
790 ChangesetStatus.STATUS_REJECTED])
791 ChangesetStatus.STATUS_REJECTED])
791 if forced or status_completed:
792 if forced or status_completed:
792 PullRequestModel().close_pull_request(
793 PullRequestModel().close_pull_request(
793 pull_request_id, c.rhodecode_user)
794 pull_request_id, c.rhodecode_user)
794 else:
795 else:
795 h.flash(_('Closing pull request on other statuses than '
796 h.flash(_('Closing pull request on other statuses than '
796 'rejected or approved is forbidden. '
797 'rejected or approved is forbidden. '
797 'Calculated status from all reviewers '
798 'Calculated status from all reviewers '
798 'is currently: %s') % calculated_status_lbl,
799 'is currently: %s') % calculated_status_lbl,
799 category='warning')
800 category='warning')
800
801
801 Session().commit()
802 Session().commit()
802
803
803 if not request.is_xhr:
804 if not request.is_xhr:
804 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 return redirect(h.url('pullrequest_show', repo_name=repo_name,
805 pull_request_id=pull_request_id))
806 pull_request_id=pull_request_id))
806
807
807 data = {
808 data = {
808 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
809 }
810 }
810 if comm:
811 if comm:
811 c.co = comm
812 c.co = comm
812 data.update(comm.get_dict())
813 data.update(comm.get_dict())
813 data.update({'rendered_text':
814 data.update({'rendered_text':
814 render('changeset/changeset_comment_block.html')})
815 render('changeset/changeset_comment_block.html')})
815
816
816 return data
817 return data
817
818
818 @LoginRequired()
819 @LoginRequired()
819 @NotAnonymous()
820 @NotAnonymous()
820 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
821 'repository.admin')
822 'repository.admin')
822 @auth.CSRFRequired()
823 @auth.CSRFRequired()
823 @jsonify
824 @jsonify
824 def delete_comment(self, repo_name, comment_id):
825 def delete_comment(self, repo_name, comment_id):
825 return self._delete_comment(comment_id)
826 return self._delete_comment(comment_id)
826
827
827 def _delete_comment(self, comment_id):
828 def _delete_comment(self, comment_id):
828 comment_id = safe_int(comment_id)
829 comment_id = safe_int(comment_id)
829 co = ChangesetComment.get_or_404(comment_id)
830 co = ChangesetComment.get_or_404(comment_id)
830 if co.pull_request.is_closed():
831 if co.pull_request.is_closed():
831 # don't allow deleting comments on closed pull request
832 # don't allow deleting comments on closed pull request
832 raise HTTPForbidden()
833 raise HTTPForbidden()
833
834
834 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 is_owner = co.author.user_id == c.rhodecode_user.user_id
835 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
836 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
837 old_calculated_status = co.pull_request.calculated_review_status()
838 old_calculated_status = co.pull_request.calculated_review_status()
838 ChangesetCommentsModel().delete(comment=co)
839 ChangesetCommentsModel().delete(comment=co)
839 Session().commit()
840 Session().commit()
840 calculated_status = co.pull_request.calculated_review_status()
841 calculated_status = co.pull_request.calculated_review_status()
841 if old_calculated_status != calculated_status:
842 if old_calculated_status != calculated_status:
842 PullRequestModel()._trigger_pull_request_hook(
843 PullRequestModel()._trigger_pull_request_hook(
843 co.pull_request, c.rhodecode_user, 'review_status_change')
844 co.pull_request, c.rhodecode_user, 'review_status_change')
844 return True
845 return True
845 else:
846 else:
846 raise HTTPForbidden()
847 raise HTTPForbidden()
@@ -1,59 +1,57 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from pyramid.threadlocal import get_current_registry
19 from pyramid.threadlocal import get_current_registry
20
20
21
21
22 class RhodecodeEvent(object):
23 """
24 Base event class for all Rhodecode events
25 """
26
27
28 def trigger(event):
22 def trigger(event):
29 """
23 """
30 Helper method to send an event. This wraps the Pyramid machinery used to
24 Helper method to send an event. This wraps the Pyramid machinery used to
31 notify registered subscribers.
25 notify registered subscribers.
32 """
26 """
33 # For the first step we are using pyramids thread locals here. If the
27 # For the first step we are using pyramids thread locals here. If the
34 # event mechanism works out as a good solution we should think about
28 # event mechanism works out as a good solution we should think about
35 # passing the registry as an argument to get rid of it.
29 # passing the registry as an argument to get rid of it.
36 registry = get_current_registry()
30 registry = get_current_registry()
37 registry.notify(event)
31 registry.notify(event)
38
32
39
33
34 from rhodecode.events.base import RhodecodeEvent
35
40 from rhodecode.events.user import (
36 from rhodecode.events.user import (
41 UserPreCreate,
37 UserPreCreate,
42 UserPreUpdate,
38 UserPreUpdate,
43 UserRegistered
39 UserRegistered
44 )
40 )
45
41
46 from rhodecode.events.repo import (
42 from rhodecode.events.repo import (
43 RepoEvent,
47 RepoPreCreateEvent, RepoCreatedEvent,
44 RepoPreCreateEvent, RepoCreatedEvent,
48 RepoPreDeleteEvent, RepoDeletedEvent,
45 RepoPreDeleteEvent, RepoDeletedEvent,
49 RepoPrePushEvent, RepoPushEvent,
46 RepoPrePushEvent, RepoPushEvent,
50 RepoPrePullEvent, RepoPullEvent,
47 RepoPrePullEvent, RepoPullEvent,
51 )
48 )
52
49
53 from rhodecode.events.pullrequest import (
50 from rhodecode.events.pullrequest import (
51 PullRequestEvent,
54 PullRequestCreateEvent,
52 PullRequestCreateEvent,
55 PullRequestUpdateEvent,
53 PullRequestUpdateEvent,
56 PullRequestReviewEvent,
54 PullRequestReviewEvent,
57 PullRequestMergeEvent,
55 PullRequestMergeEvent,
58 PullRequestCloseEvent,
56 PullRequestCloseEvent,
59 ) No newline at end of file
57 )
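Because the package now re-exports the event classes next to trigger(), consumers can hook in through Pyramid's subscriber machinery. The sketch below is an assumed wiring example; the includeme() entry point and the log message are illustrative and not part of this changeset, while config.add_subscriber() is standard Pyramid API.

    # Sketch: subscribe to push events via Pyramid's event system.
    import logging

    from rhodecode import events

    log = logging.getLogger(__name__)

    def log_push(event):
        # log the target repo and how many commits arrived with the push
        log.info('push into %s: %s commits',
                 event.repo.repo_name, len(event.pushed_commit_ids))

    def includeme(config):
        config.add_subscriber(log_push, events.RepoPushEvent)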
@@ -1,72 +1,97 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
20
19 from rhodecode.events.repo import RepoEvent
21 from rhodecode.events.repo import RepoEvent
20
22
21
23
24 def get_pull_request_url(pull_request):
25 from rhodecode.model.pull_request import PullRequestModel
26 return PullRequestModel().get_url(pull_request)
27
28
29 class PullRequestSchema(Schema):
30 """
31 Marshmallow schema for a pull request
32 """
33 pull_request_id = fields.Integer()
34 url = fields.Function(get_pull_request_url)
35 title = fields.Str()
36
37
38 class PullRequestEventSchema(RepoEvent.MarshmallowSchema):
39 """
40 Marshmallow schema for a pull request event
41 """
42 pullrequest = fields.Nested(PullRequestSchema)
43
44
22 class PullRequestEvent(RepoEvent):
45 class PullRequestEvent(RepoEvent):
23 """
46 """
24 Base class for events acting on a repository.
47 Base class for pull request events.
25
48
26 :param repo: a :class:`Repository` instance
49 :param pullrequest: a :class:`PullRequest` instance
27 """
50 """
51 MarshmallowSchema = PullRequestEventSchema
52
28 def __init__(self, pullrequest):
53 def __init__(self, pullrequest):
29 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
54 super(PullRequestEvent, self).__init__(pullrequest.target_repo)
30 self.pullrequest = pullrequest
55 self.pullrequest = pullrequest
31
56
32
57
33 class PullRequestCreateEvent(PullRequestEvent):
58 class PullRequestCreateEvent(PullRequestEvent):
34 """
59 """
35 An instance of this class is emitted as an :term:`event` after a pull
60 An instance of this class is emitted as an :term:`event` after a pull
36 request is created.
61 request is created.
37 """
62 """
38 name = 'pullrequest-create'
63 name = 'pullrequest-create'
39
64
40
65
41 class PullRequestCloseEvent(PullRequestEvent):
66 class PullRequestCloseEvent(PullRequestEvent):
42 """
67 """
43 An instance of this class is emitted as an :term:`event` after a pull
68 An instance of this class is emitted as an :term:`event` after a pull
44 request is closed.
69 request is closed.
45 """
70 """
46 name = 'pullrequest-close'
71 name = 'pullrequest-close'
47
72
48
73
49 class PullRequestUpdateEvent(PullRequestEvent):
74 class PullRequestUpdateEvent(PullRequestEvent):
50 """
75 """
51 An instance of this class is emitted as an :term:`event` after a pull
76 An instance of this class is emitted as an :term:`event` after a pull
52 request is updated.
77 request is updated.
53 """
78 """
54 name = 'pullrequest-update'
79 name = 'pullrequest-update'
55
80
56
81
57 class PullRequestMergeEvent(PullRequestEvent):
82 class PullRequestMergeEvent(PullRequestEvent):
58 """
83 """
59 An instance of this class is emitted as an :term:`event` after a pull
84 An instance of this class is emitted as an :term:`event` after a pull
60 request is merged.
85 request is merged.
61 """
86 """
62 name = 'pullrequest-merge'
87 name = 'pullrequest-merge'
63
88
64
89
65 class PullRequestReviewEvent(PullRequestEvent):
90 class PullRequestReviewEvent(PullRequestEvent):
66 """
91 """
67 An instance of this class is emitted as an :term:`event` after a pull
92 An instance of this class is emitted as an :term:`event` after a pull
68 request is reviewed.
93 request is reviewed.
69 """
94 """
70 name = 'pullrequest-review'
95 name = 'pullrequest-review'
71
96
72
97
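PullRequestEventSchema stacks the nested pull request fields on top of the repository and actor fields inherited from RepoEvent.MarshmallowSchema. A hedged sketch of firing and serializing a create event follows; it assumes marshmallow 2.x, where dump() returns a result object with a .data dict.

    # Sketch: emit a pullrequest-create event and serialize it with the
    # schema attached to the event class; marshmallow 2.x semantics assumed.
    from rhodecode import events

    def notify_pull_request_created(pull_request):
        event = events.PullRequestCreateEvent(pull_request)
        payload = event.MarshmallowSchema().dump(event).data
        # payload holds the nested 'pullrequest' dict (id, url, title) plus
        # the repository and acting-user fields from the parent schemas
        events.trigger(event)
        return payload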
@@ -1,113 +1,149 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from marshmallow import Schema, fields
20
19 from rhodecode.model.db import Repository, Session
21 from rhodecode.model.db import Repository, Session, User
20 from rhodecode.events import RhodecodeEvent
22 from rhodecode.events.base import RhodecodeEvent
23
24
25 def get_repo_url(repo):
26 from rhodecode.model.repo import RepoModel
27 return RepoModel().get_url(repo)
28
29
30 class RepositorySchema(Schema):
31 """
32 Marshmallow schema for a repository
33 """
34 repo_id = fields.Integer()
35 repo_name = fields.Str()
36 url = fields.Function(get_repo_url)
37
38
39 class RepoEventSchema(RhodecodeEvent.MarshmallowSchema):
40 """
41 Marshmallow schema for a repository event
42 """
43 repository = fields.Nested(RepositorySchema, attribute='repo')
21
44
22
45
23 class RepoEvent(RhodecodeEvent):
46 class RepoEvent(RhodecodeEvent):
24 """
47 """
25 Base class for events acting on a repository.
48 Base class for events acting on a repository.
26
49
27 :param repo: a :class:`Repository` instance
50 :param repo: a :class:`Repository` instance
28 """
51 """
52 MarshmallowSchema = RepoEventSchema
53
29 def __init__(self, repo):
54 def __init__(self, repo):
55 super(RepoEvent, self).__init__()
30 self.repo = repo
56 self.repo = repo
31
57
32
58
33 class RepoPreCreateEvent(RepoEvent):
59 class RepoPreCreateEvent(RepoEvent):
34 """
60 """
35 An instance of this class is emitted as an :term:`event` before a repo is
61 An instance of this class is emitted as an :term:`event` before a repo is
36 created.
62 created.
37 """
63 """
38 name = 'repo-pre-create'
64 name = 'repo-pre-create'
39
65
40
66
41 class RepoCreatedEvent(RepoEvent):
67 class RepoCreatedEvent(RepoEvent):
42 """
68 """
43 An instance of this class is emitted as an :term:`event` whenever a repo is
69 An instance of this class is emitted as an :term:`event` whenever a repo is
44 created.
70 created.
45 """
71 """
46 name = 'repo-created'
72 name = 'repo-created'
47
73
48
74
49 class RepoPreDeleteEvent(RepoEvent):
75 class RepoPreDeleteEvent(RepoEvent):
50 """
76 """
51 An instance of this class is emitted as an :term:`event` before a repo is
77 An instance of this class is emitted as an :term:`event` before a repo is
52 deleted.
78 deleted.
53 """
79 """
54 name = 'repo-pre-delete'
80 name = 'repo-pre-delete'
55
81
56
82
57 class RepoDeletedEvent(RepoEvent):
83 class RepoDeletedEvent(RepoEvent):
58 """
84 """
59 An instance of this class is emitted as an :term:`event` whenever a repo is
85 An instance of this class is emitted as an :term:`event` whenever a repo is
60 deleted.
86 deleted.
61 """
87 """
62 name = 'repo-deleted'
88 name = 'repo-deleted'
63
89
64
90
65 class RepoVCSEvent(RepoEvent):
91 class RepoVCSEvent(RepoEvent):
66 """
92 """
67 Base class for events triggered by the VCS
93 Base class for events triggered by the VCS
68 """
94 """
69 def __init__(self, repo_name, extras):
95 def __init__(self, repo_name, extras):
70 self.repo = Repository.get_by_repo_name(repo_name)
96 self.repo = Repository.get_by_repo_name(repo_name)
71 if not self.repo:
97 if not self.repo:
72 raise Exception('repo by this name %s does not exist' % repo_name)
98 raise Exception('repo by this name %s does not exist' % repo_name)
73 self.extras = extras
99 self.extras = extras
74 super(RepoVCSEvent, self).__init__(self.repo)
100 super(RepoVCSEvent, self).__init__(self.repo)
75
101
102 @property
103 def acting_user(self):
104 if self.extras.get('username'):
105 return User.get_by_username(self.extras['username'])
106
107 @property
108 def acting_ip(self):
109 if self.extras.get('ip'):
110 return self.extras['ip']
111
76
112
77 class RepoPrePullEvent(RepoVCSEvent):
113 class RepoPrePullEvent(RepoVCSEvent):
78 """
114 """
79 An instance of this class is emitted as an :term:`event` before commits
115 An instance of this class is emitted as an :term:`event` before commits
80 are pulled from a repo.
116 are pulled from a repo.
81 """
117 """
82 name = 'repo-pre-pull'
118 name = 'repo-pre-pull'
83
119
84
120
85 class RepoPullEvent(RepoVCSEvent):
121 class RepoPullEvent(RepoVCSEvent):
86 """
122 """
87 An instance of this class is emitted as an :term:`event` after commits
123 An instance of this class is emitted as an :term:`event` after commits
88 are pulled from a repo.
124 are pulled from a repo.
89 """
125 """
90 name = 'repo-pull'
126 name = 'repo-pull'
91
127
92
128
93 class RepoPrePushEvent(RepoVCSEvent):
129 class RepoPrePushEvent(RepoVCSEvent):
94 """
130 """
95 An instance of this class is emitted as an :term:`event` before commits
131 An instance of this class is emitted as an :term:`event` before commits
96 are pushed to a repo.
132 are pushed to a repo.
97 """
133 """
98 name = 'repo-pre-push'
134 name = 'repo-pre-push'
99
135
100
136
101 class RepoPushEvent(RepoVCSEvent):
137 class RepoPushEvent(RepoVCSEvent):
102 """
138 """
103 An instance of this class is emitted as an :term:`event` after commits
139 An instance of this class is emitted as an :term:`event` after commits
104 are pushed to a repo.
140 are pushed to a repo.
105
141
106 :param extras: (optional) dict of data from proxied VCS actions
142 :param extras: (optional) dict of data from proxied VCS actions
107 """
143 """
108 name = 'repo-push'
144 name = 'repo-push'
109
145
110 def __init__(self, repo_name, pushed_commit_ids, extras):
146 def __init__(self, repo_name, pushed_commit_ids, extras):
111 super(RepoPushEvent, self).__init__(repo_name, extras)
147 super(RepoPushEvent, self).__init__(repo_name, extras)
112 self.pushed_commit_ids = pushed_commit_ids
148 self.pushed_commit_ids = pushed_commit_ids
113
149
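RepoVCSEvent derives the acting user and IP from the 'extras' dict handed over by the VCS layer. The sketch below shows how a post-push hook might fire RepoPushEvent; the extras payload shown is hypothetical and only the 'username' and 'ip' keys mirror the lookups above.

    # Sketch: fire a repo-push event from a hook; the extras dict content is
    # hypothetical, only 'username' and 'ip' match the properties above.
    from rhodecode import events

    def post_push_hook(repo_name, pushed_commit_ids, extras):
        event = events.RepoPushEvent(
            repo_name=repo_name,
            pushed_commit_ids=pushed_commit_ids,
            extras=extras)
        events.trigger(event)

    # e.g. post_push_hook('some/repo', ['deadbeef'],
    #                     {'username': 'admin', 'ip': '127.0.0.1'})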
@@ -1,54 +1,55 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from zope.interface import implementer
19 from zope.interface import implementer
20 from rhodecode.events import RhodecodeEvent
20
21 from rhodecode.events.base import RhodecodeEvent
21 from rhodecode.events.interfaces import (
22 from rhodecode.events.interfaces import (
22 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23 IUserRegistered, IUserPreCreate, IUserPreUpdate)
23
24
24
25
25 @implementer(IUserRegistered)
26 @implementer(IUserRegistered)
26 class UserRegistered(RhodecodeEvent):
27 class UserRegistered(RhodecodeEvent):
27 """
28 """
28 An instance of this class is emitted as an :term:`event` whenever a user
29 An instance of this class is emitted as an :term:`event` whenever a user
29 account is registered.
30 account is registered.
30 """
31 """
31 def __init__(self, user, session):
32 def __init__(self, user, session):
32 self.user = user
33 self.user = user
33 self.session = session
34 self.session = session
34
35
35
36
36 @implementer(IUserPreCreate)
37 @implementer(IUserPreCreate)
37 class UserPreCreate(RhodecodeEvent):
38 class UserPreCreate(RhodecodeEvent):
38 """
39 """
39 An instance of this class is emitted as an :term:`event` before a new user
40 An instance of this class is emitted as an :term:`event` before a new user
40 object is created.
41 object is created.
41 """
42 """
42 def __init__(self, user_data):
43 def __init__(self, user_data):
43 self.user_data = user_data
44 self.user_data = user_data
44
45
45
46
46 @implementer(IUserPreUpdate)
47 @implementer(IUserPreUpdate)
47 class UserPreUpdate(RhodecodeEvent):
48 class UserPreUpdate(RhodecodeEvent):
48 """
49 """
49 An instance of this class is emitted as an :term:`event` before a user
50 An instance of this class is emitted as an :term:`event` before a user
50 object is updated.
51 object is updated.
51 """
52 """
52 def __init__(self, user, user_data):
53 def __init__(self, user, user_data):
53 self.user = user
54 self.user = user
54 self.user_data = user_data
55 self.user_data = user_data
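The user events carry plain dicts rather than database objects. A small sketch of emitting UserPreCreate before persisting a new account follows; the user_data contents and the surrounding function are illustrative.

    # Sketch: emit UserPreCreate before the actual user creation happens.
    from rhodecode import events

    def create_user(user_data):
        events.trigger(events.UserPreCreate(user_data))
        # ... the actual user creation would follow here ...
        return user_data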
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,1148 +1,1153 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


"""
pull request model for RhodeCode
"""

from collections import namedtuple
import json
import logging
import datetime

from pylons.i18n.translation import _
from pylons.i18n.translation import lazy_ugettext

import rhodecode
from rhodecode.lib import helpers as h, hooks_utils, diffs
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.hooks_daemon import prepare_callback_daemon
from rhodecode.lib.markup_renderer import (
    DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
from rhodecode.lib.utils import action_logger
from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
from rhodecode.lib.vcs.backends.base import (
    Reference, MergeResponse, MergeFailureReason)
from rhodecode.lib.vcs.exceptions import (
    CommitDoesNotExistError, EmptyRepositoryError)
from rhodecode.model import BaseModel
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import ChangesetCommentsModel
from rhodecode.model.db import (
    PullRequest, PullRequestReviewers, Notification, ChangesetStatus,
    PullRequestVersion, ChangesetComment)
from rhodecode.model.meta import Session
from rhodecode.model.notification import NotificationModel, \
    EmailNotificationModel
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel


log = logging.getLogger(__name__)


class PullRequestModel(BaseModel):

    cls = PullRequest

    DIFF_CONTEXT = 3

    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
    }

    def __get_pull_request(self, pull_request):
        return self._get_instance(PullRequest, pull_request)

    def _check_perms(self, perms, pull_request, user, api=False):
        if not api:
            return h.HasRepoPermissionAny(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)
        else:
            return h.HasRepoPermissionAnyApi(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)

    def check_user_read(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_merge(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'hg.admin',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_update(self, pull_request, user, api=False):
        owner = user.user_id == pull_request.user_id
        return self.check_user_merge(pull_request, user, api) or owner

    def check_user_change_status(self, pull_request, user, api=False):
        reviewer = user.user_id in [x.user_id for x in
                                    pull_request.reviewers]
        return self.check_user_update(pull_request, user, api) or reviewer

    def get(self, pull_request):
        return self.__get_pull_request(pull_request)

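    # Illustrative sketch of how the permission checks above layer on each
    # other (not part of the original module; `pr` and `cur_user` are
    # hypothetical objects):
    #
    #   model = PullRequestModel()
    #   model.check_user_read(pr, cur_user)           # any access on target repo
    #   model.check_user_merge(pr, cur_user)          # write/admin on target repo
    #   model.check_user_update(pr, cur_user)         # merge rights or PR owner
    #   model.check_user_change_status(pr, cur_user)  # update rights or reviewer
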
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc'):
        repo = self._get_repo(repo_name)
        q = PullRequest.query()
        # source or target
        if source:
            q = q.filter(PullRequest.source_repo == repo)
        else:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_all(self, repo_name, source=False, statuses=None,
                  opened_by=None):
        """
        Count the number of pull requests for a specific repository.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, source=source, statuses=statuses, opened_by=opened_by)

        return q.count()

    def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
                offset=0, length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            order_by=order_by, order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

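    # Illustrative usage sketch for count_all()/get_all() (not part of the
    # original module; the repo name and page size are hypothetical):
    #
    #   model = PullRequestModel()
    #   total = model.count_all('some-group/some-repo')
    #   newest_page = model.get_all(
    #       'some-group/some-repo', offset=0, length=20,
    #       order_by='updated_on_raw', order_dir='desc')
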
    def count_awaiting_review(self, repo_name, source=False, statuses=None,
                              opened_by=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        pull_requests = self.get_awaiting_review(
            repo_name, source=source, statuses=statuses, opened_by=opened_by)

        return len(pull_requests)

    def get_awaiting_review(self, repo_name, source=False, statuses=None,
                            opened_by=None, offset=0, length=None,
                            order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            order_by=order_by, order_dir=order_dir)

        _filtered_pull_requests = []
        for pr in pull_requests:
            status = pr.calculated_review_status()
            if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
                          ChangesetStatus.STATUS_UNDER_REVIEW]:
                _filtered_pull_requests.append(pr)
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests

    def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
                                 opened_by=None, user_id=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review from a specific user.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param user_id: reviewer user of the pull request
        :returns: int number of pull requests
        """
        pull_requests = self.get_awaiting_my_review(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            user_id=user_id)

        return len(pull_requests)

    def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
                               opened_by=None, user_id=None, offset=0,
                               length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review from a specific user.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param user_id: reviewer user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, source=source, statuses=statuses, opened_by=opened_by,
            order_by=order_by, order_dir=order_dir)

        _my = PullRequestModel().get_not_reviewed(user_id)
        my_participation = []
        for pr in pull_requests:
            if pr in _my:
                my_participation.append(pr)
        _filtered_pull_requests = my_participation
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests

    def get_not_reviewed(self, user_id):
        return [
            x.pull_request for x in PullRequestReviewers.query().filter(
                PullRequestReviewers.user_id == user_id).all()
        ]

    def get_versions(self, pull_request):
        """
        Returns the versions of the given pull request, sorted by version id
        ascending.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()

    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None):
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user

        Session().add(pull_request)
        Session().flush()

        # members / reviewers
        for user_id in set(reviewers):
            user = self._get_user(user_id)
            reviewer = PullRequestReviewers(user, pull_request)
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        self.notify_reviewers(pull_request, reviewers)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        return pull_request

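    # Illustrative sketch of a minimal create() call (not part of the original
    # module; the user ids, repo names and the 'type:name:commit_id' ref
    # strings are hypothetical values inferred from how refs are rebuilt in
    # update_commits() below):
    #
    #   pr = PullRequestModel().create(
    #       created_by=2,
    #       source_repo='fork/my-repo',
    #       source_ref='branch:feature:<commit_id>',
    #       target_repo='origin/my-repo',
    #       target_ref='branch:default:<commit_id>',
    #       revisions=['<commit_id>'],
    #       reviewers=[3, 4],
    #       title='Add feature',
    #       description='optional text')
    #   Session().commit()
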
    def _trigger_pull_request_hook(self, pull_request, user, action):
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
        else:
            return

        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_alias=target_scm.alias,
            pull_request=pull_request)

    def _get_commit_ids(self, pull_request):
        """
        Return the commit ids of the merged pull request.

        This method does not yet deal correctly with the lack of auto-updates,
        nor with implicit target updates. For example, a commit in the source
        repo that is already present in the target is still reported.
        """
        merge_rev = pull_request.merge_rev
        if merge_rev is None:
            raise ValueError('This pull request was not merged yet')

        commit_ids = list(pull_request.revisions)
        if merge_rev not in commit_ids:
            commit_ids.append(merge_rev)

        return commit_ids

    def merge(self, pull_request, user, extras):
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug(
                "Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)
            self._log_action('user_merged_pull_request', user, pull_request)
        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state

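    # Illustrative sketch of driving a merge from calling code (not part of
    # the original module; `pr`, `cur_user` and `extras` are hypothetical and
    # in practice come from the request context):
    #
    #   state = PullRequestModel().merge(pr, cur_user, extras=extras)
    #   if not state.executed:
    #       msg = PullRequestModel().merge_status_message(state.failure_reason)
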
    def _merge_pull_request(self, pull_request, user, extras):
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        protocol = rhodecode.CONFIG.get('vcs.hooks.protocol')
        use_direct_calls = rhodecode.CONFIG.get('vcs.hooks.direct_calls')
        use_rebase = self._use_rebase_for_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=protocol, use_direct_calls=use_direct_calls)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state

    def _comment_and_close_pr(self, pull_request, user, merge_state):
        pull_request.merge_rev = merge_state.merge_commit_id
        pull_request.updated_on = datetime.datetime.now()

        ChangesetCommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')

    def has_valid_update_type(self, pull_request):
        source_ref_type = pull_request.source_ref_parts.type
        return source_ref_type in ['book', 'branch', 'tag']

    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action
        """

        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return (None, None)

        source_repo = pull_request.source_repo.scm_instance()
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
        if source_ref_id == source_commit.raw_id:
            log.debug("Nothing changed in pull request %s", pull_request)
            return (None, None)

        # Finally there is a need for an update
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        if target_ref_type in ('tag', 'branch', 'book'):
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)

        # re-compute commit ids
        old_commit_ids = set(pull_request.revisions)
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)
        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = set(pull_request.revisions)

        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        ChangesetCommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # Add an automatic comment to the pull request
        update_comment = ChangesetCommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug('Updated pull request with the following file changes: %s',
                  file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(pull_request, pull_request.author,
                                        'update')
        return (pull_request_version, changes)

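    # Illustrative sketch of consuming update_commits() (not part of the
    # original module; `pr` is a hypothetical pull request instance):
    #
    #   version, changes = PullRequestModel().update_commits(pr)
    #   if version is None:
    #       pass  # nothing changed, or the source ref type cannot be updated
    #   else:
    #       log.debug('new version %s adds commits %s',
    #                 version.pull_request_version_id, changes.added)
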
    def _create_version_from_snapshot(self, pull_request):
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = pull_request.created_on
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version._last_merge_status = pull_request._last_merge_status
        version.merge_rev = pull_request.merge_rev

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        Session().flush()

        return version

    def _generate_update_diffs(self, pull_request, pull_request_version):
        diff_context = (
            self.DIFF_CONTEXT +
            ChangesetCommentsModel.needed_extra_diff_context())
        old_diff = self._get_diff_from_pr_or_version(
            pull_request_version, context=diff_context)
        new_diff = self._get_diff_from_pr_or_version(
            pull_request, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data

    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query().filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            ChangesetComment.pull_request_version == None)

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)

    def _calculate_commit_id_changes(self, old_ids, new_ids):
        added = new_ids.difference(old_ids)
        common = old_ids.intersection(new_ids)
        removed = old_ids.difference(new_ids)
        return ChangeTuple(added, common, removed)

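    # Worked example of the set arithmetic in _calculate_commit_id_changes()
    # (illustrative only; commit ids shortened):
    #
    #   old_ids = {'a1', 'b2', 'c3'}
    #   new_ids = {'b2', 'c3', 'd4'}
    #   -> ChangeTuple(added={'d4'}, common={'b2', 'c3'}, removed={'a1'})
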
    def _calculate_file_changes(self, old_diff_data, new_diff_data):

        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in the old diff, which means it was added
                added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove the file from old_files, since we have seen it
                del old_files[new_filename]

        # removed files are those present in the old diff but not in the new
        # one; every file seen in the new diff was deleted from old_files
        # above, so any left-overs are the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)

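    # Worked example of the per-file md5 comparison above (illustrative only;
    # hashes shortened):
    #
    #   old diff hashes: {'setup.py': 'aaa', 'README.rst': 'bbb'}
    #   new diff hashes: {'setup.py': 'ccc', 'docs/api.rst': 'ddd'}
    #   -> added=['docs/api.rst'], modified=['setup.py'],
    #      removed=['README.rst']
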
    def _render_update_message(self, changes, file_changes):
        """
        Render the update message using DEFAULT_COMMENTS_RENDERER (the RST
        renderer), so that it always looks the same regardless of which
        renderer is configured as the system default.

        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)

    def edit(self, pull_request, title, description):
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    def update_reviewers(self, pull_request, reviewers_ids):
        reviewers_ids = set(reviewers_ids)
        pull_request = self.__get_pull_request(pull_request)
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers(_usr, pull_request)
            Session().add(reviewer)

        self.notify_reviewers(pull_request, ids_to_add)

        for uid in ids_to_remove:
            changed = True
            reviewer = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .scalar()
            if reviewer:
                Session().delete(reviewer)
        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        return ids_to_add, ids_to_remove

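    # Illustrative sketch of update_reviewers() (not part of the original
    # module; `pr` and the reviewer ids are hypothetical):
    #
    #   added, removed = PullRequestModel().update_reviewers(pr, [2, 5, 7])
    #   # ids in [2, 5, 7] that are not yet reviewers are added and notified;
    #   # current reviewers missing from [2, 5, 7] are removed
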
    def get_url(self, pull_request):
        return h.url('pullrequest_show',
                     repo_name=pull_request.target_repo.repo_name,
                     pull_request_id=pull_request.pull_request_id,
                     qualified=True)

751 def notify_reviewers(self, pull_request, reviewers_ids):
756 def notify_reviewers(self, pull_request, reviewers_ids):
752 # notification to reviewers
757 # notification to reviewers
753 if not reviewers_ids:
758 if not reviewers_ids:
754 return
759 return
755
760
756 pull_request_obj = pull_request
761 pull_request_obj = pull_request
757 # get the current participants of this pull request
762 # get the current participants of this pull request
758 recipients = reviewers_ids
763 recipients = reviewers_ids
759 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
764 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
760
765
761 pr_source_repo = pull_request_obj.source_repo
766 pr_source_repo = pull_request_obj.source_repo
762 pr_target_repo = pull_request_obj.target_repo
767 pr_target_repo = pull_request_obj.target_repo
763
768
764 pr_url = h.url(
769 pr_url = h.url(
765 'pullrequest_show',
770 'pullrequest_show',
766 repo_name=pr_target_repo.repo_name,
771 repo_name=pr_target_repo.repo_name,
767 pull_request_id=pull_request_obj.pull_request_id,
772 pull_request_id=pull_request_obj.pull_request_id,
768 qualified=True,)
773 qualified=True,)
769
774
770 # set some variables for email notification
775 # set some variables for email notification
771 pr_target_repo_url = h.url(
776 pr_target_repo_url = h.url(
772 'summary_home',
777 'summary_home',
773 repo_name=pr_target_repo.repo_name,
778 repo_name=pr_target_repo.repo_name,
774 qualified=True)
779 qualified=True)
775
780
776 pr_source_repo_url = h.url(
781 pr_source_repo_url = h.url(
777 'summary_home',
782 'summary_home',
778 repo_name=pr_source_repo.repo_name,
783 repo_name=pr_source_repo.repo_name,
779 qualified=True)
784 qualified=True)
780
785
781 # pull request specifics
786 # pull request specifics
782 pull_request_commits = [
787 pull_request_commits = [
783 (x.raw_id, x.message)
788 (x.raw_id, x.message)
784 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
789 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
785
790
786 kwargs = {
791 kwargs = {
787 'user': pull_request.author,
792 'user': pull_request.author,
788 'pull_request': pull_request_obj,
793 'pull_request': pull_request_obj,
789 'pull_request_commits': pull_request_commits,
794 'pull_request_commits': pull_request_commits,
790
795
791 'pull_request_target_repo': pr_target_repo,
796 'pull_request_target_repo': pr_target_repo,
792 'pull_request_target_repo_url': pr_target_repo_url,
797 'pull_request_target_repo_url': pr_target_repo_url,
793
798
794 'pull_request_source_repo': pr_source_repo,
799 'pull_request_source_repo': pr_source_repo,
795 'pull_request_source_repo_url': pr_source_repo_url,
800 'pull_request_source_repo_url': pr_source_repo_url,
796
801
797 'pull_request_url': pr_url,
802 'pull_request_url': pr_url,
798 }
803 }
799
804
800 # pre-generate the subject for notification itself
805 # pre-generate the subject for notification itself
801 (subject,
806 (subject,
802 _h, _e, # we don't care about those
807 _h, _e, # we don't care about those
803 body_plaintext) = EmailNotificationModel().render_email(
808 body_plaintext) = EmailNotificationModel().render_email(
804 notification_type, **kwargs)
809 notification_type, **kwargs)
805
810
806 # create notification objects, and emails
811 # create notification objects, and emails
807 NotificationModel().create(
812 NotificationModel().create(
808 created_by=pull_request.author,
813 created_by=pull_request.author,
809 notification_subject=subject,
814 notification_subject=subject,
810 notification_body=body_plaintext,
815 notification_body=body_plaintext,
811 notification_type=notification_type,
816 notification_type=notification_type,
812 recipients=recipients,
817 recipients=recipients,
813 email_kwargs=kwargs,
818 email_kwargs=kwargs,
814 )
819 )
815
820
816 def delete(self, pull_request):
821 def delete(self, pull_request):
817 pull_request = self.__get_pull_request(pull_request)
822 pull_request = self.__get_pull_request(pull_request)
818 self._cleanup_merge_workspace(pull_request)
823 self._cleanup_merge_workspace(pull_request)
819 Session().delete(pull_request)
824 Session().delete(pull_request)
820
825
821 def close_pull_request(self, pull_request, user):
826 def close_pull_request(self, pull_request, user):
822 pull_request = self.__get_pull_request(pull_request)
827 pull_request = self.__get_pull_request(pull_request)
823 self._cleanup_merge_workspace(pull_request)
828 self._cleanup_merge_workspace(pull_request)
824 pull_request.status = PullRequest.STATUS_CLOSED
829 pull_request.status = PullRequest.STATUS_CLOSED
825 pull_request.updated_on = datetime.datetime.now()
830 pull_request.updated_on = datetime.datetime.now()
826 Session().add(pull_request)
831 Session().add(pull_request)
827 self._trigger_pull_request_hook(
832 self._trigger_pull_request_hook(
828 pull_request, pull_request.author, 'close')
833 pull_request, pull_request.author, 'close')
829 self._log_action('user_closed_pull_request', user, pull_request)
834 self._log_action('user_closed_pull_request', user, pull_request)
830
835
831 def close_pull_request_with_comment(self, pull_request, user, repo,
836 def close_pull_request_with_comment(self, pull_request, user, repo,
832 message=None):
837 message=None):
833 status = ChangesetStatus.STATUS_REJECTED
838 status = ChangesetStatus.STATUS_REJECTED
834
839
835 if not message:
840 if not message:
836 message = (
841 message = (
837 _('Status change %(transition_icon)s %(status)s') % {
842 _('Status change %(transition_icon)s %(status)s') % {
838 'transition_icon': '>',
843 'transition_icon': '>',
839 'status': ChangesetStatus.get_status_lbl(status)})
844 'status': ChangesetStatus.get_status_lbl(status)})
840
845
841 internal_message = _('Closing with') + ' ' + message
846 internal_message = _('Closing with') + ' ' + message
842
847
843 comm = ChangesetCommentsModel().create(
848 comm = ChangesetCommentsModel().create(
844 text=internal_message,
849 text=internal_message,
845 repo=repo.repo_id,
850 repo=repo.repo_id,
846 user=user.user_id,
851 user=user.user_id,
847 pull_request=pull_request.pull_request_id,
852 pull_request=pull_request.pull_request_id,
848 f_path=None,
853 f_path=None,
849 line_no=None,
854 line_no=None,
850 status_change=ChangesetStatus.get_status_lbl(status),
855 status_change=ChangesetStatus.get_status_lbl(status),
851 closing_pr=True
856 closing_pr=True
852 )
857 )
853
858
854 ChangesetStatusModel().set_status(
859 ChangesetStatusModel().set_status(
855 repo.repo_id,
860 repo.repo_id,
856 status,
861 status,
857 user.user_id,
862 user.user_id,
858 comm,
863 comm,
859 pull_request=pull_request.pull_request_id
864 pull_request=pull_request.pull_request_id
860 )
865 )
861 Session().flush()
866 Session().flush()
862
867
863 PullRequestModel().close_pull_request(
868 PullRequestModel().close_pull_request(
864 pull_request.pull_request_id, user)
869 pull_request.pull_request_id, user)
865
870
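When no message is passed, close_pull_request_with_comment synthesizes a rejection status-change comment before recording the ChangesetStatus and closing the pull request. A standalone sketch of the text it produces, assuming the label for STATUS_REJECTED is 'Rejected' and ignoring translation:

    # Default comment text built when no message is supplied; the 'Rejected' label
    # is an assumption about ChangesetStatus.get_status_lbl(STATUS_REJECTED).
    status_label = 'Rejected'
    message = 'Status change %(transition_icon)s %(status)s' % {
        'transition_icon': '>',
        'status': status_label,
    }
    internal_message = 'Closing with' + ' ' + message
    assert internal_message == 'Closing with Status change > Rejected'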
866 def merge_status(self, pull_request):
871 def merge_status(self, pull_request):
867 if not self._is_merge_enabled(pull_request):
872 if not self._is_merge_enabled(pull_request):
868 return False, _('Server-side pull request merging is disabled.')
873 return False, _('Server-side pull request merging is disabled.')
869 if pull_request.is_closed():
874 if pull_request.is_closed():
870 return False, _('This pull request is closed.')
875 return False, _('This pull request is closed.')
871 merge_possible, msg = self._check_repo_requirements(
876 merge_possible, msg = self._check_repo_requirements(
872 target=pull_request.target_repo, source=pull_request.source_repo)
877 target=pull_request.target_repo, source=pull_request.source_repo)
873 if not merge_possible:
878 if not merge_possible:
874 return merge_possible, msg
879 return merge_possible, msg
875
880
876 try:
881 try:
877 resp = self._try_merge(pull_request)
882 resp = self._try_merge(pull_request)
878 status = resp.possible, self.merge_status_message(
883 status = resp.possible, self.merge_status_message(
879 resp.failure_reason)
884 resp.failure_reason)
880 except NotImplementedError:
885 except NotImplementedError:
881 status = False, _('Pull request merging is not supported.')
886 status = False, _('Pull request merging is not supported.')
882
887
883 return status
888 return status
884
889
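merge_status() always hands back a (bool, translated message) pair, whether it comes from the enabled/closed checks, the repo requirement check, the dry-run merge, or the NotImplementedError fallback. A small standalone sketch of consuming that pair; the banner helper below is hypothetical, not part of the model:

    def render_merge_banner(merge_possible, reason):
        # merge_status() returns exactly this (bool, translated message) pair
        return 'ready to merge' if merge_possible else 'cannot merge: %s' % reason

    assert render_merge_banner(True, '') == 'ready to merge'
    assert render_merge_banner(False, 'This pull request is closed.') == \
        'cannot merge: This pull request is closed.'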
885 def _check_repo_requirements(self, target, source):
890 def _check_repo_requirements(self, target, source):
886 """
891 """
887 Check if `target` and `source` have compatible requirements.
892 Check if `target` and `source` have compatible requirements.
888
893
889 Currently this is just checking for largefiles.
894 Currently this is just checking for largefiles.
890 """
895 """
891 target_has_largefiles = self._has_largefiles(target)
896 target_has_largefiles = self._has_largefiles(target)
892 source_has_largefiles = self._has_largefiles(source)
897 source_has_largefiles = self._has_largefiles(source)
893 merge_possible = True
898 merge_possible = True
894 message = u''
899 message = u''
895
900
896 if target_has_largefiles != source_has_largefiles:
901 if target_has_largefiles != source_has_largefiles:
897 merge_possible = False
902 merge_possible = False
898 if source_has_largefiles:
903 if source_has_largefiles:
899 message = _(
904 message = _(
900 'Target repository large files support is disabled.')
905 'Target repository large files support is disabled.')
901 else:
906 else:
902 message = _(
907 message = _(
903 'Source repository large files support is disabled.')
908 'Source repository large files support is disabled.')
904
909
905 return merge_possible, message
910 return merge_possible, message
906
911
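The check above reduces to: merging is only possible when both repositories agree on largefiles support, and the message names whichever side has it disabled. A standalone mirror of that rule (independent of the model classes, messages untranslated):

    def largefiles_compatible(target_has_largefiles, source_has_largefiles):
        # Mirror of the rule in _check_repo_requirements: both sides must agree.
        if target_has_largefiles == source_has_largefiles:
            return True, ''
        if source_has_largefiles:
            return False, 'Target repository large files support is disabled.'
        return False, 'Source repository large files support is disabled.'

    assert largefiles_compatible(True, True) == (True, '')
    assert largefiles_compatible(False, False) == (True, '')
    assert largefiles_compatible(False, True)[1].startswith('Target')
    assert largefiles_compatible(True, False)[1].startswith('Source')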
907 def _has_largefiles(self, repo):
912 def _has_largefiles(self, repo):
908 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
913 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
909 'extensions', 'largefiles')
914 'extensions', 'largefiles')
910 return largefiles_ui and largefiles_ui[0].active
915 return largefiles_ui and largefiles_ui[0].active
911
916
912 def _try_merge(self, pull_request):
917 def _try_merge(self, pull_request):
913 """
918 """
914 Try to merge the pull request and return the merge status.
919 Try to merge the pull request and return the merge status.
915 """
920 """
916 log.debug(
921 log.debug(
917 "Trying out if the pull request %s can be merged.",
922 "Trying out if the pull request %s can be merged.",
918 pull_request.pull_request_id)
923 pull_request.pull_request_id)
919 target_vcs = pull_request.target_repo.scm_instance()
924 target_vcs = pull_request.target_repo.scm_instance()
920 target_ref = self._refresh_reference(
925 target_ref = self._refresh_reference(
921 pull_request.target_ref_parts, target_vcs)
926 pull_request.target_ref_parts, target_vcs)
922
927
923 target_locked = pull_request.target_repo.locked
928 target_locked = pull_request.target_repo.locked
924 if target_locked and target_locked[0]:
929 if target_locked and target_locked[0]:
925 log.debug("The target repository is locked.")
930 log.debug("The target repository is locked.")
926 merge_state = MergeResponse(
931 merge_state = MergeResponse(
927 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
932 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
928 elif self._needs_merge_state_refresh(pull_request, target_ref):
933 elif self._needs_merge_state_refresh(pull_request, target_ref):
929 log.debug("Refreshing the merge status of the repository.")
934 log.debug("Refreshing the merge status of the repository.")
930 merge_state = self._refresh_merge_state(
935 merge_state = self._refresh_merge_state(
931 pull_request, target_vcs, target_ref)
936 pull_request, target_vcs, target_ref)
932 else:
937 else:
933 possible = pull_request.\
938 possible = pull_request.\
934 _last_merge_status == MergeFailureReason.NONE
939 _last_merge_status == MergeFailureReason.NONE
935 merge_state = MergeResponse(
940 merge_state = MergeResponse(
936 possible, False, None, pull_request._last_merge_status)
941 possible, False, None, pull_request._last_merge_status)
937 log.debug("Merge response: %s", merge_state)
942 log.debug("Merge response: %s", merge_state)
938 return merge_state
943 return merge_state
939
944
940 def _refresh_reference(self, reference, vcs_repository):
945 def _refresh_reference(self, reference, vcs_repository):
941 if reference.type in ('branch', 'book'):
946 if reference.type in ('branch', 'book'):
942 name_or_id = reference.name
947 name_or_id = reference.name
943 else:
948 else:
944 name_or_id = reference.commit_id
949 name_or_id = reference.commit_id
945 refreshed_commit = vcs_repository.get_commit(name_or_id)
950 refreshed_commit = vcs_repository.get_commit(name_or_id)
946 refreshed_reference = Reference(
951 refreshed_reference = Reference(
947 reference.type, reference.name, refreshed_commit.raw_id)
952 reference.type, reference.name, refreshed_commit.raw_id)
948 return refreshed_reference
953 return refreshed_reference
949
954
950 def _needs_merge_state_refresh(self, pull_request, target_reference):
955 def _needs_merge_state_refresh(self, pull_request, target_reference):
951 return not (
956 return not (
952 pull_request.revisions and
957 pull_request.revisions and
953 pull_request.revisions[0] == pull_request._last_merge_source_rev and
958 pull_request.revisions[0] == pull_request._last_merge_source_rev and
954 target_reference.commit_id == pull_request._last_merge_target_rev)
959 target_reference.commit_id == pull_request._last_merge_target_rev)
955
960
956 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
961 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
957 workspace_id = self._workspace_id(pull_request)
962 workspace_id = self._workspace_id(pull_request)
958 source_vcs = pull_request.source_repo.scm_instance()
963 source_vcs = pull_request.source_repo.scm_instance()
959 use_rebase = self._use_rebase_for_merging(pull_request)
964 use_rebase = self._use_rebase_for_merging(pull_request)
960 merge_state = target_vcs.merge(
965 merge_state = target_vcs.merge(
961 target_reference, source_vcs, pull_request.source_ref_parts,
966 target_reference, source_vcs, pull_request.source_ref_parts,
962 workspace_id, dry_run=True, use_rebase=use_rebase)
967 workspace_id, dry_run=True, use_rebase=use_rebase)
963
968
964 # Do not store the response if there was an unknown error.
969 # Do not store the response if there was an unknown error.
965 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
970 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
966 pull_request._last_merge_source_rev = pull_request.\
971 pull_request._last_merge_source_rev = pull_request.\
967 source_ref_parts.commit_id
972 source_ref_parts.commit_id
968 pull_request._last_merge_target_rev = target_reference.commit_id
973 pull_request._last_merge_target_rev = target_reference.commit_id
969 pull_request._last_merge_status = (
974 pull_request._last_merge_status = (
970 merge_state.failure_reason)
975 merge_state.failure_reason)
971 Session().add(pull_request)
976 Session().add(pull_request)
972 Session().flush()
977 Session().flush()
973
978
974 return merge_state
979 return merge_state
975
980
976 def _workspace_id(self, pull_request):
981 def _workspace_id(self, pull_request):
977 workspace_id = 'pr-%s' % pull_request.pull_request_id
982 workspace_id = 'pr-%s' % pull_request.pull_request_id
978 return workspace_id
983 return workspace_id
979
984
980 def merge_status_message(self, status_code):
985 def merge_status_message(self, status_code):
981 """
986 """
982 Return a human-friendly error message for the given merge status code.
987 Return a human-friendly error message for the given merge status code.
983 """
988 """
984 return self.MERGE_STATUS_MESSAGES[status_code]
989 return self.MERGE_STATUS_MESSAGES[status_code]
985
990
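MERGE_STATUS_MESSAGES itself is defined elsewhere on this class and is not part of the shown hunk; based on the codes referenced above (MergeFailureReason.NONE, UNKNOWN, TARGET_IS_LOCKED) it is a lookup from failure-reason codes to translated strings. A purely illustrative stand-in, with assumed keys and wording:

    # Illustrative shape only -- the real mapping lives on PullRequestModel.
    MERGE_STATUS_MESSAGES_SKETCH = {
        'NONE': 'This pull request can be automatically merged.',
        'TARGET_IS_LOCKED': 'The target repository is locked.',
        'UNKNOWN': 'This pull request cannot be merged because of an unknown error.',
    }

    def merge_status_message_sketch(status_code):
        return MERGE_STATUS_MESSAGES_SKETCH[status_code]

    assert 'locked' in merge_status_message_sketch('TARGET_IS_LOCKED')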
986 def generate_repo_data(self, repo, commit_id=None, branch=None,
991 def generate_repo_data(self, repo, commit_id=None, branch=None,
987 bookmark=None):
992 bookmark=None):
988 all_refs, selected_ref = \
993 all_refs, selected_ref = \
989 self._get_repo_pullrequest_sources(
994 self._get_repo_pullrequest_sources(
990 repo.scm_instance(), commit_id=commit_id,
995 repo.scm_instance(), commit_id=commit_id,
991 branch=branch, bookmark=bookmark)
996 branch=branch, bookmark=bookmark)
992
997
993 refs_select2 = []
998 refs_select2 = []
994 for element in all_refs:
999 for element in all_refs:
995 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1000 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
996 refs_select2.append({'text': element[1], 'children': children})
1001 refs_select2.append({'text': element[1], 'children': children})
997
1002
998 return {
1003 return {
999 'user': {
1004 'user': {
1000 'user_id': repo.user.user_id,
1005 'user_id': repo.user.user_id,
1001 'username': repo.user.username,
1006 'username': repo.user.username,
1002 'firstname': repo.user.firstname,
1007 'firstname': repo.user.firstname,
1003 'lastname': repo.user.lastname,
1008 'lastname': repo.user.lastname,
1004 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1009 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1005 },
1010 },
1006 'description': h.chop_at_smart(repo.description, '\n'),
1011 'description': h.chop_at_smart(repo.description, '\n'),
1007 'refs': {
1012 'refs': {
1008 'all_refs': all_refs,
1013 'all_refs': all_refs,
1009 'selected_ref': selected_ref,
1014 'selected_ref': selected_ref,
1010 'select2_refs': refs_select2
1015 'select2_refs': refs_select2
1011 }
1016 }
1012 }
1017 }
1013
1018
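The dict returned above also carries 'user' and 'description'; its 'refs' portion feeds the pull-request source/target selectors. A concrete example of that portion, matching the 'type:name:commit_id' key format used throughout this model (the commit hash and branch are made up):

    # Hypothetical 'refs' payload for a repo with a single branch.
    example_refs = {
        'all_refs': [
            ([('branch:default:abcdef0123456789', 'default')], 'Branches'),
        ],
        'selected_ref': 'branch:default:abcdef0123456789',
        'select2_refs': [
            {'text': 'Branches',
             'children': [{'id': 'branch:default:abcdef0123456789',
                           'text': 'default'}]},
        ],
    }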
1014 def generate_pullrequest_title(self, source, source_ref, target):
1019 def generate_pullrequest_title(self, source, source_ref, target):
1015 return '{source}#{at_ref} to {target}'.format(
1020 return '{source}#{at_ref} to {target}'.format(
1016 source=source,
1021 source=source,
1017 at_ref=source_ref,
1022 at_ref=source_ref,
1018 target=target,
1023 target=target,
1019 )
1024 )
1020
1025
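A quick illustration of the title format produced above (names are placeholders):

    title = '{source}#{at_ref} to {target}'.format(
        source='my-fork', at_ref='feature-1', target='upstream-repo')
    assert title == 'my-fork#feature-1 to upstream-repo'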
1021 def _cleanup_merge_workspace(self, pull_request):
1026 def _cleanup_merge_workspace(self, pull_request):
1022 # Merging related cleanup
1027 # Merging related cleanup
1023 target_scm = pull_request.target_repo.scm_instance()
1028 target_scm = pull_request.target_repo.scm_instance()
1024 workspace_id = 'pr-%s' % pull_request.pull_request_id
1029 workspace_id = 'pr-%s' % pull_request.pull_request_id
1025
1030
1026 try:
1031 try:
1027 target_scm.cleanup_merge_workspace(workspace_id)
1032 target_scm.cleanup_merge_workspace(workspace_id)
1028 except NotImplementedError:
1033 except NotImplementedError:
1029 pass
1034 pass
1030
1035
1031 def _get_repo_pullrequest_sources(
1036 def _get_repo_pullrequest_sources(
1032 self, repo, commit_id=None, branch=None, bookmark=None):
1037 self, repo, commit_id=None, branch=None, bookmark=None):
1033 """
1038 """
1034 Return a structure with repo's interesting commits, suitable for
1039 Return a structure with repo's interesting commits, suitable for
1035 the selectors in pullrequest controller
1040 the selectors in pullrequest controller
1036
1041
1037 :param commit_id: a commit that must be in the list somehow
1042 :param commit_id: a commit that must be in the list somehow
1038 and selected by default
1043 and selected by default
1039 :param branch: a branch that must be in the list and selected
1044 :param branch: a branch that must be in the list and selected
1040 by default - even if closed
1045 by default - even if closed
1041 :param bookmark: a bookmark that must be in the list and selected
1046 :param bookmark: a bookmark that must be in the list and selected
1042 """
1047 """
1043
1048
1044 commit_id = safe_str(commit_id) if commit_id else None
1049 commit_id = safe_str(commit_id) if commit_id else None
1045 branch = safe_str(branch) if branch else None
1050 branch = safe_str(branch) if branch else None
1046 bookmark = safe_str(bookmark) if bookmark else None
1051 bookmark = safe_str(bookmark) if bookmark else None
1047
1052
1048 selected = None
1053 selected = None
1049
1054
1050 # order matters: first source that has commit_id in it will be selected
1055 # order matters: first source that has commit_id in it will be selected
1051 sources = []
1056 sources = []
1052 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1057 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1053 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1058 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1054
1059
1055 if commit_id:
1060 if commit_id:
1056 ref_commit = (h.short_id(commit_id), commit_id)
1061 ref_commit = (h.short_id(commit_id), commit_id)
1057 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1062 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1058
1063
1059 sources.append(
1064 sources.append(
1060 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1065 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1061 )
1066 )
1062
1067
1063 groups = []
1068 groups = []
1064 for group_key, ref_list, group_name, match in sources:
1069 for group_key, ref_list, group_name, match in sources:
1065 group_refs = []
1070 group_refs = []
1066 for ref_name, ref_id in ref_list:
1071 for ref_name, ref_id in ref_list:
1067 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1072 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1068 group_refs.append((ref_key, ref_name))
1073 group_refs.append((ref_key, ref_name))
1069
1074
1070 if not selected:
1075 if not selected:
1071 if set([commit_id, match]) & set([ref_id, ref_name]):
1076 if set([commit_id, match]) & set([ref_id, ref_name]):
1072 selected = ref_key
1077 selected = ref_key
1073
1078
1074 if group_refs:
1079 if group_refs:
1075 groups.append((group_refs, group_name))
1080 groups.append((group_refs, group_name))
1076
1081
1077 if not selected:
1082 if not selected:
1078 ref = commit_id or branch or bookmark
1083 ref = commit_id or branch or bookmark
1079 if ref:
1084 if ref:
1080 raise CommitDoesNotExistError(
1085 raise CommitDoesNotExistError(
1081 'No commit refs could be found matching: %s' % ref)
1086 'No commit refs could be found matching: %s' % ref)
1082 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1087 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1083 selected = 'branch:%s:%s' % (
1088 selected = 'branch:%s:%s' % (
1084 repo.DEFAULT_BRANCH_NAME,
1089 repo.DEFAULT_BRANCH_NAME,
1085 repo.branches[repo.DEFAULT_BRANCH_NAME]
1090 repo.branches[repo.DEFAULT_BRANCH_NAME]
1086 )
1091 )
1087 elif repo.commit_ids:
1092 elif repo.commit_ids:
1088 rev = repo.commit_ids[0]
1093 rev = repo.commit_ids[0]
1089 selected = 'rev:%s:%s' % (rev, rev)
1094 selected = 'rev:%s:%s' % (rev, rev)
1090 else:
1095 else:
1091 raise EmptyRepositoryError()
1096 raise EmptyRepositoryError()
1092 return groups, selected
1097 return groups, selected
1093
1098
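The (groups, selected) pair built above keeps each group as (list of (ref_key, ref_name), group label), where ref_key follows the 'type:name:commit_id' format, and falls back to the default branch when no explicit commit, branch, or bookmark matched. A tiny standalone illustration of those keys (commit hashes are made up):

    def make_ref_key(ref_type, ref_name, ref_id):
        # same '%s:%s:%s' key format as _get_repo_pullrequest_sources
        return '%s:%s:%s' % (ref_type, ref_name, ref_id)

    branches = {'default': 'deadbeefcafe', 'stable': 'abc123def456'}
    groups = [
        ([(make_ref_key('branch', name, commit_id), name)
          for name, commit_id in sorted(branches.items())], 'Branches'),
    ]
    # with no commit/branch/bookmark requested, the default branch is selected
    selected = make_ref_key('branch', 'default', branches['default'])
    assert selected == 'branch:default:deadbeefcafe'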
1094 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1099 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1095 pull_request = self.__get_pull_request(pull_request)
1100 pull_request = self.__get_pull_request(pull_request)
1096 return self._get_diff_from_pr_or_version(pull_request, context=context)
1101 return self._get_diff_from_pr_or_version(pull_request, context=context)
1097
1102
1098 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1103 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1099 source_repo = pr_or_version.source_repo
1104 source_repo = pr_or_version.source_repo
1100
1105
1101 # we swap org/other ref since we run a simple diff on one repo
1106 # we swap org/other ref since we run a simple diff on one repo
1102 target_ref_id = pr_or_version.target_ref_parts.commit_id
1107 target_ref_id = pr_or_version.target_ref_parts.commit_id
1103 source_ref_id = pr_or_version.source_ref_parts.commit_id
1108 source_ref_id = pr_or_version.source_ref_parts.commit_id
1104 target_commit = source_repo.get_commit(
1109 target_commit = source_repo.get_commit(
1105 commit_id=safe_str(target_ref_id))
1110 commit_id=safe_str(target_ref_id))
1106 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1111 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1107 vcs_repo = source_repo.scm_instance()
1112 vcs_repo = source_repo.scm_instance()
1108
1113
1109 # TODO: johbo: In the context of an update, we cannot reach
1114 # TODO: johbo: In the context of an update, we cannot reach
1110 # the old commit anymore with our normal mechanisms. It needs
1115 # the old commit anymore with our normal mechanisms. It needs
1111 # some sort of special support in the vcs layer to avoid this
1116 # some sort of special support in the vcs layer to avoid this
1112 # workaround.
1117 # workaround.
1113 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1118 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1114 vcs_repo.alias == 'git'):
1119 vcs_repo.alias == 'git'):
1115 source_commit.raw_id = safe_str(source_ref_id)
1120 source_commit.raw_id = safe_str(source_ref_id)
1116
1121
1117 log.debug('calculating diff between '
1122 log.debug('calculating diff between '
1118 'source_ref:%s and target_ref:%s for repo `%s`',
1123 'source_ref:%s and target_ref:%s for repo `%s`',
1119 source_ref_id, target_ref_id,
1124 source_ref_id, target_ref_id,
1120 safe_unicode(vcs_repo.path))
1125 safe_unicode(vcs_repo.path))
1121
1126
1122 vcs_diff = vcs_repo.get_diff(
1127 vcs_diff = vcs_repo.get_diff(
1123 commit1=target_commit, commit2=source_commit, context=context)
1128 commit1=target_commit, commit2=source_commit, context=context)
1124 return vcs_diff
1129 return vcs_diff
1125
1130
1126 def _is_merge_enabled(self, pull_request):
1131 def _is_merge_enabled(self, pull_request):
1127 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1128 settings = settings_model.get_general_settings()
1133 settings = settings_model.get_general_settings()
1129 return settings.get('rhodecode_pr_merge_enabled', False)
1134 return settings.get('rhodecode_pr_merge_enabled', False)
1130
1135
1131 def _use_rebase_for_merging(self, pull_request):
1136 def _use_rebase_for_merging(self, pull_request):
1132 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1137 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1133 settings = settings_model.get_general_settings()
1138 settings = settings_model.get_general_settings()
1134 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1139 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1135
1140
1136 def _log_action(self, action, user, pull_request):
1141 def _log_action(self, action, user, pull_request):
1137 action_logger(
1142 action_logger(
1138 user,
1143 user,
1139 '{action}:{pr_id}'.format(
1144 '{action}:{pr_id}'.format(
1140 action=action, pr_id=pull_request.pull_request_id),
1145 action=action, pr_id=pull_request.pull_request_id),
1141 pull_request.target_repo)
1146 pull_request.target_repo)
1142
1147
1143
1148
1144 ChangeTuple = namedtuple('ChangeTuple',
1149 ChangeTuple = namedtuple('ChangeTuple',
1145 ['added', 'common', 'removed'])
1150 ['added', 'common', 'removed'])
1146
1151
1147 FileChangeTuple = namedtuple('FileChangeTuple',
1152 FileChangeTuple = namedtuple('FileChangeTuple',
1148 ['added', 'modified', 'removed'])
1153 ['added', 'modified', 'removed'])
@@ -1,931 +1,934 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Repository model for rhodecode
22 Repository model for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 from datetime import datetime
31 from datetime import datetime
32
32
33 from sqlalchemy.sql import func
33 from sqlalchemy.sql import func
34 from sqlalchemy.sql.expression import true, or_
34 from sqlalchemy.sql.expression import true, or_
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
40 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.exceptions import AttachedForksError
42 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.hooks_base import log_delete_repository
43 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils import make_db_config
44 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
47 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.lib.vcs.backends import get_backend
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
52 RepoGroup, RepositoryField)
52 RepoGroup, RepositoryField)
53 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.scm import UserGroupList
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55
55
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoModel(BaseModel):
60 class RepoModel(BaseModel):
61
61
62 cls = Repository
62 cls = Repository
63
63
64 def _get_user_group(self, users_group):
64 def _get_user_group(self, users_group):
65 return self._get_instance(UserGroup, users_group,
65 return self._get_instance(UserGroup, users_group,
66 callback=UserGroup.get_by_group_name)
66 callback=UserGroup.get_by_group_name)
67
67
68 def _get_repo_group(self, repo_group):
68 def _get_repo_group(self, repo_group):
69 return self._get_instance(RepoGroup, repo_group,
69 return self._get_instance(RepoGroup, repo_group,
70 callback=RepoGroup.get_by_group_name)
70 callback=RepoGroup.get_by_group_name)
71
71
72 def _create_default_perms(self, repository, private):
72 def _create_default_perms(self, repository, private):
73 # create default permission
73 # create default permission
74 default = 'repository.read'
74 default = 'repository.read'
75 def_user = User.get_default_user()
75 def_user = User.get_default_user()
76 for p in def_user.user_perms:
76 for p in def_user.user_perms:
77 if p.permission.permission_name.startswith('repository.'):
77 if p.permission.permission_name.startswith('repository.'):
78 default = p.permission.permission_name
78 default = p.permission.permission_name
79 break
79 break
80
80
81 default_perm = 'repository.none' if private else default
81 default_perm = 'repository.none' if private else default
82
82
83 repo_to_perm = UserRepoToPerm()
83 repo_to_perm = UserRepoToPerm()
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
85
85
86 repo_to_perm.repository = repository
86 repo_to_perm.repository = repository
87 repo_to_perm.user_id = def_user.user_id
87 repo_to_perm.user_id = def_user.user_id
88
88
89 return repo_to_perm
89 return repo_to_perm
90
90
91 @LazyProperty
91 @LazyProperty
92 def repos_path(self):
92 def repos_path(self):
93 """
93 """
94 Gets the repositories' root path from the database
94 Gets the repositories' root path from the database
95 """
95 """
96 settings_model = VcsSettingsModel(sa=self.sa)
96 settings_model = VcsSettingsModel(sa=self.sa)
97 return settings_model.get_repos_location()
97 return settings_model.get_repos_location()
98
98
99 def get(self, repo_id, cache=False):
99 def get(self, repo_id, cache=False):
100 repo = self.sa.query(Repository) \
100 repo = self.sa.query(Repository) \
101 .filter(Repository.repo_id == repo_id)
101 .filter(Repository.repo_id == repo_id)
102
102
103 if cache:
103 if cache:
104 repo = repo.options(FromCache("sql_cache_short",
104 repo = repo.options(FromCache("sql_cache_short",
105 "get_repo_%s" % repo_id))
105 "get_repo_%s" % repo_id))
106 return repo.scalar()
106 return repo.scalar()
107
107
108 def get_repo(self, repository):
108 def get_repo(self, repository):
109 return self._get_repo(repository)
109 return self._get_repo(repository)
110
110
111 def get_by_repo_name(self, repo_name, cache=False):
111 def get_by_repo_name(self, repo_name, cache=False):
112 repo = self.sa.query(Repository) \
112 repo = self.sa.query(Repository) \
113 .filter(Repository.repo_name == repo_name)
113 .filter(Repository.repo_name == repo_name)
114
114
115 if cache:
115 if cache:
116 repo = repo.options(FromCache("sql_cache_short",
116 repo = repo.options(FromCache("sql_cache_short",
117 "get_repo_%s" % repo_name))
117 "get_repo_%s" % repo_name))
118 return repo.scalar()
118 return repo.scalar()
119
119
120 def _extract_id_from_repo_name(self, repo_name):
120 def _extract_id_from_repo_name(self, repo_name):
121 if repo_name.startswith('/'):
121 if repo_name.startswith('/'):
122 repo_name = repo_name.lstrip('/')
122 repo_name = repo_name.lstrip('/')
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 if by_id_match:
124 if by_id_match:
125 return by_id_match.groups()[0]
125 return by_id_match.groups()[0]
126
126
127 def get_repo_by_id(self, repo_name):
127 def get_repo_by_id(self, repo_name):
128 """
128 """
129 Extracts a repository by the id embedded in special URLs.
129 Extracts a repository by the id embedded in special URLs.
130 Example URL: _11/repo_name
130 Example URL: _11/repo_name
131
131
132 :param repo_name:
132 :param repo_name:
133 :return: repo object if matched else None
133 :return: repo object if matched else None
134 """
134 """
135 try:
135 try:
136 _repo_id = self._extract_id_from_repo_name(repo_name)
136 _repo_id = self._extract_id_from_repo_name(repo_name)
137 if _repo_id:
137 if _repo_id:
138 return self.get(_repo_id)
138 return self.get(_repo_id)
139 except Exception:
139 except Exception:
140 log.exception('Failed to extract repo_name from URL')
140 log.exception('Failed to extract repo_name from URL')
141
141
142 return None
142 return None
143
143
144 def get_url(self, repo):
145 return url('summary_home', repo_name=repo.repo_name, qualified=True)
146
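get_repo_by_id and the new get_url helper both rely on simple building blocks: the id is pulled out of names such as _11/repo_name with a regex, and get_url (added in this changeset) builds a fully qualified summary URL through the url routing helper. An illustrative, standalone mirror of the id extraction (not the model method itself):

    import re

    def extract_id_from_repo_name(repo_name):
        # mirrors RepoModel._extract_id_from_repo_name
        if repo_name.startswith('/'):
            repo_name = repo_name.lstrip('/')
        by_id_match = re.match(r'^_(\d{1,})', repo_name)
        if by_id_match:
            return by_id_match.groups()[0]

    assert extract_id_from_repo_name('_11/repo_name') == '11'
    assert extract_id_from_repo_name('/_7') == '7'
    assert extract_id_from_repo_name('group/normal-repo') is None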
144 def get_users(self, name_contains=None, limit=20, only_active=True):
147 def get_users(self, name_contains=None, limit=20, only_active=True):
145 # TODO: mikhail: move this method to the UserModel.
148 # TODO: mikhail: move this method to the UserModel.
146 query = self.sa.query(User)
149 query = self.sa.query(User)
147 if only_active:
150 if only_active:
148 query = query.filter(User.active == true())
151 query = query.filter(User.active == true())
149
152
150 if name_contains:
153 if name_contains:
151 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
154 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
152 query = query.filter(
155 query = query.filter(
153 or_(
156 or_(
154 User.name.ilike(ilike_expression),
157 User.name.ilike(ilike_expression),
155 User.lastname.ilike(ilike_expression),
158 User.lastname.ilike(ilike_expression),
156 User.username.ilike(ilike_expression)
159 User.username.ilike(ilike_expression)
157 )
160 )
158 )
161 )
159 query = query.limit(limit)
162 query = query.limit(limit)
160 users = query.all()
163 users = query.all()
161
164
162 _users = [
165 _users = [
163 {
166 {
164 'id': user.user_id,
167 'id': user.user_id,
165 'first_name': user.name,
168 'first_name': user.name,
166 'last_name': user.lastname,
169 'last_name': user.lastname,
167 'username': user.username,
170 'username': user.username,
168 'icon_link': h.gravatar_url(user.email, 14),
171 'icon_link': h.gravatar_url(user.email, 14),
169 'value_display': h.person(user.email),
172 'value_display': h.person(user.email),
170 'value': user.username,
173 'value': user.username,
171 'value_type': 'user',
174 'value_type': 'user',
172 'active': user.active,
175 'active': user.active,
173 }
176 }
174 for user in users
177 for user in users
175 ]
178 ]
176 return _users
179 return _users
177
180
178 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
181 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
179 # TODO: mikhail: move this method to the UserGroupModel.
182 # TODO: mikhail: move this method to the UserGroupModel.
180 query = self.sa.query(UserGroup)
183 query = self.sa.query(UserGroup)
181 if only_active:
184 if only_active:
182 query = query.filter(UserGroup.users_group_active == true())
185 query = query.filter(UserGroup.users_group_active == true())
183
186
184 if name_contains:
187 if name_contains:
185 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
188 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
186 query = query.filter(
189 query = query.filter(
187 UserGroup.users_group_name.ilike(ilike_expression))\
190 UserGroup.users_group_name.ilike(ilike_expression))\
188 .order_by(func.length(UserGroup.users_group_name))\
191 .order_by(func.length(UserGroup.users_group_name))\
189 .order_by(UserGroup.users_group_name)
192 .order_by(UserGroup.users_group_name)
190
193
191 query = query.limit(limit)
194 query = query.limit(limit)
192 user_groups = query.all()
195 user_groups = query.all()
193 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
196 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
194 user_groups = UserGroupList(user_groups, perm_set=perm_set)
197 user_groups = UserGroupList(user_groups, perm_set=perm_set)
195
198
196 _groups = [
199 _groups = [
197 {
200 {
198 'id': group.users_group_id,
201 'id': group.users_group_id,
199 # TODO: marcink figure out a way to generate the url for the
202 # TODO: marcink figure out a way to generate the url for the
200 # icon
203 # icon
201 'icon_link': '',
204 'icon_link': '',
202 'value_display': 'Group: %s (%d members)' % (
205 'value_display': 'Group: %s (%d members)' % (
203 group.users_group_name, len(group.members),),
206 group.users_group_name, len(group.members),),
204 'value': group.users_group_name,
207 'value': group.users_group_name,
205 'value_type': 'user_group',
208 'value_type': 'user_group',
206 'active': group.users_group_active,
209 'active': group.users_group_active,
207 }
210 }
208 for group in user_groups
211 for group in user_groups
209 ]
212 ]
210 return _groups
213 return _groups
211
214
212 @classmethod
215 @classmethod
213 def update_repoinfo(cls, repositories=None):
216 def update_repoinfo(cls, repositories=None):
214 if not repositories:
217 if not repositories:
215 repositories = Repository.getAll()
218 repositories = Repository.getAll()
216 for repo in repositories:
219 for repo in repositories:
217 repo.update_commit_cache()
220 repo.update_commit_cache()
218
221
219 def get_repos_as_dict(self, repo_list=None, admin=False,
222 def get_repos_as_dict(self, repo_list=None, admin=False,
220 super_user_actions=False):
223 super_user_actions=False):
221
224
222 from rhodecode.lib.utils import PartialRenderer
225 from rhodecode.lib.utils import PartialRenderer
223 _render = PartialRenderer('data_table/_dt_elements.html')
226 _render = PartialRenderer('data_table/_dt_elements.html')
224 c = _render.c
227 c = _render.c
225
228
226 def quick_menu(repo_name):
229 def quick_menu(repo_name):
227 return _render('quick_menu', repo_name)
230 return _render('quick_menu', repo_name)
228
231
229 def repo_lnk(name, rtype, rstate, private, fork_of):
232 def repo_lnk(name, rtype, rstate, private, fork_of):
230 return _render('repo_name', name, rtype, rstate, private, fork_of,
233 return _render('repo_name', name, rtype, rstate, private, fork_of,
231 short_name=not admin, admin=False)
234 short_name=not admin, admin=False)
232
235
233 def last_change(last_change):
236 def last_change(last_change):
234 return _render("last_change", last_change)
237 return _render("last_change", last_change)
235
238
236 def rss_lnk(repo_name):
239 def rss_lnk(repo_name):
237 return _render("rss", repo_name)
240 return _render("rss", repo_name)
238
241
239 def atom_lnk(repo_name):
242 def atom_lnk(repo_name):
240 return _render("atom", repo_name)
243 return _render("atom", repo_name)
241
244
242 def last_rev(repo_name, cs_cache):
245 def last_rev(repo_name, cs_cache):
243 return _render('revision', repo_name, cs_cache.get('revision'),
246 return _render('revision', repo_name, cs_cache.get('revision'),
244 cs_cache.get('raw_id'), cs_cache.get('author'),
247 cs_cache.get('raw_id'), cs_cache.get('author'),
245 cs_cache.get('message'))
248 cs_cache.get('message'))
246
249
247 def desc(desc):
250 def desc(desc):
248 if c.visual.stylify_metatags:
251 if c.visual.stylify_metatags:
249 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
252 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
250 else:
253 else:
251 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
254 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
252
255
253 def state(repo_state):
256 def state(repo_state):
254 return _render("repo_state", repo_state)
257 return _render("repo_state", repo_state)
255
258
256 def repo_actions(repo_name):
259 def repo_actions(repo_name):
257 return _render('repo_actions', repo_name, super_user_actions)
260 return _render('repo_actions', repo_name, super_user_actions)
258
261
259 def user_profile(username):
262 def user_profile(username):
260 return _render('user_profile', username)
263 return _render('user_profile', username)
261
264
262 repos_data = []
265 repos_data = []
263 for repo in repo_list:
266 for repo in repo_list:
264 cs_cache = repo.changeset_cache
267 cs_cache = repo.changeset_cache
265 row = {
268 row = {
266 "menu": quick_menu(repo.repo_name),
269 "menu": quick_menu(repo.repo_name),
267
270
268 "name": repo_lnk(repo.repo_name, repo.repo_type,
271 "name": repo_lnk(repo.repo_name, repo.repo_type,
269 repo.repo_state, repo.private, repo.fork),
272 repo.repo_state, repo.private, repo.fork),
270 "name_raw": repo.repo_name.lower(),
273 "name_raw": repo.repo_name.lower(),
271
274
272 "last_change": last_change(repo.last_db_change),
275 "last_change": last_change(repo.last_db_change),
273 "last_change_raw": datetime_to_time(repo.last_db_change),
276 "last_change_raw": datetime_to_time(repo.last_db_change),
274
277
275 "last_changeset": last_rev(repo.repo_name, cs_cache),
278 "last_changeset": last_rev(repo.repo_name, cs_cache),
276 "last_changeset_raw": cs_cache.get('revision'),
279 "last_changeset_raw": cs_cache.get('revision'),
277
280
278 "desc": desc(repo.description),
281 "desc": desc(repo.description),
279 "owner": user_profile(repo.user.username),
282 "owner": user_profile(repo.user.username),
280
283
281 "state": state(repo.repo_state),
284 "state": state(repo.repo_state),
282 "rss": rss_lnk(repo.repo_name),
285 "rss": rss_lnk(repo.repo_name),
283
286
284 "atom": atom_lnk(repo.repo_name),
287 "atom": atom_lnk(repo.repo_name),
285 }
288 }
286 if admin:
289 if admin:
287 row.update({
290 row.update({
288 "action": repo_actions(repo.repo_name),
291 "action": repo_actions(repo.repo_name),
289 })
292 })
290 repos_data.append(row)
293 repos_data.append(row)
291
294
292 return repos_data
295 return repos_data
293
296
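Each row produced above pairs rendered HTML cells with raw companion values (name_raw, last_change_raw, last_changeset_raw) so a data grid can sort on plain data instead of markup. A standalone illustration of sorting such rows; the row dicts are trimmed, made-up stand-ins:

    rows = [
        {'name_raw': 'zebra-repo', 'last_change_raw': 1460000000.0},
        {'name_raw': 'alpha-repo', 'last_change_raw': 1465000000.0},
    ]
    by_name = sorted(rows, key=lambda r: r['name_raw'])
    newest_first = sorted(rows, key=lambda r: r['last_change_raw'], reverse=True)
    assert by_name[0]['name_raw'] == 'alpha-repo'
    assert newest_first[0]['last_change_raw'] == 1465000000.0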
294 def _get_defaults(self, repo_name):
297 def _get_defaults(self, repo_name):
295 """
298 """
296 Gets information about a repository and returns a dict for
299 Gets information about a repository and returns a dict for
297 use in forms
300 use in forms
298
301
299 :param repo_name:
302 :param repo_name:
300 """
303 """
301
304
302 repo_info = Repository.get_by_repo_name(repo_name)
305 repo_info = Repository.get_by_repo_name(repo_name)
303
306
304 if repo_info is None:
307 if repo_info is None:
305 return None
308 return None
306
309
307 defaults = repo_info.get_dict()
310 defaults = repo_info.get_dict()
308 defaults['repo_name'] = repo_info.just_name
311 defaults['repo_name'] = repo_info.just_name
309
312
310 groups = repo_info.groups_with_parents
313 groups = repo_info.groups_with_parents
311 parent_group = groups[-1] if groups else None
314 parent_group = groups[-1] if groups else None
312
315
313 # we use -1 because that is how we mark an empty group in HTML
316 # we use -1 because that is how we mark an empty group in HTML
314 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
317 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
315
318
316 keys_to_process = (
319 keys_to_process = (
317 {'k': 'repo_type', 'strip': False},
320 {'k': 'repo_type', 'strip': False},
318 {'k': 'repo_enable_downloads', 'strip': True},
321 {'k': 'repo_enable_downloads', 'strip': True},
319 {'k': 'repo_description', 'strip': True},
322 {'k': 'repo_description', 'strip': True},
320 {'k': 'repo_enable_locking', 'strip': True},
323 {'k': 'repo_enable_locking', 'strip': True},
321 {'k': 'repo_landing_rev', 'strip': True},
324 {'k': 'repo_landing_rev', 'strip': True},
322 {'k': 'clone_uri', 'strip': False},
325 {'k': 'clone_uri', 'strip': False},
323 {'k': 'repo_private', 'strip': True},
326 {'k': 'repo_private', 'strip': True},
324 {'k': 'repo_enable_statistics', 'strip': True}
327 {'k': 'repo_enable_statistics', 'strip': True}
325 )
328 )
326
329
327 for item in keys_to_process:
330 for item in keys_to_process:
328 attr = item['k']
331 attr = item['k']
329 if item['strip']:
332 if item['strip']:
330 attr = remove_prefix(item['k'], 'repo_')
333 attr = remove_prefix(item['k'], 'repo_')
331
334
332 val = defaults[attr]
335 val = defaults[attr]
333 if item['k'] == 'repo_landing_rev':
336 if item['k'] == 'repo_landing_rev':
334 val = ':'.join(defaults[attr])
337 val = ':'.join(defaults[attr])
335 defaults[item['k']] = val
338 defaults[item['k']] = val
336 if item['k'] == 'clone_uri':
339 if item['k'] == 'clone_uri':
337 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
340 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
338
341
339 # fill owner
342 # fill owner
340 if repo_info.user:
343 if repo_info.user:
341 defaults.update({'user': repo_info.user.username})
344 defaults.update({'user': repo_info.user.username})
342 else:
345 else:
343 replacement_user = User.get_first_super_admin().username
346 replacement_user = User.get_first_super_admin().username
344 defaults.update({'user': replacement_user})
347 defaults.update({'user': replacement_user})
345
348
346 # fill repository users
349 # fill repository users
347 for p in repo_info.repo_to_perm:
350 for p in repo_info.repo_to_perm:
348 defaults.update({'u_perm_%s' % p.user.user_id:
351 defaults.update({'u_perm_%s' % p.user.user_id:
349 p.permission.permission_name})
352 p.permission.permission_name})
350
353
351 # fill repository groups
354 # fill repository groups
352 for p in repo_info.users_group_to_perm:
355 for p in repo_info.users_group_to_perm:
353 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
356 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
354 p.permission.permission_name})
357 p.permission.permission_name})
355
358
356 return defaults
359 return defaults
357
360
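In the keys_to_process loop above, the model attribute name is obtained by stripping the repo_ prefix from the form key (when 'strip' is set) and the value is stored back under the form key. A tiny illustration of that mapping, assuming remove_prefix behaves as its name suggests (its real implementation is not shown here):

    def remove_prefix_sketch(value, prefix):
        # illustrative stand-in for rhodecode.lib.utils2.remove_prefix
        return value[len(prefix):] if value.startswith(prefix) else value

    assert remove_prefix_sketch('repo_description', 'repo_') == 'description'
    assert remove_prefix_sketch('clone_uri', 'repo_') == 'clone_uri'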
358 def update(self, repo, **kwargs):
361 def update(self, repo, **kwargs):
359 try:
362 try:
360 cur_repo = self._get_repo(repo)
363 cur_repo = self._get_repo(repo)
361 source_repo_name = cur_repo.repo_name
364 source_repo_name = cur_repo.repo_name
362 if 'user' in kwargs:
365 if 'user' in kwargs:
363 cur_repo.user = User.get_by_username(kwargs['user'])
366 cur_repo.user = User.get_by_username(kwargs['user'])
364
367
365 if 'repo_group' in kwargs:
368 if 'repo_group' in kwargs:
366 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
369 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
367 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
370 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
368
371
369 update_keys = [
372 update_keys = [
370 (1, 'repo_enable_downloads'),
373 (1, 'repo_enable_downloads'),
371 (1, 'repo_description'),
374 (1, 'repo_description'),
372 (1, 'repo_enable_locking'),
375 (1, 'repo_enable_locking'),
373 (1, 'repo_landing_rev'),
376 (1, 'repo_landing_rev'),
374 (1, 'repo_private'),
377 (1, 'repo_private'),
375 (1, 'repo_enable_statistics'),
378 (1, 'repo_enable_statistics'),
376 (0, 'clone_uri'),
379 (0, 'clone_uri'),
377 (0, 'fork_id')
380 (0, 'fork_id')
378 ]
381 ]
379 for strip, k in update_keys:
382 for strip, k in update_keys:
380 if k in kwargs:
383 if k in kwargs:
381 val = kwargs[k]
384 val = kwargs[k]
382 if strip:
385 if strip:
383 k = remove_prefix(k, 'repo_')
386 k = remove_prefix(k, 'repo_')
384 if k == 'clone_uri':
387 if k == 'clone_uri':
385 from rhodecode.model.validators import Missing
388 from rhodecode.model.validators import Missing
386 _change = kwargs.get('clone_uri_change')
389 _change = kwargs.get('clone_uri_change')
387 if _change in [Missing, 'OLD']:
390 if _change in [Missing, 'OLD']:
388 # we don't change the value, so use original one
391 # we don't change the value, so use original one
389 val = cur_repo.clone_uri
392 val = cur_repo.clone_uri
390
393
391 setattr(cur_repo, k, val)
394 setattr(cur_repo, k, val)
392
395
393 new_name = cur_repo.get_new_name(kwargs['repo_name'])
396 new_name = cur_repo.get_new_name(kwargs['repo_name'])
394 cur_repo.repo_name = new_name
397 cur_repo.repo_name = new_name
395
398
396 # if private flag is set, reset default permission to NONE
399 # if private flag is set, reset default permission to NONE
397 if kwargs.get('repo_private'):
400 if kwargs.get('repo_private'):
398 EMPTY_PERM = 'repository.none'
401 EMPTY_PERM = 'repository.none'
399 RepoModel().grant_user_permission(
402 RepoModel().grant_user_permission(
400 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
403 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
401 )
404 )
402
405
403 # handle extra fields
406 # handle extra fields
404 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
407 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
405 kwargs):
408 kwargs):
406 k = RepositoryField.un_prefix_key(field)
409 k = RepositoryField.un_prefix_key(field)
407 ex_field = RepositoryField.get_by_key_name(
410 ex_field = RepositoryField.get_by_key_name(
408 key=k, repo=cur_repo)
411 key=k, repo=cur_repo)
409 if ex_field:
412 if ex_field:
410 ex_field.field_value = kwargs[field]
413 ex_field.field_value = kwargs[field]
411 self.sa.add(ex_field)
414 self.sa.add(ex_field)
412 self.sa.add(cur_repo)
415 self.sa.add(cur_repo)
413
416
414 if source_repo_name != new_name:
417 if source_repo_name != new_name:
415 # rename repository
418 # rename repository
416 self._rename_filesystem_repo(
419 self._rename_filesystem_repo(
417 old=source_repo_name, new=new_name)
420 old=source_repo_name, new=new_name)
418
421
419 return cur_repo
422 return cur_repo
420 except Exception:
423 except Exception:
421 log.error(traceback.format_exc())
424 log.error(traceback.format_exc())
422 raise
425 raise
423
426
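A call sketch for update(): repo_name is the only key the code above reads unconditionally (it drives the rename check); the other keys are optional fields from update_keys, and the stored clone_uri is kept whenever clone_uri_change is Missing or 'OLD'. All values are placeholders and the wrapper is illustrative only:

    def update_repo_sketch(repo):
        # Hypothetical call -- `repo` is an existing Repository instance or name,
        # and RepoModel is the class defined in this module.
        return RepoModel().update(
            repo,
            repo_name='new-group/new-name',    # a filesystem rename happens if this differs
            repo_description='Updated description',
            repo_private=True,                 # also resets the default user perm to repository.none
            clone_uri='',                      # ignored because of the marker below
            clone_uri_change='OLD',            # keep the currently stored clone_uri value
        )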
424 def _create_repo(self, repo_name, repo_type, description, owner,
427 def _create_repo(self, repo_name, repo_type, description, owner,
425 private=False, clone_uri=None, repo_group=None,
428 private=False, clone_uri=None, repo_group=None,
426 landing_rev='rev:tip', fork_of=None,
429 landing_rev='rev:tip', fork_of=None,
427 copy_fork_permissions=False, enable_statistics=False,
430 copy_fork_permissions=False, enable_statistics=False,
428 enable_locking=False, enable_downloads=False,
431 enable_locking=False, enable_downloads=False,
429 copy_group_permissions=False,
432 copy_group_permissions=False,
430 state=Repository.STATE_PENDING):
433 state=Repository.STATE_PENDING):
431 """
434 """
432 Create a repository in the database with PENDING state. This should
435 Create a repository in the database with PENDING state. This should
433 only be executed by create(), with the exception of importing
436 only be executed by create(), with the exception of importing
434 existing repositories.
437 existing repositories.
435 """
438 """
436 from rhodecode.model.scm import ScmModel
439 from rhodecode.model.scm import ScmModel
437
440
438 owner = self._get_user(owner)
441 owner = self._get_user(owner)
439 fork_of = self._get_repo(fork_of)
442 fork_of = self._get_repo(fork_of)
440 repo_group = self._get_repo_group(safe_int(repo_group))
443 repo_group = self._get_repo_group(safe_int(repo_group))
441
444
442 try:
445 try:
443 repo_name = safe_unicode(repo_name)
446 repo_name = safe_unicode(repo_name)
444 description = safe_unicode(description)
447 description = safe_unicode(description)
445 # repo name is just a name of repository
448 # repo name is just a name of repository
446 # while repo_name_full is a fully qualified name that is combined
449 # while repo_name_full is a fully qualified name that is combined
447 # with name and path of group
450 # with name and path of group
448 repo_name_full = repo_name
451 repo_name_full = repo_name
449 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
452 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
450
453
451 new_repo = Repository()
454 new_repo = Repository()
452 new_repo.repo_state = state
455 new_repo.repo_state = state
453 new_repo.enable_statistics = False
456 new_repo.enable_statistics = False
454 new_repo.repo_name = repo_name_full
457 new_repo.repo_name = repo_name_full
455 new_repo.repo_type = repo_type
458 new_repo.repo_type = repo_type
456 new_repo.user = owner
459 new_repo.user = owner
457 new_repo.group = repo_group
460 new_repo.group = repo_group
458 new_repo.description = description or repo_name
461 new_repo.description = description or repo_name
459 new_repo.private = private
462 new_repo.private = private
460 new_repo.clone_uri = clone_uri
463 new_repo.clone_uri = clone_uri
461 new_repo.landing_rev = landing_rev
464 new_repo.landing_rev = landing_rev
462
465
463 new_repo.enable_statistics = enable_statistics
466 new_repo.enable_statistics = enable_statistics
464 new_repo.enable_locking = enable_locking
467 new_repo.enable_locking = enable_locking
465 new_repo.enable_downloads = enable_downloads
468 new_repo.enable_downloads = enable_downloads
466
469
467 if repo_group:
470 if repo_group:
468 new_repo.enable_locking = repo_group.enable_locking
471 new_repo.enable_locking = repo_group.enable_locking
469
472
470 if fork_of:
473 if fork_of:
471 parent_repo = fork_of
474 parent_repo = fork_of
472 new_repo.fork = parent_repo
475 new_repo.fork = parent_repo
473
476
474 events.trigger(events.RepoPreCreateEvent(new_repo))
477 events.trigger(events.RepoPreCreateEvent(new_repo))
475
478
476 self.sa.add(new_repo)
479 self.sa.add(new_repo)
477
480
478 EMPTY_PERM = 'repository.none'
481 EMPTY_PERM = 'repository.none'
479 if fork_of and copy_fork_permissions:
482 if fork_of and copy_fork_permissions:
480 repo = fork_of
483 repo = fork_of
481 user_perms = UserRepoToPerm.query() \
484 user_perms = UserRepoToPerm.query() \
482 .filter(UserRepoToPerm.repository == repo).all()
485 .filter(UserRepoToPerm.repository == repo).all()
483 group_perms = UserGroupRepoToPerm.query() \
486 group_perms = UserGroupRepoToPerm.query() \
484 .filter(UserGroupRepoToPerm.repository == repo).all()
487 .filter(UserGroupRepoToPerm.repository == repo).all()
485
488
486 for perm in user_perms:
489 for perm in user_perms:
487 UserRepoToPerm.create(
490 UserRepoToPerm.create(
488 perm.user, new_repo, perm.permission)
491 perm.user, new_repo, perm.permission)
489
492
490 for perm in group_perms:
493 for perm in group_perms:
491 UserGroupRepoToPerm.create(
494 UserGroupRepoToPerm.create(
492 perm.users_group, new_repo, perm.permission)
495 perm.users_group, new_repo, perm.permission)
493 # in case we copy permissions and also set this repo to private
496 # in case we copy permissions and also set this repo to private
494 # override the default user permission to make it a private
497 # override the default user permission to make it a private
495 # repo
498 # repo
496 if private:
499 if private:
497 RepoModel(self.sa).grant_user_permission(
500 RepoModel(self.sa).grant_user_permission(
498 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
501 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
499
502
500 elif repo_group and copy_group_permissions:
503 elif repo_group and copy_group_permissions:
501 user_perms = UserRepoGroupToPerm.query() \
504 user_perms = UserRepoGroupToPerm.query() \
502 .filter(UserRepoGroupToPerm.group == repo_group).all()
505 .filter(UserRepoGroupToPerm.group == repo_group).all()
503
506
504 group_perms = UserGroupRepoGroupToPerm.query() \
507 group_perms = UserGroupRepoGroupToPerm.query() \
505 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
508 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
506
509
507 for perm in user_perms:
510 for perm in user_perms:
508 perm_name = perm.permission.permission_name.replace(
511 perm_name = perm.permission.permission_name.replace(
509 'group.', 'repository.')
512 'group.', 'repository.')
510 perm_obj = Permission.get_by_key(perm_name)
513 perm_obj = Permission.get_by_key(perm_name)
511 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
514 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
512
515
513 for perm in group_perms:
516 for perm in group_perms:
514 perm_name = perm.permission.permission_name.replace(
517 perm_name = perm.permission.permission_name.replace(
515 'group.', 'repository.')
518 'group.', 'repository.')
516 perm_obj = Permission.get_by_key(perm_name)
519 perm_obj = Permission.get_by_key(perm_name)
517 UserGroupRepoToPerm.create(
520 UserGroupRepoToPerm.create(
518 perm.users_group, new_repo, perm_obj)
521 perm.users_group, new_repo, perm_obj)
519
522
520 if private:
523 if private:
521 RepoModel(self.sa).grant_user_permission(
524 RepoModel(self.sa).grant_user_permission(
522 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
525 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
523
526
524 else:
527 else:
525 perm_obj = self._create_default_perms(new_repo, private)
528 perm_obj = self._create_default_perms(new_repo, private)
526 self.sa.add(perm_obj)
529 self.sa.add(perm_obj)
527
530
528 # now automatically start following this repository as owner
531 # now automatically start following this repository as owner
529 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
532 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
530 owner.user_id)
533 owner.user_id)
531
534
532 # we need to flush here so the database can raise any errors now;
535 # we need to flush here so the database can raise any errors now;
533 # filesystem directories are created at the very end
536 # filesystem directories are created at the very end
534 self.sa.flush()
537 self.sa.flush()
535 events.trigger(events.RepoCreatedEvent(new_repo))
538 events.trigger(events.RepoCreatedEvent(new_repo))
536 return new_repo
539 return new_repo
537
540
538 except Exception:
541 except Exception:
539 log.error(traceback.format_exc())
542 log.error(traceback.format_exc())
540 raise
543 raise
541
544
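Note the event ordering in _create_repo: RepoPreCreateEvent fires before the object is added to the session, while RepoCreatedEvent fires only after the flush succeeds, so subscribers observe a repository that already has an id. How subscribers are wired is outside this hunk; the sketch below assumes Pyramid-style subscriber registration and that these events expose the repository on a repo attribute (both assumptions):

    # Hypothetical subscriber sketch; registration API and the `repo` attribute
    # on the event object are assumptions, not shown in this diff.
    def audit_repo_created(event):
        repo = getattr(event, 'repo', None)
        if repo is not None:
            print('repository created: %s' % repo.repo_name)

    # e.g. inside a Pyramid includeme(config) hook (assumed wiring):
    # config.add_subscriber(audit_repo_created, events.RepoCreatedEvent)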
542 def create(self, form_data, cur_user):
545 def create(self, form_data, cur_user):
543 """
546 """
544 Create repository using celery tasks
547 Create repository using celery tasks
545
548
546 :param form_data:
549 :param form_data:
547 :param cur_user:
550 :param cur_user:
548 """
551 """
549 from rhodecode.lib.celerylib import tasks, run_task
552 from rhodecode.lib.celerylib import tasks, run_task
550 return run_task(tasks.create_repo, form_data, cur_user)
553 return run_task(tasks.create_repo, form_data, cur_user)
551
554
552 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
555 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
553 perm_deletions=None, check_perms=True,
556 perm_deletions=None, check_perms=True,
554 cur_user=None):
557 cur_user=None):
555 if not perm_additions:
558 if not perm_additions:
556 perm_additions = []
559 perm_additions = []
557 if not perm_updates:
560 if not perm_updates:
558 perm_updates = []
561 perm_updates = []
559 if not perm_deletions:
562 if not perm_deletions:
560 perm_deletions = []
563 perm_deletions = []
561
564
562 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
565 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
563
566
564 # update permissions
567 # update permissions
565 for member_id, perm, member_type in perm_updates:
568 for member_id, perm, member_type in perm_updates:
566 member_id = int(member_id)
569 member_id = int(member_id)
567 if member_type == 'user':
570 if member_type == 'user':
568 # this updates also current one if found
571 # this updates also current one if found
569 self.grant_user_permission(
572 self.grant_user_permission(
570 repo=repo, user=member_id, perm=perm)
573 repo=repo, user=member_id, perm=perm)
571 else: # set for user group
574 else: # set for user group
572 # check if we have permissions to alter this usergroup
575 # check if we have permissions to alter this usergroup
573 member_name = UserGroup.get(member_id).users_group_name
576 member_name = UserGroup.get(member_id).users_group_name
574 if not check_perms or HasUserGroupPermissionAny(
577 if not check_perms or HasUserGroupPermissionAny(
575 *req_perms)(member_name, user=cur_user):
578 *req_perms)(member_name, user=cur_user):
576 self.grant_user_group_permission(
579 self.grant_user_group_permission(
577 repo=repo, group_name=member_id, perm=perm)
580 repo=repo, group_name=member_id, perm=perm)
578
581
579 # set new permissions
582 # set new permissions
580 for member_id, perm, member_type in perm_additions:
583 for member_id, perm, member_type in perm_additions:
581 member_id = int(member_id)
584 member_id = int(member_id)
582 if member_type == 'user':
585 if member_type == 'user':
583 self.grant_user_permission(
586 self.grant_user_permission(
584 repo=repo, user=member_id, perm=perm)
587 repo=repo, user=member_id, perm=perm)
585 else: # set for user group
588 else: # set for user group
586 # check if we have permissions to alter this usergroup
589 # check if we have permissions to alter this usergroup
587 member_name = UserGroup.get(member_id).users_group_name
590 member_name = UserGroup.get(member_id).users_group_name
588 if not check_perms or HasUserGroupPermissionAny(
591 if not check_perms or HasUserGroupPermissionAny(
589 *req_perms)(member_name, user=cur_user):
592 *req_perms)(member_name, user=cur_user):
590 self.grant_user_group_permission(
593 self.grant_user_group_permission(
591 repo=repo, group_name=member_id, perm=perm)
594 repo=repo, group_name=member_id, perm=perm)
592
595
593 # delete permissions
596 # delete permissions
594 for member_id, perm, member_type in perm_deletions:
597 for member_id, perm, member_type in perm_deletions:
595 member_id = int(member_id)
598 member_id = int(member_id)
596 if member_type == 'user':
599 if member_type == 'user':
597 self.revoke_user_permission(repo=repo, user=member_id)
600 self.revoke_user_permission(repo=repo, user=member_id)
598 else: # set for user group
601 else: # set for user group
599 # check if we have permissions to alter this usergroup
602 # check if we have permissions to alter this usergroup
600 member_name = UserGroup.get(member_id).users_group_name
603 member_name = UserGroup.get(member_id).users_group_name
601 if not check_perms or HasUserGroupPermissionAny(
604 if not check_perms or HasUserGroupPermissionAny(
602 *req_perms)(member_name, user=cur_user):
605 *req_perms)(member_name, user=cur_user):
603 self.revoke_user_group_permission(
606 self.revoke_user_group_permission(
604 repo=repo, group_name=member_id)
607 repo=repo, group_name=member_id)
605
608
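    # Hypothetical usage sketch for update_permissions() above: the permission
    # tuples mirror the (member_id, perm, member_type) unpacking in the loops,
    # and the ids, permission names and 'admin' user below are placeholders.
    #
    #   RepoModel().update_permissions(
    #       repo='some-repo',
    #       perm_updates=[(2, 'repository.write', 'user')],
    #       perm_additions=[(3, 'repository.read', 'users_group')],
    #       perm_deletions=[(4, 'repository.none', 'user')],
    #       cur_user='admin')
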
    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper into executing celery task for fork creation

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)

    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository; the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove(archive) repo from filesystem
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeletedEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise

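    # Hypothetical usage sketch for delete() above; 'group/some-repo' is a
    # placeholder name. Without forks='detach' or forks='delete', deleting a
    # repository that still has forks raises AttachedForksError.
    #
    #   model = RepoModel()
    #   try:
    #       model.delete('group/some-repo', forks='detach')
    #   except AttachedForksError:
    #       pass  # only reachable when forks is left at its default of None
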
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj

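    # Hypothetical usage sketch: per the docstring above, grant_user_permission()
    # accepts model instances as well as plain ids/names; 'some-repo', 'john'
    # and the permission name below are placeholders.
    #
    #   RepoModel().grant_user_permission(
    #       repo='some-repo', user='john', perm='repository.write')
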
    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')

    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj

    def revoke_user_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm to %s on %s', repo, group_name)
            action_logger_generic(
                'revoked permission from usergroup: {} on repo: {}'.format(
                    group_name, repo), namespace='security.repo')

    def delete_stats(self, repo_name):
        """
        removes stats for given repo

        :param repo_name:
        """
        repo = self._get_repo(repo_name)
        try:
            obj = self.sa.query(Statistics) \
                .filter(Statistics.repository == repo).scalar()
            if obj:
                self.sa.delete(obj)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def add_repo_field(self, repo_name, field_key, field_label, field_value='',
                       field_type='str', field_desc=''):

        repo = self._get_repo(repo_name)

        new_field = RepositoryField()
        new_field.repository = repo
        new_field.field_key = field_key
        new_field.field_type = field_type  # python type
        new_field.field_value = field_value
        new_field.field_desc = field_desc
        new_field.field_label = field_label
        self.sa.add(new_field)
        return new_field

    def delete_repo_field(self, repo_name, field_key):
        repo = self._get_repo(repo_name)
        field = RepositoryField.get_by_key_name(field_key, repo)
        if field:
            self.sa.delete(field)

    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False):
        """
        Makes a repository on the filesystem. It is group aware, meaning it
        will create the repository within its group, and alter the paths
        according to the group location.

        :param repo_name:
        :param repo_type:
        :param repo_group:
        :param clone_uri:
        :param repo_store_location:
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
            # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                bare=True)
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri)

        ScmModel().install_hooks(repo, repo_type=repo_type)

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo

    def _rename_filesystem_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)

    def _delete_filesystem_repo(self, repo):
        """
        Removes the repo from the filesystem. The removal is actually done by
        adding a rm__ prefix to the directory and renaming the internal
        .hg/.git dirs, so the repository is no longer valid for rhodecode;
        it can be undeleted later on by reverting those renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internals so it doesn't get detected as a repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
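
    # Worked example of the removal naming above (hypothetical repo and time):
    # for a repository 'docs/manuals' deleted at 2016-07-01 12:30:45.000123,
    # _d becomes 'rm__20160701_123045_000123__manuals', prefixed with the
    # group path, so the archived copy ends up at
    # '<repos_path>/docs/rm__20160701_123045_000123__manuals'.
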
@@ -1,249 +1,250 @@
# -*- coding: utf-8 -*-

# Import early to make sure things are patched up properly
from setuptools import setup, find_packages

import os
import sys
import platform

if sys.version_info < (2, 7):
    raise Exception('RhodeCode requires Python 2.7 or later')


here = os.path.abspath(os.path.dirname(__file__))


def _get_meta_var(name, data, callback_handler=None):
    import re
    matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data)
    if matches:
        if not callable(callback_handler):
            callback_handler = lambda v: v

        return callback_handler(eval(matches.groups()[0]))

_meta = open(os.path.join(here, 'rhodecode', '__init__.py'), 'rb')
_metadata = _meta.read()
_meta.close()

callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:]))
__version__ = open(os.path.join('rhodecode', 'VERSION')).read().strip()
__license__ = _get_meta_var('__license__', _metadata)
__author__ = _get_meta_var('__author__', _metadata)
__url__ = _get_meta_var('__url__', _metadata)
# defines current platform
__platform__ = platform.system()

# Cygwin has different platform identifiers, but they all contain the
# term "CYGWIN"
is_windows = __platform__ == 'Windows' or 'CYGWIN' in __platform__

requirements = [
    'Babel',
    'Beaker',
    'FormEncode',
    'Mako',
    'Markdown',
    'MarkupSafe',
    'MySQL-python',
    'Paste',
    'PasteDeploy',
    'PasteScript',
    'Pygments',
    'Pylons',
    'Pyro4',
    'Routes',
    'SQLAlchemy',
    'Tempita',
    'URLObject',
    'WebError',
    'WebHelpers',
    'WebHelpers2',
    'WebOb',
    'WebTest',
    'Whoosh',
    'alembic',
    'amqplib',
    'anyjson',
    'appenlight-client',
    'authomatic',
    'backport_ipaddress',
    'celery',
    'colander',
    'decorator',
    'docutils',
    'gunicorn',
    'infrae.cache',
    'ipython',
    'iso8601',
    'kombu',
    'marshmallow',
    'msgpack-python',
    'packaging',
    'psycopg2',
    'py-gfm',
    'pycrypto',
    'pycurl',
    'pyparsing',
    'pyramid',
    'pyramid-debugtoolbar',
    'pyramid-mako',
    'pyramid-beaker',
    'pysqlite',
    'python-dateutil',
    'python-ldap',
    'python-memcached',
    'python-pam',
    'recaptcha-client',
    'repoze.lru',
    'requests',
    'simplejson',
    'waitress',
    'zope.cachedescriptors',
    'dogpile.cache',
    'dogpile.core'
]

if is_windows:
    pass
else:
    requirements.append('psutil')
    requirements.append('py-bcrypt')

test_requirements = [
    'WebTest',
    'configobj',
    'cssselect',
    'flake8',
    'lxml',
    'mock',
    'pytest',
    'pytest-cov',
    'pytest-runner',
]

setup_requirements = [
    'PasteScript',
    'pytest-runner',
]

dependency_links = [
]

classifiers = [
    'Development Status :: 6 - Mature',
    'Environment :: Web Environment',
    'Framework :: Pylons',
    'Intended Audience :: Developers',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2.7',
]


# additional files from the project that go somewhere in the filesystem
# relative to sys.prefix
data_files = []

# additional files that go into the package itself
package_data = {'rhodecode': ['i18n/*/LC_MESSAGES/*.mo', ], }

description = ('RhodeCode is a fast and powerful management tool '
               'for Mercurial and GIT with a built in push/pull server, '
               'full text search and code-review.')

keywords = ' '.join([
    'rhodecode', 'rhodiumcode', 'mercurial', 'git', 'code review',
    'repo groups', 'ldap', 'repository management', 'hgweb replacement',
    'hgwebdir', 'gitweb replacement', 'serving hgweb',
])

# long description
README_FILE = 'README.rst'
CHANGELOG_FILE = 'CHANGES.rst'
try:
    long_description = open(README_FILE).read() + '\n\n' + \
        open(CHANGELOG_FILE).read()

except IOError, err:
    sys.stderr.write(
        '[WARNING] Cannot find file specified as long_description (%s)\n or '
        'changelog (%s) skipping that file' % (README_FILE, CHANGELOG_FILE)
    )
    long_description = description

# packages
packages = find_packages()

paster_commands = [
    'make-config=rhodecode.lib.paster_commands.make_config:Command',
    'setup-rhodecode=rhodecode.lib.paster_commands.setup_rhodecode:Command',
    'update-repoinfo=rhodecode.lib.paster_commands.update_repoinfo:Command',
    'cache-keys=rhodecode.lib.paster_commands.cache_keys:Command',
    'ishell=rhodecode.lib.paster_commands.ishell:Command',
    'upgrade-db=rhodecode.lib.dbmigrate:UpgradeDb',
    'celeryd=rhodecode.lib.celerypylons.commands:CeleryDaemonCommand',
]

setup(
    name='rhodecode-enterprise-ce',
    version=__version__,
    description=description,
    long_description=long_description,
    keywords=keywords,
    license=__license__,
    author=__author__,
    author_email='marcin@rhodecode.com',
    dependency_links=dependency_links,
    url=__url__,
    install_requires=requirements,
    tests_require=test_requirements,
    classifiers=classifiers,
    setup_requires=setup_requirements,
    data_files=data_files,
    packages=packages,
    include_package_data=True,
    package_data=package_data,
    message_extractors={
        'rhodecode': [
            ('**.py', 'python', None),
            ('**.js', 'javascript', None),
            ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
            ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
            ('public/**', 'ignore', None),
        ]
    },
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points={
        'enterprise.plugins1': [
            'crowd=rhodecode.authentication.plugins.auth_crowd:plugin_factory',
            'headers=rhodecode.authentication.plugins.auth_headers:plugin_factory',
            'jasig_cas=rhodecode.authentication.plugins.auth_jasig_cas:plugin_factory',
            'ldap=rhodecode.authentication.plugins.auth_ldap:plugin_factory',
            'pam=rhodecode.authentication.plugins.auth_pam:plugin_factory',
            'rhodecode=rhodecode.authentication.plugins.auth_rhodecode:plugin_factory',
            'token=rhodecode.authentication.plugins.auth_token:plugin_factory',
        ],
        'paste.app_factory': [
            'main=rhodecode.config.middleware:make_pyramid_app',
            'pylons=rhodecode.config.middleware:make_app',
        ],
        'paste.app_install': [
            'main=pylons.util:PylonsInstaller',
            'pylons=pylons.util:PylonsInstaller',
        ],
        'paste.global_paster_command': paster_commands,
        'pytest11': [
            'pylons=rhodecode.tests.pylons_plugin',
            'enterprise=rhodecode.tests.plugin',
        ],
        'console_scripts': [
            'rcserver=rhodecode.rcserver:main',
        ],
        'beaker.backends': [
            'memorylru_base=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerBase',
            'memorylru_debug=rhodecode.lib.memory_lru_debug:MemoryLRUNamespaceManagerDebug'
        ]
    },
)
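
The entry points declared in setup() above are what RhodeCode's plugin and
command discovery hooks into. As a rough illustration only (standard
setuptools/pkg_resources behaviour, not code from this changeset), the
authentication plugin factories registered under 'enterprise.plugins1' could
be enumerated at runtime like this; the 'plugins' name is just a local
variable for the example:

    # discover the auth plugin factories declared in setup.py's entry_points
    import pkg_resources

    plugins = dict(
        (ep.name, ep.load())  # e.g. 'ldap' -> auth_ldap plugin_factory
        for ep in pkg_resources.iter_entry_points('enterprise.plugins1'))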