# Import all the libraries
import numpy as np
import pandas as pd
import nltk
import joblib

from collections import defaultdict
from nltk.corpus import stopwords
from sklearn.preprocessing import LabelEncoder
from sklearn.multioutput import MultiOutputClassifier

# NLTK resources: the French stop-word list and the Punkt tokenizer models
# (nltk.word_tokenize, used below, needs 'punkt')
nltk.download('stopwords')
nltk.download('punkt')


pd.options.display.float_format = "{:,.2f}".format

df = pd.read_csv(r'/Users/tabetibrahim/Desktop/Vona/vef/facture_mobile_mars_avril_2021.csv', sep=";")

# Keep only the columns used below; .copy() avoids SettingWithCopyWarning when
# the cleaned text column is added later
data_set = df[['Tem section 1', 'Tem section 2', 'Tem section 3', 'Description facture operateur']].copy()
data_set.head()

# The three "Tem section" columns are the targets of the multi-output classification
tem_section = data_set[['Tem section 1', 'Tem section 2', 'Tem section 3']]

# Fit one LabelEncoder per target column and keep them in a dict so that the
# integer predictions can later be mapped back to the original labels
encoder_dict = defaultdict(LabelEncoder)

df_encode = tem_section.apply(lambda x: encoder_dict[x.name].fit_transform(x))

# Sanity check: inverting the encoding recovers the original label strings
inverse_transform_lambda = df_encode.apply(lambda x: encoder_dict[x.name].inverse_transform(x))
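
# Optional: list the categories learned for each target column (their number
# and values depend entirely on the CSV contents)
for name, enc in encoder_dict.items():
    print(name, list(enc.classes_))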

# Raw text column and raw targets; the cleaned text and encoded labels built
# below are what is actually used for training
X, y = data_set[['Description facture operateur']], tem_section


from nltk.stem.snowball import SnowballStemmer

# The invoice descriptions are in French, so use the French Snowball stemmer
# (NLTK's PorterStemmer only handles English) together with the French stop-word list
stemming = SnowballStemmer("french")
stops = set(stopwords.words("french"))

def apply_cleaning_function_to_list(X):
    """Clean every text element in X and return the list of cleaned strings."""
    return [clean_text(element) for element in X]


def clean_text(raw_text):
    """Clean one raw text string:
        1) lower-case the text
        2) tokenize (break it down into words)
        3) remove punctuation and non-word tokens
        4) remove stop words
        5) stem the remaining words
        6) re-join the stemmed words into a single string"""

    # Convert to lower case
    text = raw_text.lower()

    # Tokenize
    tokens = nltk.word_tokenize(text)

    # Keep only alphabetic tokens (removes punctuation and numbers);
    # use .isalnum() instead to keep numbers as well
    token_words = [w for w in tokens if w.isalpha()]

    # Remove stop words before stemming, so the stop-word list
    # (which contains unstemmed forms) still matches
    meaningful_words = [w for w in token_words if w not in stops]

    # Stemming
    stemmed_words = [stemming.stem(w) for w in meaningful_words]

    # Re-join the stemmed words and return the cleaned string
    return " ".join(stemmed_words)
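
# Quick sanity check of the cleaning function on a made-up invoice line
# (the string below is illustrative, not taken from the dataset)
sample = "Abonnement mobile 4G - forfait 20 Go, periode du 01/03/2021 au 31/03/2021"
print(clean_text(sample))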




# Get text to clean
text_to_clean = list(data_set['Description facture operateur'])

# Clean text
cleaned_text = apply_cleaning_function_to_list(text_to_clean)

# Add cleaned data back into DataFrame
data_set['cleaned_review'] = cleaned_text

# Remove temporary cleaned_text list (after transfer to DataFrame)
del cleaned_text


from sklearn.model_selection import train_test_split

# Features are the cleaned descriptions, targets the integer-encoded sections
X = list(data_set['cleaned_review'])
y = df_encode
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=42)  # fixed seed for reproducibility
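
# Optional: confirm that texts and label rows stayed aligned after the split
print(len(X_train), len(X_test), y_train.shape, y_test.shape)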

def create_bag_of_words(X):
    from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer

    print('Creating bag of words...')
    # Initialise the "CountVectorizer" object, scikit-learn's bag-of-words tool.
    # Features may be single words or two consecutive words, as set by
    # ngram_range=(1, 2); the vocabulary is capped at the 10,000 most frequent n-grams.
    vectorizer = CountVectorizer(analyzer="word",
                                 tokenizer=None,
                                 preprocessor=None,
                                 stop_words=None,
                                 ngram_range=(1, 2),
                                 max_features=10000)

    # fit_transform() does two things: it fits the model and learns the
    # vocabulary, then transforms the training data into feature vectors.
    # The input is a list of strings; the output is a sparse matrix.
    train_data_features = vectorizer.fit_transform(X)

    # Convert to a dense NumPy array for ease of handling
    train_data_features = train_data_features.toarray()

    # TF-IDF transform of the raw counts
    tfidf = TfidfTransformer()
    tfidf_features = tfidf.fit_transform(train_data_features).toarray()

    # Words (and bigrams) in the vocabulary
    # (use vectorizer.get_feature_names() on scikit-learn < 1.0)
    vocab = vectorizer.get_feature_names_out()

    return vectorizer, vocab, train_data_features, tfidf_features, tfidf


vectorizer, vocab, train_data_features, tfidf_features, tfidf  = \
    create_bag_of_words(X_train)
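
# Optional sanity check: size of the extracted vocabulary and a few sample
# entries (the exact terms depend on the invoice descriptions)
print(len(vocab), 'features extracted')
print(list(vocab[:10]))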

def train_logistic_regression(features, label):
    print("Training the logistic regression model...")
    from sklearn.linear_model import LogisticRegression

    # One logistic-regression classifier per "Tem section" column,
    # wrapped in a MultiOutputClassifier
    ml_model = MultiOutputClassifier(
        LogisticRegression(C=0.1, solver='newton-cg', penalty='l2',
                           multi_class='multinomial'))

    ml_model.fit(features, label)

    print('Finished')
    return ml_model

ml_model = train_logistic_regression(tfidf_features, y_train)
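
# The regularisation strength C=0.1 above is fixed by hand; a minimal sketch of
# tuning it with GridSearchCV on a single target column (stratified CV does not
# apply to multi-output targets, and this assumes every category of
# 'Tem section 1' appears at least five times in the training set)
from sklearn.model_selection import GridSearchCV, RepeatedStratifiedKFold
from sklearn.linear_model import LogisticRegression

param_grid = {'C': [0.01, 0.1, 1.0, 10.0]}
cv = RepeatedStratifiedKFold(n_splits=5, n_repeats=2, random_state=42)
grid = GridSearchCV(LogisticRegression(solver='newton-cg', max_iter=1000),
                    param_grid, cv=cv, scoring='accuracy', n_jobs=-1)
# grid.fit(tfidf_features, y_train['Tem section 1'])
# print(grid.best_params_)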


# Transform the test set with the vectorizer and TF-IDF transformer that were
# fitted on the training data (transform only; do not re-fit on the test set)
test_data_features = vectorizer.transform(X_test)
# Convert to numpy array
test_data_features = test_data_features.toarray()

test_data_tfidf_features = tfidf.transform(test_data_features)
# Convert to numpy array
test_data_tfidf_features = test_data_tfidf_features.toarray()


predicted_y = ml_model.predict(test_data_tfidf_features)
correctly_identified_y = predicted_y == np.asarray(y_test)
# Element-wise accuracy across the three label columns, in percent
accuracy = np.mean(correctly_identified_y) * 100
print('Accuracy: {:.2f}%'.format(accuracy))
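
# Map the integer predictions back to the original label strings with the
# per-column encoders fitted earlier, then persist the fitted pieces with
# joblib so they can be reused without retraining (file names are illustrative)
predicted_labels = pd.DataFrame(predicted_y, columns=y_test.columns)
decoded_labels = predicted_labels.apply(lambda col: encoder_dict[col.name].inverse_transform(col))
print(decoded_labels.head())

joblib.dump(ml_model, 'tem_section_model.joblib')
joblib.dump(vectorizer, 'tem_section_vectorizer.joblib')
joblib.dump(tfidf, 'tem_section_tfidf.joblib')
joblib.dump(dict(encoder_dict), 'tem_section_label_encoders.joblib')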
1058 1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1059 1059 };
1060 1060 meta = {
1061 1061 license = [ pkgs.lib.licenses.mit ];
1062 1062 };
1063 1063 };
1064 1064 "msgpack-python" = super.buildPythonPackage {
1065 1065 name = "msgpack-python-0.5.6";
1066 1066 doCheck = false;
1067 1067 src = fetchurl {
1068 1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1069 1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1070 1070 };
1071 1071 meta = {
1072 1072 license = [ pkgs.lib.licenses.asl20 ];
1073 1073 };
1074 1074 };
1075 1075 "mysql-python" = super.buildPythonPackage {
1076 1076 name = "mysql-python-1.2.5";
1077 1077 doCheck = false;
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1080 1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.gpl1 ];
1084 1084 };
1085 1085 };
1086 1086 "nbconvert" = super.buildPythonPackage {
1087 1087 name = "nbconvert-5.3.1";
1088 1088 doCheck = false;
1089 1089 propagatedBuildInputs = [
1090 1090 self."mistune"
1091 1091 self."jinja2"
1092 1092 self."pygments"
1093 1093 self."traitlets"
1094 1094 self."jupyter-core"
1095 1095 self."nbformat"
1096 1096 self."entrypoints"
1097 1097 self."bleach"
1098 1098 self."pandocfilters"
1099 1099 self."testpath"
1100 1100 ];
1101 1101 src = fetchurl {
1102 1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1103 1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1104 1104 };
1105 1105 meta = {
1106 1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 1107 };
1108 1108 };
1109 1109 "nbformat" = super.buildPythonPackage {
1110 1110 name = "nbformat-4.4.0";
1111 1111 doCheck = false;
1112 1112 propagatedBuildInputs = [
1113 1113 self."ipython-genutils"
1114 1114 self."traitlets"
1115 1115 self."jsonschema"
1116 1116 self."jupyter-core"
1117 1117 ];
1118 1118 src = fetchurl {
1119 1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1120 1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1121 1121 };
1122 1122 meta = {
1123 1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1124 1124 };
1125 1125 };
1126 1126 "packaging" = super.buildPythonPackage {
1127 1127 name = "packaging-20.3";
1128 1128 doCheck = false;
1129 1129 propagatedBuildInputs = [
1130 1130 self."pyparsing"
1131 1131 self."six"
1132 1132 ];
1133 1133 src = fetchurl {
1134 1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1135 1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1136 1136 };
1137 1137 meta = {
1138 1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1139 1139 };
1140 1140 };
1141 1141 "pandocfilters" = super.buildPythonPackage {
1142 1142 name = "pandocfilters-1.4.2";
1143 1143 doCheck = false;
1144 1144 src = fetchurl {
1145 1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1146 1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1147 1147 };
1148 1148 meta = {
1149 1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1150 1150 };
1151 1151 };
1152 1152 "paste" = super.buildPythonPackage {
1153 1153 name = "paste-3.4.0";
1154 1154 doCheck = false;
1155 1155 propagatedBuildInputs = [
1156 1156 self."six"
1157 1157 ];
1158 1158 src = fetchurl {
1159 1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1160 1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1161 1161 };
1162 1162 meta = {
1163 1163 license = [ pkgs.lib.licenses.mit ];
1164 1164 };
1165 1165 };
1166 1166 "pastedeploy" = super.buildPythonPackage {
1167 1167 name = "pastedeploy-2.1.0";
1168 1168 doCheck = false;
1169 1169 src = fetchurl {
1170 1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1171 1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1172 1172 };
1173 1173 meta = {
1174 1174 license = [ pkgs.lib.licenses.mit ];
1175 1175 };
1176 1176 };
1177 1177 "pastescript" = super.buildPythonPackage {
1178 1178 name = "pastescript-3.2.0";
1179 1179 doCheck = false;
1180 1180 propagatedBuildInputs = [
1181 1181 self."paste"
1182 1182 self."pastedeploy"
1183 1183 self."six"
1184 1184 ];
1185 1185 src = fetchurl {
1186 1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1187 1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1188 1188 };
1189 1189 meta = {
1190 1190 license = [ pkgs.lib.licenses.mit ];
1191 1191 };
1192 1192 };
1193 1193 "pathlib2" = super.buildPythonPackage {
1194 1194 name = "pathlib2-2.3.5";
1195 1195 doCheck = false;
1196 1196 propagatedBuildInputs = [
1197 1197 self."six"
1198 1198 self."scandir"
1199 1199 ];
1200 1200 src = fetchurl {
1201 1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1202 1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1203 1203 };
1204 1204 meta = {
1205 1205 license = [ pkgs.lib.licenses.mit ];
1206 1206 };
1207 1207 };
1208 1208 "peppercorn" = super.buildPythonPackage {
1209 1209 name = "peppercorn-0.6";
1210 1210 doCheck = false;
1211 1211 src = fetchurl {
1212 1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1213 1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1214 1214 };
1215 1215 meta = {
1216 1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1217 1217 };
1218 1218 };
1219 1219 "pexpect" = super.buildPythonPackage {
1220 1220 name = "pexpect-4.8.0";
1221 1221 doCheck = false;
1222 1222 propagatedBuildInputs = [
1223 1223 self."ptyprocess"
1224 1224 ];
1225 1225 src = fetchurl {
1226 1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1227 1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1228 1228 };
1229 1229 meta = {
1230 1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1231 1231 };
1232 1232 };
1233 1233 "pickleshare" = super.buildPythonPackage {
1234 1234 name = "pickleshare-0.7.5";
1235 1235 doCheck = false;
1236 1236 propagatedBuildInputs = [
1237 1237 self."pathlib2"
1238 1238 ];
1239 1239 src = fetchurl {
1240 1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1241 1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1242 1242 };
1243 1243 meta = {
1244 1244 license = [ pkgs.lib.licenses.mit ];
1245 1245 };
1246 1246 };
1247 1247 "plaster" = super.buildPythonPackage {
1248 1248 name = "plaster-1.0";
1249 1249 doCheck = false;
1250 1250 propagatedBuildInputs = [
1251 1251 self."setuptools"
1252 1252 ];
1253 1253 src = fetchurl {
1254 1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1255 1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1256 1256 };
1257 1257 meta = {
1258 1258 license = [ pkgs.lib.licenses.mit ];
1259 1259 };
1260 1260 };
1261 1261 "plaster-pastedeploy" = super.buildPythonPackage {
1262 1262 name = "plaster-pastedeploy-0.7";
1263 1263 doCheck = false;
1264 1264 propagatedBuildInputs = [
1265 1265 self."pastedeploy"
1266 1266 self."plaster"
1267 1267 ];
1268 1268 src = fetchurl {
1269 1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1270 1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1271 1271 };
1272 1272 meta = {
1273 1273 license = [ pkgs.lib.licenses.mit ];
1274 1274 };
1275 1275 };
1276 1276 "pluggy" = super.buildPythonPackage {
1277 1277 name = "pluggy-0.13.1";
1278 1278 doCheck = false;
1279 1279 propagatedBuildInputs = [
1280 1280 self."importlib-metadata"
1281 1281 ];
1282 1282 src = fetchurl {
1283 1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1284 1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1285 1285 };
1286 1286 meta = {
1287 1287 license = [ pkgs.lib.licenses.mit ];
1288 1288 };
1289 1289 };
1290 1290 "premailer" = super.buildPythonPackage {
1291 1291 name = "premailer-3.6.1";
1292 1292 doCheck = false;
1293 1293 propagatedBuildInputs = [
1294 1294 self."lxml"
1295 1295 self."cssselect"
1296 1296 self."cssutils"
1297 1297 self."requests"
1298 1298 self."cachetools"
1299 1299 ];
1300 1300 src = fetchurl {
1301 1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1302 1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1303 1303 };
1304 1304 meta = {
1305 1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1306 1306 };
1307 1307 };
1308 1308 "prompt-toolkit" = super.buildPythonPackage {
1309 1309 name = "prompt-toolkit-1.0.18";
1310 1310 doCheck = false;
1311 1311 propagatedBuildInputs = [
1312 1312 self."six"
1313 1313 self."wcwidth"
1314 1314 ];
1315 1315 src = fetchurl {
1316 1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1317 1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1318 1318 };
1319 1319 meta = {
1320 1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1321 1321 };
1322 1322 };
1323 1323 "psutil" = super.buildPythonPackage {
1324 1324 name = "psutil-5.7.0";
1325 1325 doCheck = false;
1326 1326 src = fetchurl {
1327 1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1328 1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1329 1329 };
1330 1330 meta = {
1331 1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1332 1332 };
1333 1333 };
1334 1334 "psycopg2" = super.buildPythonPackage {
1335 1335 name = "psycopg2-2.8.4";
1336 1336 doCheck = false;
1337 1337 src = fetchurl {
1338 1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1339 1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1340 1340 };
1341 1341 meta = {
1342 1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1343 1343 };
1344 1344 };
1345 1345 "ptyprocess" = super.buildPythonPackage {
1346 1346 name = "ptyprocess-0.6.0";
1347 1347 doCheck = false;
1348 1348 src = fetchurl {
1349 1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1350 1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1351 1351 };
1352 1352 meta = {
1353 1353 license = [ ];
1354 1354 };
1355 1355 };
1356 1356 "py" = super.buildPythonPackage {
1357 1357 name = "py-1.8.0";
1358 1358 doCheck = false;
1359 1359 src = fetchurl {
1360 1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1361 1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1362 1362 };
1363 1363 meta = {
1364 1364 license = [ pkgs.lib.licenses.mit ];
1365 1365 };
1366 1366 };
1367 1367 "py-bcrypt" = super.buildPythonPackage {
1368 1368 name = "py-bcrypt-0.4";
1369 1369 doCheck = false;
1370 1370 src = fetchurl {
1371 1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1372 1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1373 1373 };
1374 1374 meta = {
1375 1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1376 1376 };
1377 1377 };
1378 1378 "py-gfm" = super.buildPythonPackage {
1379 1379 name = "py-gfm-0.1.4";
1380 1380 doCheck = false;
1381 1381 propagatedBuildInputs = [
1382 1382 self."setuptools"
1383 1383 self."markdown"
1384 1384 ];
1385 1385 src = fetchurl {
1386 1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1387 1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1388 1388 };
1389 1389 meta = {
1390 1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1391 1391 };
1392 1392 };
1393 1393 "pyasn1" = super.buildPythonPackage {
1394 1394 name = "pyasn1-0.4.8";
1395 1395 doCheck = false;
1396 1396 src = fetchurl {
1397 1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1398 1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1399 1399 };
1400 1400 meta = {
1401 1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1402 1402 };
1403 1403 };
1404 1404 "pyasn1-modules" = super.buildPythonPackage {
1405 1405 name = "pyasn1-modules-0.2.6";
1406 1406 doCheck = false;
1407 1407 propagatedBuildInputs = [
1408 1408 self."pyasn1"
1409 1409 ];
1410 1410 src = fetchurl {
1411 1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1412 1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1413 1413 };
1414 1414 meta = {
1415 1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1416 1416 };
1417 1417 };
1418 1418 "pycparser" = super.buildPythonPackage {
1419 1419 name = "pycparser-2.20";
1420 1420 doCheck = false;
1421 1421 src = fetchurl {
1422 1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1423 1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1424 1424 };
1425 1425 meta = {
1426 1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1427 1427 };
1428 1428 };
1429 1429 "pycrypto" = super.buildPythonPackage {
1430 1430 name = "pycrypto-2.6.1";
1431 1431 doCheck = false;
1432 1432 src = fetchurl {
1433 1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1434 1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1435 1435 };
1436 1436 meta = {
1437 1437 license = [ pkgs.lib.licenses.publicDomain ];
1438 1438 };
1439 1439 };
1440 1440 "pycurl" = super.buildPythonPackage {
1441 1441 name = "pycurl-7.43.0.3";
1442 1442 doCheck = false;
1443 1443 src = fetchurl {
1444 1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1445 1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1446 1446 };
1447 1447 meta = {
1448 1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1449 1449 };
1450 1450 };
1451 1451 "pygments" = super.buildPythonPackage {
1452 1452 name = "pygments-2.4.2";
1453 1453 doCheck = false;
1454 1454 src = fetchurl {
1455 1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1456 1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1457 1457 };
1458 1458 meta = {
1459 1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1460 1460 };
1461 1461 };
1462 1462 "pymysql" = super.buildPythonPackage {
1463 1463 name = "pymysql-0.8.1";
1464 1464 doCheck = false;
1465 1465 src = fetchurl {
1466 1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1467 1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1468 1468 };
1469 1469 meta = {
1470 1470 license = [ pkgs.lib.licenses.mit ];
1471 1471 };
1472 1472 };
1473 1473 "pyotp" = super.buildPythonPackage {
1474 1474 name = "pyotp-2.3.0";
1475 1475 doCheck = false;
1476 1476 src = fetchurl {
1477 1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1478 1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1479 1479 };
1480 1480 meta = {
1481 1481 license = [ pkgs.lib.licenses.mit ];
1482 1482 };
1483 1483 };
1484 1484 "pyparsing" = super.buildPythonPackage {
1485 1485 name = "pyparsing-2.4.7";
1486 1486 doCheck = false;
1487 1487 src = fetchurl {
1488 1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1489 1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1490 1490 };
1491 1491 meta = {
1492 1492 license = [ pkgs.lib.licenses.mit ];
1493 1493 };
1494 1494 };
1495 1495 "pyramid" = super.buildPythonPackage {
1496 1496 name = "pyramid-1.10.4";
1497 1497 doCheck = false;
1498 1498 propagatedBuildInputs = [
1499 1499 self."hupper"
1500 1500 self."plaster"
1501 1501 self."plaster-pastedeploy"
1502 1502 self."setuptools"
1503 1503 self."translationstring"
1504 1504 self."venusian"
1505 1505 self."webob"
1506 1506 self."zope.deprecation"
1507 1507 self."zope.interface"
1508 1508 self."repoze.lru"
1509 1509 ];
1510 1510 src = fetchurl {
1511 1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1512 1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1513 1513 };
1514 1514 meta = {
1515 1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1516 1516 };
1517 1517 };
1518 1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1519 1519 name = "pyramid-debugtoolbar-4.6.1";
1520 1520 doCheck = false;
1521 1521 propagatedBuildInputs = [
1522 1522 self."pyramid"
1523 1523 self."pyramid-mako"
1524 1524 self."repoze.lru"
1525 1525 self."pygments"
1526 1526 self."ipaddress"
1527 1527 ];
1528 1528 src = fetchurl {
1529 1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1530 1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1531 1531 };
1532 1532 meta = {
1533 1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1534 1534 };
1535 1535 };
1536 1536 "pyramid-jinja2" = super.buildPythonPackage {
1537 1537 name = "pyramid-jinja2-2.7";
1538 1538 doCheck = false;
1539 1539 propagatedBuildInputs = [
1540 1540 self."pyramid"
1541 1541 self."zope.deprecation"
1542 1542 self."jinja2"
1543 1543 self."markupsafe"
1544 1544 ];
1545 1545 src = fetchurl {
1546 1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1547 1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1548 1548 };
1549 1549 meta = {
1550 1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1551 1551 };
1552 1552 };
1553 1553 "pyramid-apispec" = super.buildPythonPackage {
1554 1554 name = "pyramid-apispec-0.3.2";
1555 1555 doCheck = false;
1556 1556 propagatedBuildInputs = [
1557 1557 self."apispec"
1558 1558 ];
1559 1559 src = fetchurl {
1560 1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 1562 };
1563 1563 meta = {
1564 1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 1565 };
1566 1566 };
1567 1567 "pyramid-mailer" = super.buildPythonPackage {
1568 1568 name = "pyramid-mailer-0.15.1";
1569 1569 doCheck = false;
1570 1570 propagatedBuildInputs = [
1571 1571 self."pyramid"
1572 1572 self."repoze.sendmail"
1573 1573 self."transaction"
1574 1574 ];
1575 1575 src = fetchurl {
1576 1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1577 1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1578 1578 };
1579 1579 meta = {
1580 1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1581 1581 };
1582 1582 };
1583 1583 "pyramid-mako" = super.buildPythonPackage {
1584 1584 name = "pyramid-mako-1.1.0";
1585 1585 doCheck = false;
1586 1586 propagatedBuildInputs = [
1587 1587 self."pyramid"
1588 1588 self."mako"
1589 1589 ];
1590 1590 src = fetchurl {
1591 1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1592 1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1593 1593 };
1594 1594 meta = {
1595 1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1596 1596 };
1597 1597 };
1598 1598 "pysqlite" = super.buildPythonPackage {
1599 1599 name = "pysqlite-2.8.3";
1600 1600 doCheck = false;
1601 1601 src = fetchurl {
1602 1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1603 1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1604 1604 };
1605 1605 meta = {
1606 1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1607 1607 };
1608 1608 };
1609 1609 "pytest" = super.buildPythonPackage {
1610 1610 name = "pytest-4.6.5";
1611 1611 doCheck = false;
1612 1612 propagatedBuildInputs = [
1613 1613 self."py"
1614 1614 self."six"
1615 1615 self."packaging"
1616 1616 self."attrs"
1617 1617 self."atomicwrites"
1618 1618 self."pluggy"
1619 1619 self."importlib-metadata"
1620 1620 self."wcwidth"
1621 1621 self."funcsigs"
1622 1622 self."pathlib2"
1623 1623 self."more-itertools"
1624 1624 ];
1625 1625 src = fetchurl {
1626 1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1627 1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1628 1628 };
1629 1629 meta = {
1630 1630 license = [ pkgs.lib.licenses.mit ];
1631 1631 };
1632 1632 };
1633 1633 "pytest-cov" = super.buildPythonPackage {
1634 1634 name = "pytest-cov-2.7.1";
1635 1635 doCheck = false;
1636 1636 propagatedBuildInputs = [
1637 1637 self."pytest"
1638 1638 self."coverage"
1639 1639 ];
1640 1640 src = fetchurl {
1641 1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1642 1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1643 1643 };
1644 1644 meta = {
1645 1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1646 1646 };
1647 1647 };
1648 1648 "pytest-profiling" = super.buildPythonPackage {
1649 1649 name = "pytest-profiling-1.7.0";
1650 1650 doCheck = false;
1651 1651 propagatedBuildInputs = [
1652 1652 self."six"
1653 1653 self."pytest"
1654 1654 self."gprof2dot"
1655 1655 ];
1656 1656 src = fetchurl {
1657 1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1658 1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1659 1659 };
1660 1660 meta = {
1661 1661 license = [ pkgs.lib.licenses.mit ];
1662 1662 };
1663 1663 };
1664 1664 "pytest-runner" = super.buildPythonPackage {
1665 1665 name = "pytest-runner-5.1";
1666 1666 doCheck = false;
1667 1667 src = fetchurl {
1668 1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1669 1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1670 1670 };
1671 1671 meta = {
1672 1672 license = [ pkgs.lib.licenses.mit ];
1673 1673 };
1674 1674 };
1675 1675 "pytest-sugar" = super.buildPythonPackage {
1676 1676 name = "pytest-sugar-0.9.2";
1677 1677 doCheck = false;
1678 1678 propagatedBuildInputs = [
1679 1679 self."pytest"
1680 1680 self."termcolor"
1681 1681 self."packaging"
1682 1682 ];
1683 1683 src = fetchurl {
1684 1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1685 1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1686 1686 };
1687 1687 meta = {
1688 1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1689 1689 };
1690 1690 };
1691 1691 "pytest-timeout" = super.buildPythonPackage {
1692 1692 name = "pytest-timeout-1.3.3";
1693 1693 doCheck = false;
1694 1694 propagatedBuildInputs = [
1695 1695 self."pytest"
1696 1696 ];
1697 1697 src = fetchurl {
1698 1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1699 1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1700 1700 };
1701 1701 meta = {
1702 1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1703 1703 };
1704 1704 };
1705 1705 "python-dateutil" = super.buildPythonPackage {
1706 1706 name = "python-dateutil-2.8.1";
1707 1707 doCheck = false;
1708 1708 propagatedBuildInputs = [
1709 1709 self."six"
1710 1710 ];
1711 1711 src = fetchurl {
1712 1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1713 1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1714 1714 };
1715 1715 meta = {
1716 1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1717 1717 };
1718 1718 };
1719 1719 "python-editor" = super.buildPythonPackage {
1720 1720 name = "python-editor-1.0.4";
1721 1721 doCheck = false;
1722 1722 src = fetchurl {
1723 1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1724 1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1725 1725 };
1726 1726 meta = {
1727 1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1728 1728 };
1729 1729 };
1730 1730 "python-ldap" = super.buildPythonPackage {
1731 1731 name = "python-ldap-3.2.0";
1732 1732 doCheck = false;
1733 1733 propagatedBuildInputs = [
1734 1734 self."pyasn1"
1735 1735 self."pyasn1-modules"
1736 1736 ];
1737 1737 src = fetchurl {
1738 1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1739 1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1740 1740 };
1741 1741 meta = {
1742 1742 license = [ pkgs.lib.licenses.psfl ];
1743 1743 };
1744 1744 };
1745 1745 "python-memcached" = super.buildPythonPackage {
1746 1746 name = "python-memcached-1.59";
1747 1747 doCheck = false;
1748 1748 propagatedBuildInputs = [
1749 1749 self."six"
1750 1750 ];
1751 1751 src = fetchurl {
1752 1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1753 1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1754 1754 };
1755 1755 meta = {
1756 1756 license = [ pkgs.lib.licenses.psfl ];
1757 1757 };
1758 1758 };
1759 1759 "python-pam" = super.buildPythonPackage {
1760 1760 name = "python-pam-1.8.4";
1761 1761 doCheck = false;
1762 1762 src = fetchurl {
1763 1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1764 1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1765 1765 };
1766 1766 meta = {
1767 1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1768 1768 };
1769 1769 };
1770 1770 "python-saml" = super.buildPythonPackage {
1771 1771 name = "python-saml-2.4.2";
1772 1772 doCheck = false;
1773 1773 propagatedBuildInputs = [
1774 1774 self."dm.xmlsec.binding"
1775 1775 self."isodate"
1776 1776 self."defusedxml"
1777 1777 ];
1778 1778 src = fetchurl {
1779 1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1780 1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1781 1781 };
1782 1782 meta = {
1783 1783 license = [ pkgs.lib.licenses.mit ];
1784 1784 };
1785 1785 };
1786 1786 "pytz" = super.buildPythonPackage {
1787 1787 name = "pytz-2019.3";
1788 1788 doCheck = false;
1789 1789 src = fetchurl {
1790 1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1791 1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1792 1792 };
1793 1793 meta = {
1794 1794 license = [ pkgs.lib.licenses.mit ];
1795 1795 };
1796 1796 };
1797 1797 "pyzmq" = super.buildPythonPackage {
1798 1798 name = "pyzmq-14.6.0";
1799 1799 doCheck = false;
1800 1800 src = fetchurl {
1801 1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1802 1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1803 1803 };
1804 1804 meta = {
1805 1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1806 1806 };
1807 1807 };
1808 1808 "PyYAML" = super.buildPythonPackage {
1809 1809 name = "PyYAML-5.3.1";
1810 1810 doCheck = false;
1811 1811 src = fetchurl {
1812 1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 1814 };
1815 1815 meta = {
1816 1816 license = [ pkgs.lib.licenses.mit ];
1817 1817 };
1818 1818 };
1819 1819 "regex" = super.buildPythonPackage {
1820 1820 name = "regex-2020.9.27";
1821 1821 doCheck = false;
1822 1822 src = fetchurl {
1823 1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1824 1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1825 1825 };
1826 1826 meta = {
1827 1827 license = [ pkgs.lib.licenses.psfl ];
1828 1828 };
1829 1829 };
1830 1830 "redis" = super.buildPythonPackage {
1831 1831 name = "redis-3.5.3";
1832 1832 doCheck = false;
1833 1833 src = fetchurl {
1834 1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1835 1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1836 1836 };
1837 1837 meta = {
1838 1838 license = [ pkgs.lib.licenses.mit ];
1839 1839 };
1840 1840 };
1841 1841 "repoze.lru" = super.buildPythonPackage {
1842 1842 name = "repoze.lru-0.7";
1843 1843 doCheck = false;
1844 1844 src = fetchurl {
1845 1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1846 1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1847 1847 };
1848 1848 meta = {
1849 1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1850 1850 };
1851 1851 };
1852 1852 "repoze.sendmail" = super.buildPythonPackage {
1853 1853 name = "repoze.sendmail-4.4.1";
1854 1854 doCheck = false;
1855 1855 propagatedBuildInputs = [
1856 1856 self."setuptools"
1857 1857 self."zope.interface"
1858 1858 self."transaction"
1859 1859 ];
1860 1860 src = fetchurl {
1861 1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1862 1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1863 1863 };
1864 1864 meta = {
1865 1865 license = [ pkgs.lib.licenses.zpl21 ];
1866 1866 };
1867 1867 };
1868 1868 "requests" = super.buildPythonPackage {
1869 1869 name = "requests-2.22.0";
1870 1870 doCheck = false;
1871 1871 propagatedBuildInputs = [
1872 1872 self."chardet"
1873 1873 self."idna"
1874 1874 self."urllib3"
1875 1875 self."certifi"
1876 1876 ];
1877 1877 src = fetchurl {
1878 1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1879 1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1880 1880 };
1881 1881 meta = {
1882 1882 license = [ pkgs.lib.licenses.asl20 ];
1883 1883 };
1884 1884 };
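  # Main application package (note added for readability): it is built from the
  # local source tree (src = ./.), pulls the test tooling in via buildInputs, and is
  # the only entry in this listing with doCheck = true. The change in this pull
  # request bumps its version to 4.25.2.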
1885 1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1886 - name = "rhodecode-enterprise-ce-4.25.0";
1886 + name = "rhodecode-enterprise-ce-4.25.2";
1887 1887 buildInputs = [
1888 1888 self."pytest"
1889 1889 self."py"
1890 1890 self."pytest-cov"
1891 1891 self."pytest-sugar"
1892 1892 self."pytest-runner"
1893 1893 self."pytest-profiling"
1894 1894 self."pytest-timeout"
1895 1895 self."gprof2dot"
1896 1896 self."mock"
1897 1897 self."cov-core"
1898 1898 self."coverage"
1899 1899 self."webtest"
1900 1900 self."beautifulsoup4"
1901 1901 self."configobj"
1902 1902 ];
1903 1903 doCheck = true;
1904 1904 propagatedBuildInputs = [
1905 1905 self."amqp"
1906 1906 self."babel"
1907 1907 self."beaker"
1908 1908 self."bleach"
1909 1909 self."celery"
1910 1910 self."channelstream"
1911 1911 self."click"
1912 1912 self."colander"
1913 1913 self."configobj"
1914 1914 self."cssselect"
1915 1915 self."cryptography"
1916 1916 self."decorator"
1917 1917 self."deform"
1918 1918 self."docutils"
1919 1919 self."dogpile.cache"
1920 1920 self."dogpile.core"
1921 1921 self."formencode"
1922 1922 self."future"
1923 1923 self."futures"
1924 1924 self."infrae.cache"
1925 1925 self."iso8601"
1926 1926 self."itsdangerous"
1927 1927 self."kombu"
1928 1928 self."lxml"
1929 1929 self."mako"
1930 1930 self."markdown"
1931 1931 self."markupsafe"
1932 1932 self."msgpack-python"
1933 1933 self."pyotp"
1934 1934 self."packaging"
1935 1935 self."pathlib2"
1936 1936 self."paste"
1937 1937 self."pastedeploy"
1938 1938 self."pastescript"
1939 1939 self."peppercorn"
1940 1940 self."premailer"
1941 1941 self."psutil"
1942 1942 self."py-bcrypt"
1943 1943 self."pycurl"
1944 1944 self."pycrypto"
1945 1945 self."pygments"
1946 1946 self."pyparsing"
1947 1947 self."pyramid-debugtoolbar"
1948 1948 self."pyramid-mako"
1949 1949 self."pyramid"
1950 1950 self."pyramid-mailer"
1951 1951 self."python-dateutil"
1952 1952 self."python-ldap"
1953 1953 self."python-memcached"
1954 1954 self."python-pam"
1955 1955 self."python-saml"
1956 1956 self."pytz"
1957 1957 self."tzlocal"
1958 1958 self."pyzmq"
1959 1959 self."py-gfm"
1960 1960 self."regex"
1961 1961 self."redis"
1962 1962 self."repoze.lru"
1963 1963 self."requests"
1964 1964 self."routes"
1965 1965 self."simplejson"
1966 1966 self."six"
1967 1967 self."sqlalchemy"
1968 1968 self."sshpubkeys"
1969 1969 self."subprocess32"
1970 1970 self."supervisor"
1971 1971 self."translationstring"
1972 1972 self."urllib3"
1973 1973 self."urlobject"
1974 1974 self."venusian"
1975 1975 self."weberror"
1976 1976 self."webhelpers2"
1977 1977 self."webob"
1978 1978 self."whoosh"
1979 1979 self."wsgiref"
1980 1980 self."zope.cachedescriptors"
1981 1981 self."zope.deprecation"
1982 1982 self."zope.event"
1983 1983 self."zope.interface"
1984 1984 self."mysql-python"
1985 1985 self."pymysql"
1986 1986 self."pysqlite"
1987 1987 self."psycopg2"
1988 1988 self."nbconvert"
1989 1989 self."nbformat"
1990 1990 self."jupyter-client"
1991 1991 self."jupyter-core"
1992 1992 self."alembic"
1993 1993 self."invoke"
1994 1994 self."bumpversion"
1995 1995 self."gevent"
1996 1996 self."greenlet"
1997 1997 self."gunicorn"
1998 1998 self."waitress"
1999 1999 self."ipdb"
2000 2000 self."ipython"
2001 2001 self."rhodecode-tools"
2002 2002 self."appenlight-client"
2003 2003 self."pytest"
2004 2004 self."py"
2005 2005 self."pytest-cov"
2006 2006 self."pytest-sugar"
2007 2007 self."pytest-runner"
2008 2008 self."pytest-profiling"
2009 2009 self."pytest-timeout"
2010 2010 self."gprof2dot"
2011 2011 self."mock"
2012 2012 self."cov-core"
2013 2013 self."coverage"
2014 2014 self."webtest"
2015 2015 self."beautifulsoup4"
2016 2016 ];
2017 2017 src = ./.;
2018 2018 meta = {
2019 2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2020 2020 };
2021 2021 };
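  # Unlike the PyPI packages above, rhodecode-tools is fetched from the
  # code.rhodecode.com artifact store; the expected sha256 is embedded in the
  # download URL and verified again via the sha256 attribute below.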
2022 2022 "rhodecode-tools" = super.buildPythonPackage {
2023 2023 name = "rhodecode-tools-1.4.0";
2024 2024 doCheck = false;
2025 2025 propagatedBuildInputs = [
2026 2026 self."click"
2027 2027 self."future"
2028 2028 self."six"
2029 2029 self."mako"
2030 2030 self."markupsafe"
2031 2031 self."requests"
2032 2032 self."urllib3"
2033 2033 self."whoosh"
2034 2034 self."elasticsearch"
2035 2035 self."elasticsearch-dsl"
2036 2036 self."elasticsearch2"
2037 2037 self."elasticsearch1-dsl"
2038 2038 ];
2039 2039 src = fetchurl {
2040 2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2041 2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2042 2042 };
2043 2043 meta = {
2044 2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2045 2045 };
2046 2046 };
2047 2047 "routes" = super.buildPythonPackage {
2048 2048 name = "routes-2.4.1";
2049 2049 doCheck = false;
2050 2050 propagatedBuildInputs = [
2051 2051 self."six"
2052 2052 self."repoze.lru"
2053 2053 ];
2054 2054 src = fetchurl {
2055 2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2056 2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2057 2057 };
2058 2058 meta = {
2059 2059 license = [ pkgs.lib.licenses.mit ];
2060 2060 };
2061 2061 };
2062 2062 "scandir" = super.buildPythonPackage {
2063 2063 name = "scandir-1.10.0";
2064 2064 doCheck = false;
2065 2065 src = fetchurl {
2066 2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2067 2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2068 2068 };
2069 2069 meta = {
2070 2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2071 2071 };
2072 2072 };
2073 2073 "setproctitle" = super.buildPythonPackage {
2074 2074 name = "setproctitle-1.1.10";
2075 2075 doCheck = false;
2076 2076 src = fetchurl {
2077 2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2078 2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2079 2079 };
2080 2080 meta = {
2081 2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2082 2082 };
2083 2083 };
2084 2084 "setuptools" = super.buildPythonPackage {
2085 2085 name = "setuptools-44.1.0";
2086 2086 doCheck = false;
2087 2087 src = fetchurl {
2088 2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2089 2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2090 2090 };
2091 2091 meta = {
2092 2092 license = [ pkgs.lib.licenses.mit ];
2093 2093 };
2094 2094 };
2095 2095 "setuptools-scm" = super.buildPythonPackage {
2096 2096 name = "setuptools-scm-3.5.0";
2097 2097 doCheck = false;
2098 2098 src = fetchurl {
2099 2099 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
2100 2100 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
2101 2101 };
2102 2102 meta = {
2103 2103 license = [ pkgs.lib.licenses.psfl ];
2104 2104 };
2105 2105 };
2106 2106 "simplegeneric" = super.buildPythonPackage {
2107 2107 name = "simplegeneric-0.8.1";
2108 2108 doCheck = false;
2109 2109 src = fetchurl {
2110 2110 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2111 2111 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2112 2112 };
2113 2113 meta = {
2114 2114 license = [ pkgs.lib.licenses.zpl21 ];
2115 2115 };
2116 2116 };
2117 2117 "simplejson" = super.buildPythonPackage {
2118 2118 name = "simplejson-3.16.0";
2119 2119 doCheck = false;
2120 2120 src = fetchurl {
2121 2121 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2122 2122 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2123 2123 };
2124 2124 meta = {
2125 2125 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2126 2126 };
2127 2127 };
2128 2128 "six" = super.buildPythonPackage {
2129 2129 name = "six-1.11.0";
2130 2130 doCheck = false;
2131 2131 src = fetchurl {
2132 2132 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2133 2133 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2134 2134 };
2135 2135 meta = {
2136 2136 license = [ pkgs.lib.licenses.mit ];
2137 2137 };
2138 2138 };
2139 2139 "sqlalchemy" = super.buildPythonPackage {
2140 2140 name = "sqlalchemy-1.3.15";
2141 2141 doCheck = false;
2142 2142 src = fetchurl {
2143 2143 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2144 2144 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2145 2145 };
2146 2146 meta = {
2147 2147 license = [ pkgs.lib.licenses.mit ];
2148 2148 };
2149 2149 };
2150 2150 "sshpubkeys" = super.buildPythonPackage {
2151 2151 name = "sshpubkeys-3.1.0";
2152 2152 doCheck = false;
2153 2153 propagatedBuildInputs = [
2154 2154 self."cryptography"
2155 2155 self."ecdsa"
2156 2156 ];
2157 2157 src = fetchurl {
2158 2158 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2159 2159 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2160 2160 };
2161 2161 meta = {
2162 2162 license = [ pkgs.lib.licenses.bsdOriginal ];
2163 2163 };
2164 2164 };
2165 2165 "subprocess32" = super.buildPythonPackage {
2166 2166 name = "subprocess32-3.5.4";
2167 2167 doCheck = false;
2168 2168 src = fetchurl {
2169 2169 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2170 2170 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2171 2171 };
2172 2172 meta = {
2173 2173 license = [ pkgs.lib.licenses.psfl ];
2174 2174 };
2175 2175 };
2176 2176 "supervisor" = super.buildPythonPackage {
2177 2177 name = "supervisor-4.1.0";
2178 2178 doCheck = false;
2179 2179 src = fetchurl {
2180 2180 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2181 2181 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2182 2182 };
2183 2183 meta = {
2184 2184 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2185 2185 };
2186 2186 };
2187 2187 "tempita" = super.buildPythonPackage {
2188 2188 name = "tempita-0.5.2";
2189 2189 doCheck = false;
2190 2190 src = fetchurl {
2191 2191 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2192 2192 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2193 2193 };
2194 2194 meta = {
2195 2195 license = [ pkgs.lib.licenses.mit ];
2196 2196 };
2197 2197 };
2198 2198 "termcolor" = super.buildPythonPackage {
2199 2199 name = "termcolor-1.1.0";
2200 2200 doCheck = false;
2201 2201 src = fetchurl {
2202 2202 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2203 2203 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2204 2204 };
2205 2205 meta = {
2206 2206 license = [ pkgs.lib.licenses.mit ];
2207 2207 };
2208 2208 };
2209 2209 "testpath" = super.buildPythonPackage {
2210 2210 name = "testpath-0.4.4";
2211 2211 doCheck = false;
2212 2212 src = fetchurl {
2213 2213 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2214 2214 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2215 2215 };
2216 2216 meta = {
2217 2217 license = [ ];
2218 2218 };
2219 2219 };
2220 2220 "traitlets" = super.buildPythonPackage {
2221 2221 name = "traitlets-4.3.3";
2222 2222 doCheck = false;
2223 2223 propagatedBuildInputs = [
2224 2224 self."ipython-genutils"
2225 2225 self."six"
2226 2226 self."decorator"
2227 2227 self."enum34"
2228 2228 ];
2229 2229 src = fetchurl {
2230 2230 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2231 2231 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2232 2232 };
2233 2233 meta = {
2234 2234 license = [ pkgs.lib.licenses.bsdOriginal ];
2235 2235 };
2236 2236 };
2237 2237 "transaction" = super.buildPythonPackage {
2238 2238 name = "transaction-2.4.0";
2239 2239 doCheck = false;
2240 2240 propagatedBuildInputs = [
2241 2241 self."zope.interface"
2242 2242 ];
2243 2243 src = fetchurl {
2244 2244 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2245 2245 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2246 2246 };
2247 2247 meta = {
2248 2248 license = [ pkgs.lib.licenses.zpl21 ];
2249 2249 };
2250 2250 };
2251 2251 "translationstring" = super.buildPythonPackage {
2252 2252 name = "translationstring-1.3";
2253 2253 doCheck = false;
2254 2254 src = fetchurl {
2255 2255 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2256 2256 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2257 2257 };
2258 2258 meta = {
2259 2259 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2260 2260 };
2261 2261 };
2262 2262 "tzlocal" = super.buildPythonPackage {
2263 2263 name = "tzlocal-1.5.1";
2264 2264 doCheck = false;
2265 2265 propagatedBuildInputs = [
2266 2266 self."pytz"
2267 2267 ];
2268 2268 src = fetchurl {
2269 2269 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2270 2270 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2271 2271 };
2272 2272 meta = {
2273 2273 license = [ pkgs.lib.licenses.mit ];
2274 2274 };
2275 2275 };
2276 2276 "urllib3" = super.buildPythonPackage {
2277 2277 name = "urllib3-1.25.2";
2278 2278 doCheck = false;
2279 2279 src = fetchurl {
2280 2280 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2281 2281 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2282 2282 };
2283 2283 meta = {
2284 2284 license = [ pkgs.lib.licenses.mit ];
2285 2285 };
2286 2286 };
2287 2287 "urlobject" = super.buildPythonPackage {
2288 2288 name = "urlobject-2.4.3";
2289 2289 doCheck = false;
2290 2290 src = fetchurl {
2291 2291 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2292 2292 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2293 2293 };
2294 2294 meta = {
2295 2295 license = [ pkgs.lib.licenses.publicDomain ];
2296 2296 };
2297 2297 };
2298 2298 "venusian" = super.buildPythonPackage {
2299 2299 name = "venusian-1.2.0";
2300 2300 doCheck = false;
2301 2301 src = fetchurl {
2302 2302 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2303 2303 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2304 2304 };
2305 2305 meta = {
2306 2306 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2307 2307 };
2308 2308 };
2309 2309 "vine" = super.buildPythonPackage {
2310 2310 name = "vine-1.3.0";
2311 2311 doCheck = false;
2312 2312 src = fetchurl {
2313 2313 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2314 2314 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2315 2315 };
2316 2316 meta = {
2317 2317 license = [ pkgs.lib.licenses.bsdOriginal ];
2318 2318 };
2319 2319 };
2320 2320 "waitress" = super.buildPythonPackage {
2321 2321 name = "waitress-1.3.1";
2322 2322 doCheck = false;
2323 2323 src = fetchurl {
2324 2324 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2325 2325 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2326 2326 };
2327 2327 meta = {
2328 2328 license = [ pkgs.lib.licenses.zpl21 ];
2329 2329 };
2330 2330 };
2331 2331 "wcwidth" = super.buildPythonPackage {
2332 2332 name = "wcwidth-0.1.9";
2333 2333 doCheck = false;
2334 2334 src = fetchurl {
2335 2335 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2336 2336 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2337 2337 };
2338 2338 meta = {
2339 2339 license = [ pkgs.lib.licenses.mit ];
2340 2340 };
2341 2341 };
2342 2342 "webencodings" = super.buildPythonPackage {
2343 2343 name = "webencodings-0.5.1";
2344 2344 doCheck = false;
2345 2345 src = fetchurl {
2346 2346 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2347 2347 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2348 2348 };
2349 2349 meta = {
2350 2350 license = [ pkgs.lib.licenses.bsdOriginal ];
2351 2351 };
2352 2352 };
2353 2353 "weberror" = super.buildPythonPackage {
2354 2354 name = "weberror-0.13.1";
2355 2355 doCheck = false;
2356 2356 propagatedBuildInputs = [
2357 2357 self."webob"
2358 2358 self."tempita"
2359 2359 self."pygments"
2360 2360 self."paste"
2361 2361 ];
2362 2362 src = fetchurl {
2363 2363 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2364 2364 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2365 2365 };
2366 2366 meta = {
2367 2367 license = [ pkgs.lib.licenses.mit ];
2368 2368 };
2369 2369 };
2370 2370 "webhelpers2" = super.buildPythonPackage {
2371 2371 name = "webhelpers2-2.0";
2372 2372 doCheck = false;
2373 2373 propagatedBuildInputs = [
2374 2374 self."markupsafe"
2375 2375 self."six"
2376 2376 ];
2377 2377 src = fetchurl {
2378 2378 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2379 2379 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2380 2380 };
2381 2381 meta = {
2382 2382 license = [ pkgs.lib.licenses.mit ];
2383 2383 };
2384 2384 };
2385 2385 "webob" = super.buildPythonPackage {
2386 2386 name = "webob-1.8.5";
2387 2387 doCheck = false;
2388 2388 src = fetchurl {
2389 2389 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2390 2390 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2391 2391 };
2392 2392 meta = {
2393 2393 license = [ pkgs.lib.licenses.mit ];
2394 2394 };
2395 2395 };
2396 2396 "webtest" = super.buildPythonPackage {
2397 2397 name = "webtest-2.0.34";
2398 2398 doCheck = false;
2399 2399 propagatedBuildInputs = [
2400 2400 self."six"
2401 2401 self."webob"
2402 2402 self."waitress"
2403 2403 self."beautifulsoup4"
2404 2404 ];
2405 2405 src = fetchurl {
2406 2406 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2407 2407 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2408 2408 };
2409 2409 meta = {
2410 2410 license = [ pkgs.lib.licenses.mit ];
2411 2411 };
2412 2412 };
2413 2413 "whoosh" = super.buildPythonPackage {
2414 2414 name = "whoosh-2.7.4";
2415 2415 doCheck = false;
2416 2416 src = fetchurl {
2417 2417 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2418 2418 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2419 2419 };
2420 2420 meta = {
2421 2421 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2422 2422 };
2423 2423 };
2424 2424 "ws4py" = super.buildPythonPackage {
2425 2425 name = "ws4py-0.5.1";
2426 2426 doCheck = false;
2427 2427 src = fetchurl {
2428 2428 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2429 2429 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2430 2430 };
2431 2431 meta = {
2432 2432 license = [ pkgs.lib.licenses.bsdOriginal ];
2433 2433 };
2434 2434 };
2435 2435 "wsgiref" = super.buildPythonPackage {
2436 2436 name = "wsgiref-0.1.2";
2437 2437 doCheck = false;
2438 2438 src = fetchurl {
2439 2439 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2440 2440 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2441 2441 };
2442 2442 meta = {
2443 2443 license = [ { fullName = "PSF or ZPL"; } ];
2444 2444 };
2445 2445 };
2446 2446 "zipp" = super.buildPythonPackage {
2447 2447 name = "zipp-1.2.0";
2448 2448 doCheck = false;
2449 2449 propagatedBuildInputs = [
2450 2450 self."contextlib2"
2451 2451 ];
2452 2452 src = fetchurl {
2453 2453 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2454 2454 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2455 2455 };
2456 2456 meta = {
2457 2457 license = [ pkgs.lib.licenses.mit ];
2458 2458 };
2459 2459 };
2460 2460 "zope.cachedescriptors" = super.buildPythonPackage {
2461 2461 name = "zope.cachedescriptors-4.3.1";
2462 2462 doCheck = false;
2463 2463 propagatedBuildInputs = [
2464 2464 self."setuptools"
2465 2465 ];
2466 2466 src = fetchurl {
2467 2467 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2468 2468 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2469 2469 };
2470 2470 meta = {
2471 2471 license = [ pkgs.lib.licenses.zpl21 ];
2472 2472 };
2473 2473 };
2474 2474 "zope.deprecation" = super.buildPythonPackage {
2475 2475 name = "zope.deprecation-4.4.0";
2476 2476 doCheck = false;
2477 2477 propagatedBuildInputs = [
2478 2478 self."setuptools"
2479 2479 ];
2480 2480 src = fetchurl {
2481 2481 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2482 2482 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2483 2483 };
2484 2484 meta = {
2485 2485 license = [ pkgs.lib.licenses.zpl21 ];
2486 2486 };
2487 2487 };
2488 2488 "zope.event" = super.buildPythonPackage {
2489 2489 name = "zope.event-4.4";
2490 2490 doCheck = false;
2491 2491 propagatedBuildInputs = [
2492 2492 self."setuptools"
2493 2493 ];
2494 2494 src = fetchurl {
2495 2495 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2496 2496 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2497 2497 };
2498 2498 meta = {
2499 2499 license = [ pkgs.lib.licenses.zpl21 ];
2500 2500 };
2501 2501 };
2502 2502 "zope.interface" = super.buildPythonPackage {
2503 2503 name = "zope.interface-4.6.0";
2504 2504 doCheck = false;
2505 2505 propagatedBuildInputs = [
2506 2506 self."setuptools"
2507 2507 ];
2508 2508 src = fetchurl {
2509 2509 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2510 2510 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2511 2511 };
2512 2512 meta = {
2513 2513 license = [ pkgs.lib.licenses.zpl21 ];
2514 2514 };
2515 2515 };
2516 2516
2517 2517 ### Test requirements
2518 2518
2519 2519
2520 2520 }
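Each package entry in the generated overlay above pins a single Python sdist by URL plus a sha256 given in Nix's base32 encoding. The short sketch below is an illustration only (it is not part of the pull request): it downloads one of the tarballs pinned above and prints its hex SHA-256. It assumes the requests library is available; comparing the result with the base32 strings in the overlay still needs Nix tooling such as nix-hash --type sha256 --to-base32.

# Hypothetical verification helper -- not RhodeCode code.
import hashlib
import requests

# vine-1.3.0 sdist as pinned in the overlay above
URL = ('https://files.pythonhosted.org/packages/1c/e1/'
       '79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz')

resp = requests.get(URL, timeout=60)
resp.raise_for_status()
# Hex digest; convert with nix-hash before comparing against the base32 pin.
print(hashlib.sha256(resp.content).hexdigest())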
@@ -1,1 +1,1 @@
1 4.25.0
\ No newline at end of file
1 4.25.2
\ No newline at end of file
@@ -1,1055 +1,1084 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 from rhodecode.apps._base import ADMIN_PREFIX
23 23
24 24
25 25 def admin_routes(config):
26 26 """
27 27 Admin prefixed routes
28 28 """
29 29 from rhodecode.apps.admin.views.audit_logs import AdminAuditLogsView
30 from rhodecode.apps.admin.views.artifacts import AdminArtifactsView
30 31 from rhodecode.apps.admin.views.defaults import AdminDefaultSettingsView
31 32 from rhodecode.apps.admin.views.exception_tracker import ExceptionsTrackerView
32 33 from rhodecode.apps.admin.views.main_views import AdminMainView
33 34 from rhodecode.apps.admin.views.open_source_licenses import OpenSourceLicensesAdminSettingsView
34 35 from rhodecode.apps.admin.views.permissions import AdminPermissionsView
35 36 from rhodecode.apps.admin.views.process_management import AdminProcessManagementView
36 37 from rhodecode.apps.admin.views.repo_groups import AdminRepoGroupsView
37 38 from rhodecode.apps.admin.views.repositories import AdminReposView
38 39 from rhodecode.apps.admin.views.sessions import AdminSessionSettingsView
39 40 from rhodecode.apps.admin.views.settings import AdminSettingsView
40 41 from rhodecode.apps.admin.views.svn_config import AdminSvnConfigView
41 42 from rhodecode.apps.admin.views.system_info import AdminSystemInfoSettingsView
42 43 from rhodecode.apps.admin.views.user_groups import AdminUserGroupsView
43 44 from rhodecode.apps.admin.views.users import AdminUsersView, UsersView
44 45
45 46 config.add_route(
46 47 name='admin_audit_logs',
47 48 pattern='/audit_logs')
48 49 config.add_view(
49 50 AdminAuditLogsView,
50 51 attr='admin_audit_logs',
51 52 route_name='admin_audit_logs', request_method='GET',
52 53 renderer='rhodecode:templates/admin/admin_audit_logs.mako')
53 54
54 55 config.add_route(
55 56 name='admin_audit_log_entry',
56 57 pattern='/audit_logs/{audit_log_id}')
57 58 config.add_view(
58 59 AdminAuditLogsView,
59 60 attr='admin_audit_log_entry',
60 61 route_name='admin_audit_log_entry', request_method='GET',
61 62 renderer='rhodecode:templates/admin/admin_audit_log_entry.mako')
62 63
64 # Artifacts EE feature
65 config.add_route(
66 'admin_artifacts',
67 pattern=ADMIN_PREFIX + '/artifacts')
68 config.add_route(
69 'admin_artifacts_show_all',
70 pattern=ADMIN_PREFIX + '/artifacts')
71 config.add_view(
72 AdminArtifactsView,
73 attr='artifacts',
74 route_name='admin_artifacts', request_method='GET',
75 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
76 config.add_view(
77 AdminArtifactsView,
78 attr='artifacts',
79 route_name='admin_artifacts_show_all', request_method='GET',
80 renderer='rhodecode:templates/admin/artifacts/artifacts.mako')
81 # EE views
82 config.add_route(
83 name='admin_artifacts_show_info',
84 pattern=ADMIN_PREFIX + '/artifacts/{uid}')
85 config.add_route(
86 name='admin_artifacts_delete',
87 pattern=ADMIN_PREFIX + '/artifacts/{uid}/delete')
88 config.add_route(
89 name='admin_artifacts_update',
90 pattern=ADMIN_PREFIX + '/artifacts/{uid}/update')
91
63 92 config.add_route(
64 93 name='admin_settings_open_source',
65 94 pattern='/settings/open_source')
66 95 config.add_view(
67 96 OpenSourceLicensesAdminSettingsView,
68 97 attr='open_source_licenses',
69 98 route_name='admin_settings_open_source', request_method='GET',
70 99 renderer='rhodecode:templates/admin/settings/settings.mako')
71 100
72 101 config.add_route(
73 102 name='admin_settings_vcs_svn_generate_cfg',
74 103 pattern='/settings/vcs/svn_generate_cfg')
75 104 config.add_view(
76 105 AdminSvnConfigView,
77 106 attr='vcs_svn_generate_config',
78 107 route_name='admin_settings_vcs_svn_generate_cfg',
79 108 request_method='POST', renderer='json')
80 109
81 110 config.add_route(
82 111 name='admin_settings_system',
83 112 pattern='/settings/system')
84 113 config.add_view(
85 114 AdminSystemInfoSettingsView,
86 115 attr='settings_system_info',
87 116 route_name='admin_settings_system', request_method='GET',
88 117 renderer='rhodecode:templates/admin/settings/settings.mako')
89 118
90 119 config.add_route(
91 120 name='admin_settings_system_update',
92 121 pattern='/settings/system/updates')
93 122 config.add_view(
94 123 AdminSystemInfoSettingsView,
95 124 attr='settings_system_info_check_update',
96 125 route_name='admin_settings_system_update', request_method='GET',
97 126 renderer='rhodecode:templates/admin/settings/settings_system_update.mako')
98 127
99 128 config.add_route(
100 129 name='admin_settings_exception_tracker',
101 130 pattern='/settings/exceptions')
102 131 config.add_view(
103 132 ExceptionsTrackerView,
104 133 attr='browse_exceptions',
105 134 route_name='admin_settings_exception_tracker', request_method='GET',
106 135 renderer='rhodecode:templates/admin/settings/settings.mako')
107 136
108 137 config.add_route(
109 138 name='admin_settings_exception_tracker_delete_all',
110 139 pattern='/settings/exceptions_delete_all')
111 140 config.add_view(
112 141 ExceptionsTrackerView,
113 142 attr='exception_delete_all',
114 143 route_name='admin_settings_exception_tracker_delete_all', request_method='POST',
115 144 renderer='rhodecode:templates/admin/settings/settings.mako')
116 145
117 146 config.add_route(
118 147 name='admin_settings_exception_tracker_show',
119 148 pattern='/settings/exceptions/{exception_id}')
120 149 config.add_view(
121 150 ExceptionsTrackerView,
122 151 attr='exception_show',
123 152 route_name='admin_settings_exception_tracker_show', request_method='GET',
124 153 renderer='rhodecode:templates/admin/settings/settings.mako')
125 154
126 155 config.add_route(
127 156 name='admin_settings_exception_tracker_delete',
128 157 pattern='/settings/exceptions/{exception_id}/delete')
129 158 config.add_view(
130 159 ExceptionsTrackerView,
131 160 attr='exception_delete',
132 161 route_name='admin_settings_exception_tracker_delete', request_method='POST',
133 162 renderer='rhodecode:templates/admin/settings/settings.mako')
134 163
135 164 config.add_route(
136 165 name='admin_settings_sessions',
137 166 pattern='/settings/sessions')
138 167 config.add_view(
139 168 AdminSessionSettingsView,
140 169 attr='settings_sessions',
141 170 route_name='admin_settings_sessions', request_method='GET',
142 171 renderer='rhodecode:templates/admin/settings/settings.mako')
143 172
144 173 config.add_route(
145 174 name='admin_settings_sessions_cleanup',
146 175 pattern='/settings/sessions/cleanup')
147 176 config.add_view(
148 177 AdminSessionSettingsView,
149 178 attr='settings_sessions_cleanup',
150 179 route_name='admin_settings_sessions_cleanup', request_method='POST')
151 180
152 181 config.add_route(
153 182 name='admin_settings_process_management',
154 183 pattern='/settings/process_management')
155 184 config.add_view(
156 185 AdminProcessManagementView,
157 186 attr='process_management',
158 187 route_name='admin_settings_process_management', request_method='GET',
159 188 renderer='rhodecode:templates/admin/settings/settings.mako')
160 189
161 190 config.add_route(
162 191 name='admin_settings_process_management_data',
163 192 pattern='/settings/process_management/data')
164 193 config.add_view(
165 194 AdminProcessManagementView,
166 195 attr='process_management_data',
167 196 route_name='admin_settings_process_management_data', request_method='GET',
168 197 renderer='rhodecode:templates/admin/settings/settings_process_management_data.mako')
169 198
170 199 config.add_route(
171 200 name='admin_settings_process_management_signal',
172 201 pattern='/settings/process_management/signal')
173 202 config.add_view(
174 203 AdminProcessManagementView,
175 204 attr='process_management_signal',
176 205 route_name='admin_settings_process_management_signal',
177 206 request_method='POST', renderer='json_ext')
178 207
179 208 config.add_route(
180 209 name='admin_settings_process_management_master_signal',
181 210 pattern='/settings/process_management/master_signal')
182 211 config.add_view(
183 212 AdminProcessManagementView,
184 213 attr='process_management_master_signal',
185 214 route_name='admin_settings_process_management_master_signal',
186 215 request_method='POST', renderer='json_ext')
187 216
188 217 # default settings
189 218 config.add_route(
190 219 name='admin_defaults_repositories',
191 220 pattern='/defaults/repositories')
192 221 config.add_view(
193 222 AdminDefaultSettingsView,
194 223 attr='defaults_repository_show',
195 224 route_name='admin_defaults_repositories', request_method='GET',
196 225 renderer='rhodecode:templates/admin/defaults/defaults.mako')
197 226
198 227 config.add_route(
199 228 name='admin_defaults_repositories_update',
200 229 pattern='/defaults/repositories/update')
201 230 config.add_view(
202 231 AdminDefaultSettingsView,
203 232 attr='defaults_repository_update',
204 233 route_name='admin_defaults_repositories_update', request_method='POST',
205 234 renderer='rhodecode:templates/admin/defaults/defaults.mako')
206 235
207 236 # admin settings
208 237
209 238 config.add_route(
210 239 name='admin_settings',
211 240 pattern='/settings')
212 241 config.add_view(
213 242 AdminSettingsView,
214 243 attr='settings_global',
215 244 route_name='admin_settings', request_method='GET',
216 245 renderer='rhodecode:templates/admin/settings/settings.mako')
217 246
218 247 config.add_route(
219 248 name='admin_settings_update',
220 249 pattern='/settings/update')
221 250 config.add_view(
222 251 AdminSettingsView,
223 252 attr='settings_global_update',
224 253 route_name='admin_settings_update', request_method='POST',
225 254 renderer='rhodecode:templates/admin/settings/settings.mako')
226 255
227 256 config.add_route(
228 257 name='admin_settings_global',
229 258 pattern='/settings/global')
230 259 config.add_view(
231 260 AdminSettingsView,
232 261 attr='settings_global',
233 262 route_name='admin_settings_global', request_method='GET',
234 263 renderer='rhodecode:templates/admin/settings/settings.mako')
235 264
236 265 config.add_route(
237 266 name='admin_settings_global_update',
238 267 pattern='/settings/global/update')
239 268 config.add_view(
240 269 AdminSettingsView,
241 270 attr='settings_global_update',
242 271 route_name='admin_settings_global_update', request_method='POST',
243 272 renderer='rhodecode:templates/admin/settings/settings.mako')
244 273
245 274 config.add_route(
246 275 name='admin_settings_vcs',
247 276 pattern='/settings/vcs')
248 277 config.add_view(
249 278 AdminSettingsView,
250 279 attr='settings_vcs',
251 280 route_name='admin_settings_vcs', request_method='GET',
252 281 renderer='rhodecode:templates/admin/settings/settings.mako')
253 282
254 283 config.add_route(
255 284 name='admin_settings_vcs_update',
256 285 pattern='/settings/vcs/update')
257 286 config.add_view(
258 287 AdminSettingsView,
259 288 attr='settings_vcs_update',
260 289 route_name='admin_settings_vcs_update', request_method='POST',
261 290 renderer='rhodecode:templates/admin/settings/settings.mako')
262 291
263 292 config.add_route(
264 293 name='admin_settings_vcs_svn_pattern_delete',
265 294 pattern='/settings/vcs/svn_pattern_delete')
266 295 config.add_view(
267 296 AdminSettingsView,
268 297 attr='settings_vcs_delete_svn_pattern',
269 298 route_name='admin_settings_vcs_svn_pattern_delete', request_method='POST',
270 299 renderer='json_ext', xhr=True)
271 300
272 301 config.add_route(
273 302 name='admin_settings_mapping',
274 303 pattern='/settings/mapping')
275 304 config.add_view(
276 305 AdminSettingsView,
277 306 attr='settings_mapping',
278 307 route_name='admin_settings_mapping', request_method='GET',
279 308 renderer='rhodecode:templates/admin/settings/settings.mako')
280 309
281 310 config.add_route(
282 311 name='admin_settings_mapping_update',
283 312 pattern='/settings/mapping/update')
284 313 config.add_view(
285 314 AdminSettingsView,
286 315 attr='settings_mapping_update',
287 316 route_name='admin_settings_mapping_update', request_method='POST',
288 317 renderer='rhodecode:templates/admin/settings/settings.mako')
289 318
290 319 config.add_route(
291 320 name='admin_settings_visual',
292 321 pattern='/settings/visual')
293 322 config.add_view(
294 323 AdminSettingsView,
295 324 attr='settings_visual',
296 325 route_name='admin_settings_visual', request_method='GET',
297 326 renderer='rhodecode:templates/admin/settings/settings.mako')
298 327
299 328 config.add_route(
300 329 name='admin_settings_visual_update',
301 330 pattern='/settings/visual/update')
302 331 config.add_view(
303 332 AdminSettingsView,
304 333 attr='settings_visual_update',
305 334 route_name='admin_settings_visual_update', request_method='POST',
306 335 renderer='rhodecode:templates/admin/settings/settings.mako')
307 336
308 337 config.add_route(
309 338 name='admin_settings_issuetracker',
310 339 pattern='/settings/issue-tracker')
311 340 config.add_view(
312 341 AdminSettingsView,
313 342 attr='settings_issuetracker',
314 343 route_name='admin_settings_issuetracker', request_method='GET',
315 344 renderer='rhodecode:templates/admin/settings/settings.mako')
316 345
317 346 config.add_route(
318 347 name='admin_settings_issuetracker_update',
319 348 pattern='/settings/issue-tracker/update')
320 349 config.add_view(
321 350 AdminSettingsView,
322 351 attr='settings_issuetracker_update',
323 352 route_name='admin_settings_issuetracker_update', request_method='POST',
324 353 renderer='rhodecode:templates/admin/settings/settings.mako')
325 354
326 355 config.add_route(
327 356 name='admin_settings_issuetracker_test',
328 357 pattern='/settings/issue-tracker/test')
329 358 config.add_view(
330 359 AdminSettingsView,
331 360 attr='settings_issuetracker_test',
332 361 route_name='admin_settings_issuetracker_test', request_method='POST',
333 362 renderer='string', xhr=True)
334 363
335 364 config.add_route(
336 365 name='admin_settings_issuetracker_delete',
337 366 pattern='/settings/issue-tracker/delete')
338 367 config.add_view(
339 368 AdminSettingsView,
340 369 attr='settings_issuetracker_delete',
341 370 route_name='admin_settings_issuetracker_delete', request_method='POST',
342 371 renderer='json_ext', xhr=True)
343 372
344 373 config.add_route(
345 374 name='admin_settings_email',
346 375 pattern='/settings/email')
347 376 config.add_view(
348 377 AdminSettingsView,
349 378 attr='settings_email',
350 379 route_name='admin_settings_email', request_method='GET',
351 380 renderer='rhodecode:templates/admin/settings/settings.mako')
352 381
353 382 config.add_route(
354 383 name='admin_settings_email_update',
355 384 pattern='/settings/email/update')
356 385 config.add_view(
357 386 AdminSettingsView,
358 387 attr='settings_email_update',
359 388 route_name='admin_settings_email_update', request_method='POST',
360 389 renderer='rhodecode:templates/admin/settings/settings.mako')
361 390
362 391 config.add_route(
363 392 name='admin_settings_hooks',
364 393 pattern='/settings/hooks')
365 394 config.add_view(
366 395 AdminSettingsView,
367 396 attr='settings_hooks',
368 397 route_name='admin_settings_hooks', request_method='GET',
369 398 renderer='rhodecode:templates/admin/settings/settings.mako')
370 399
371 400 config.add_route(
372 401 name='admin_settings_hooks_update',
373 402 pattern='/settings/hooks/update')
374 403 config.add_view(
375 404 AdminSettingsView,
376 405 attr='settings_hooks_update',
377 406 route_name='admin_settings_hooks_update', request_method='POST',
378 407 renderer='rhodecode:templates/admin/settings/settings.mako')
379 408
380 409 config.add_route(
381 410 name='admin_settings_hooks_delete',
382 411 pattern='/settings/hooks/delete')
383 412 config.add_view(
384 413 AdminSettingsView,
385 414 attr='settings_hooks_update',
386 415 route_name='admin_settings_hooks_delete', request_method='POST',
387 416 renderer='rhodecode:templates/admin/settings/settings.mako')
388 417
389 418 config.add_route(
390 419 name='admin_settings_search',
391 420 pattern='/settings/search')
392 421 config.add_view(
393 422 AdminSettingsView,
394 423 attr='settings_search',
395 424 route_name='admin_settings_search', request_method='GET',
396 425 renderer='rhodecode:templates/admin/settings/settings.mako')
397 426
398 427 config.add_route(
399 428 name='admin_settings_labs',
400 429 pattern='/settings/labs')
401 430 config.add_view(
402 431 AdminSettingsView,
403 432 attr='settings_labs',
404 433 route_name='admin_settings_labs', request_method='GET',
405 434 renderer='rhodecode:templates/admin/settings/settings.mako')
406 435
407 436 config.add_route(
408 437 name='admin_settings_labs_update',
409 438 pattern='/settings/labs/update')
410 439 config.add_view(
411 440 AdminSettingsView,
412 441 attr='settings_labs_update',
413 442 route_name='admin_settings_labs_update', request_method='POST',
414 443 renderer='rhodecode:templates/admin/settings/settings.mako')
415 444
416 445 # Automation EE feature
417 446 config.add_route(
418 447 'admin_settings_automation',
419 448 pattern=ADMIN_PREFIX + '/settings/automation')
420 449 config.add_view(
421 450 AdminSettingsView,
422 451 attr='settings_automation',
423 452 route_name='admin_settings_automation', request_method='GET',
424 453 renderer='rhodecode:templates/admin/settings/settings.mako')
425 454
426 455 # global permissions
427 456
428 457 config.add_route(
429 458 name='admin_permissions_application',
430 459 pattern='/permissions/application')
431 460 config.add_view(
432 461 AdminPermissionsView,
433 462 attr='permissions_application',
434 463 route_name='admin_permissions_application', request_method='GET',
435 464 renderer='rhodecode:templates/admin/permissions/permissions.mako')
436 465
437 466 config.add_route(
438 467 name='admin_permissions_application_update',
439 468 pattern='/permissions/application/update')
440 469 config.add_view(
441 470 AdminPermissionsView,
442 471 attr='permissions_application_update',
443 472 route_name='admin_permissions_application_update', request_method='POST',
444 473 renderer='rhodecode:templates/admin/permissions/permissions.mako')
445 474
446 475 config.add_route(
447 476 name='admin_permissions_global',
448 477 pattern='/permissions/global')
449 478 config.add_view(
450 479 AdminPermissionsView,
451 480 attr='permissions_global',
452 481 route_name='admin_permissions_global', request_method='GET',
453 482 renderer='rhodecode:templates/admin/permissions/permissions.mako')
454 483
455 484 config.add_route(
456 485 name='admin_permissions_global_update',
457 486 pattern='/permissions/global/update')
458 487 config.add_view(
459 488 AdminPermissionsView,
460 489 attr='permissions_global_update',
461 490 route_name='admin_permissions_global_update', request_method='POST',
462 491 renderer='rhodecode:templates/admin/permissions/permissions.mako')
463 492
464 493 config.add_route(
465 494 name='admin_permissions_object',
466 495 pattern='/permissions/object')
467 496 config.add_view(
468 497 AdminPermissionsView,
469 498 attr='permissions_objects',
470 499 route_name='admin_permissions_object', request_method='GET',
471 500 renderer='rhodecode:templates/admin/permissions/permissions.mako')
472 501
473 502 config.add_route(
474 503 name='admin_permissions_object_update',
475 504 pattern='/permissions/object/update')
476 505 config.add_view(
477 506 AdminPermissionsView,
478 507 attr='permissions_objects_update',
479 508 route_name='admin_permissions_object_update', request_method='POST',
480 509 renderer='rhodecode:templates/admin/permissions/permissions.mako')
481 510
482 511 # Branch perms EE feature
483 512 config.add_route(
484 513 name='admin_permissions_branch',
485 514 pattern='/permissions/branch')
486 515 config.add_view(
487 516 AdminPermissionsView,
488 517 attr='permissions_branch',
489 518 route_name='admin_permissions_branch', request_method='GET',
490 519 renderer='rhodecode:templates/admin/permissions/permissions.mako')
491 520
492 521 config.add_route(
493 522 name='admin_permissions_ips',
494 523 pattern='/permissions/ips')
495 524 config.add_view(
496 525 AdminPermissionsView,
497 526 attr='permissions_ips',
498 527 route_name='admin_permissions_ips', request_method='GET',
499 528 renderer='rhodecode:templates/admin/permissions/permissions.mako')
500 529
501 530 config.add_route(
502 531 name='admin_permissions_overview',
503 532 pattern='/permissions/overview')
504 533 config.add_view(
505 534 AdminPermissionsView,
506 535 attr='permissions_overview',
507 536 route_name='admin_permissions_overview', request_method='GET',
508 537 renderer='rhodecode:templates/admin/permissions/permissions.mako')
509 538
510 539 config.add_route(
511 540 name='admin_permissions_auth_token_access',
512 541 pattern='/permissions/auth_token_access')
513 542 config.add_view(
514 543 AdminPermissionsView,
515 544 attr='auth_token_access',
516 545 route_name='admin_permissions_auth_token_access', request_method='GET',
517 546 renderer='rhodecode:templates/admin/permissions/permissions.mako')
518 547
519 548 config.add_route(
520 549 name='admin_permissions_ssh_keys',
521 550 pattern='/permissions/ssh_keys')
522 551 config.add_view(
523 552 AdminPermissionsView,
524 553 attr='ssh_keys',
525 554 route_name='admin_permissions_ssh_keys', request_method='GET',
526 555 renderer='rhodecode:templates/admin/permissions/permissions.mako')
527 556
528 557 config.add_route(
529 558 name='admin_permissions_ssh_keys_data',
530 559 pattern='/permissions/ssh_keys/data')
531 560 config.add_view(
532 561 AdminPermissionsView,
533 562 attr='ssh_keys_data',
534 563 route_name='admin_permissions_ssh_keys_data', request_method='GET',
535 564 renderer='json_ext', xhr=True)
536 565
537 566 config.add_route(
538 567 name='admin_permissions_ssh_keys_update',
539 568 pattern='/permissions/ssh_keys/update')
540 569 config.add_view(
541 570 AdminPermissionsView,
542 571 attr='ssh_keys_update',
543 572 route_name='admin_permissions_ssh_keys_update', request_method='POST',
544 573 renderer='rhodecode:templates/admin/permissions/permissions.mako')
545 574
546 575 # users admin
547 576 config.add_route(
548 577 name='users',
549 578 pattern='/users')
550 579 config.add_view(
551 580 AdminUsersView,
552 581 attr='users_list',
553 582 route_name='users', request_method='GET',
554 583 renderer='rhodecode:templates/admin/users/users.mako')
555 584
556 585 config.add_route(
557 586 name='users_data',
558 587 pattern='/users_data')
559 588 config.add_view(
560 589 AdminUsersView,
561 590 attr='users_list_data',
562 591 # renderer defined below
563 592 route_name='users_data', request_method='GET',
564 593 renderer='json_ext', xhr=True)
565 594
566 595 config.add_route(
567 596 name='users_create',
568 597 pattern='/users/create')
569 598 config.add_view(
570 599 AdminUsersView,
571 600 attr='users_create',
572 601 route_name='users_create', request_method='POST',
573 602 renderer='rhodecode:templates/admin/users/user_add.mako')
574 603
575 604 config.add_route(
576 605 name='users_new',
577 606 pattern='/users/new')
578 607 config.add_view(
579 608 AdminUsersView,
580 609 attr='users_new',
581 610 route_name='users_new', request_method='GET',
582 611 renderer='rhodecode:templates/admin/users/user_add.mako')
583 612
584 613 # user management
585 614 config.add_route(
586 615 name='user_edit',
587 616 pattern='/users/{user_id:\d+}/edit',
588 617 user_route=True)
589 618 config.add_view(
590 619 UsersView,
591 620 attr='user_edit',
592 621 route_name='user_edit', request_method='GET',
593 622 renderer='rhodecode:templates/admin/users/user_edit.mako')
594 623
595 624 config.add_route(
596 625 name='user_edit_advanced',
597 626 pattern='/users/{user_id:\d+}/edit/advanced',
598 627 user_route=True)
599 628 config.add_view(
600 629 UsersView,
601 630 attr='user_edit_advanced',
602 631 route_name='user_edit_advanced', request_method='GET',
603 632 renderer='rhodecode:templates/admin/users/user_edit.mako')
604 633
605 634 config.add_route(
606 635 name='user_edit_global_perms',
607 636 pattern='/users/{user_id:\d+}/edit/global_permissions',
608 637 user_route=True)
609 638 config.add_view(
610 639 UsersView,
611 640 attr='user_edit_global_perms',
612 641 route_name='user_edit_global_perms', request_method='GET',
613 642 renderer='rhodecode:templates/admin/users/user_edit.mako')
614 643
615 644 config.add_route(
616 645 name='user_edit_global_perms_update',
617 646 pattern='/users/{user_id:\d+}/edit/global_permissions/update',
618 647 user_route=True)
619 648 config.add_view(
620 649 UsersView,
621 650 attr='user_edit_global_perms_update',
622 651 route_name='user_edit_global_perms_update', request_method='POST',
623 652 renderer='rhodecode:templates/admin/users/user_edit.mako')
624 653
625 654 config.add_route(
626 655 name='user_update',
627 656 pattern='/users/{user_id:\d+}/update',
628 657 user_route=True)
629 658 config.add_view(
630 659 UsersView,
631 660 attr='user_update',
632 661 route_name='user_update', request_method='POST',
633 662 renderer='rhodecode:templates/admin/users/user_edit.mako')
634 663
635 664 config.add_route(
636 665 name='user_delete',
637 666 pattern='/users/{user_id:\d+}/delete',
638 667 user_route=True)
639 668 config.add_view(
640 669 UsersView,
641 670 attr='user_delete',
642 671 route_name='user_delete', request_method='POST',
643 672 renderer='rhodecode:templates/admin/users/user_edit.mako')
644 673
645 674 config.add_route(
646 675 name='user_enable_force_password_reset',
647 676 pattern='/users/{user_id:\d+}/password_reset_enable',
648 677 user_route=True)
649 678 config.add_view(
650 679 UsersView,
651 680 attr='user_enable_force_password_reset',
652 681 route_name='user_enable_force_password_reset', request_method='POST',
653 682 renderer='rhodecode:templates/admin/users/user_edit.mako')
654 683
655 684 config.add_route(
656 685 name='user_disable_force_password_reset',
657 686 pattern='/users/{user_id:\d+}/password_reset_disable',
658 687 user_route=True)
659 688 config.add_view(
660 689 UsersView,
661 690 attr='user_disable_force_password_reset',
662 691 route_name='user_disable_force_password_reset', request_method='POST',
663 692 renderer='rhodecode:templates/admin/users/user_edit.mako')
664 693
665 694 config.add_route(
666 695 name='user_create_personal_repo_group',
667 696 pattern='/users/{user_id:\d+}/create_repo_group',
668 697 user_route=True)
669 698 config.add_view(
670 699 UsersView,
671 700 attr='user_create_personal_repo_group',
672 701 route_name='user_create_personal_repo_group', request_method='POST',
673 702 renderer='rhodecode:templates/admin/users/user_edit.mako')
674 703
675 704 # user notice
676 705 config.add_route(
677 706 name='user_notice_dismiss',
678 707 pattern='/users/{user_id:\d+}/notice_dismiss',
679 708 user_route=True)
680 709 config.add_view(
681 710 UsersView,
682 711 attr='user_notice_dismiss',
683 712 route_name='user_notice_dismiss', request_method='POST',
684 713 renderer='json_ext', xhr=True)
685 714
686 715 # user auth tokens
687 716 config.add_route(
688 717 name='edit_user_auth_tokens',
689 718 pattern='/users/{user_id:\d+}/edit/auth_tokens',
690 719 user_route=True)
691 720 config.add_view(
692 721 UsersView,
693 722 attr='auth_tokens',
694 723 route_name='edit_user_auth_tokens', request_method='GET',
695 724 renderer='rhodecode:templates/admin/users/user_edit.mako')
696 725
697 726 config.add_route(
698 727 name='edit_user_auth_tokens_view',
699 728 pattern='/users/{user_id:\d+}/edit/auth_tokens/view',
700 729 user_route=True)
701 730 config.add_view(
702 731 UsersView,
703 732 attr='auth_tokens_view',
704 733 route_name='edit_user_auth_tokens_view', request_method='POST',
705 734 renderer='json_ext', xhr=True)
706 735
707 736 config.add_route(
708 737 name='edit_user_auth_tokens_add',
709 738 pattern='/users/{user_id:\d+}/edit/auth_tokens/new',
710 739 user_route=True)
711 740 config.add_view(
712 741 UsersView,
713 742 attr='auth_tokens_add',
714 743 route_name='edit_user_auth_tokens_add', request_method='POST')
715 744
716 745 config.add_route(
717 746 name='edit_user_auth_tokens_delete',
718 747 pattern='/users/{user_id:\d+}/edit/auth_tokens/delete',
719 748 user_route=True)
720 749 config.add_view(
721 750 UsersView,
722 751 attr='auth_tokens_delete',
723 752 route_name='edit_user_auth_tokens_delete', request_method='POST')
724 753
725 754 # user ssh keys
726 755 config.add_route(
727 756 name='edit_user_ssh_keys',
728 757 pattern='/users/{user_id:\d+}/edit/ssh_keys',
729 758 user_route=True)
730 759 config.add_view(
731 760 UsersView,
732 761 attr='ssh_keys',
733 762 route_name='edit_user_ssh_keys', request_method='GET',
734 763 renderer='rhodecode:templates/admin/users/user_edit.mako')
735 764
736 765 config.add_route(
737 766 name='edit_user_ssh_keys_generate_keypair',
738 767 pattern='/users/{user_id:\d+}/edit/ssh_keys/generate',
739 768 user_route=True)
740 769 config.add_view(
741 770 UsersView,
742 771 attr='ssh_keys_generate_keypair',
743 772 route_name='edit_user_ssh_keys_generate_keypair', request_method='GET',
744 773 renderer='rhodecode:templates/admin/users/user_edit.mako')
745 774
746 775 config.add_route(
747 776 name='edit_user_ssh_keys_add',
748 777 pattern='/users/{user_id:\d+}/edit/ssh_keys/new',
749 778 user_route=True)
750 779 config.add_view(
751 780 UsersView,
752 781 attr='ssh_keys_add',
753 782 route_name='edit_user_ssh_keys_add', request_method='POST')
754 783
755 784 config.add_route(
756 785 name='edit_user_ssh_keys_delete',
757 786 pattern='/users/{user_id:\d+}/edit/ssh_keys/delete',
758 787 user_route=True)
759 788 config.add_view(
760 789 UsersView,
761 790 attr='ssh_keys_delete',
762 791 route_name='edit_user_ssh_keys_delete', request_method='POST')
763 792
764 793 # user emails
765 794 config.add_route(
766 795 name='edit_user_emails',
767 796 pattern='/users/{user_id:\d+}/edit/emails',
768 797 user_route=True)
769 798 config.add_view(
770 799 UsersView,
771 800 attr='emails',
772 801 route_name='edit_user_emails', request_method='GET',
773 802 renderer='rhodecode:templates/admin/users/user_edit.mako')
774 803
775 804 config.add_route(
776 805 name='edit_user_emails_add',
777 806 pattern='/users/{user_id:\d+}/edit/emails/new',
778 807 user_route=True)
779 808 config.add_view(
780 809 UsersView,
781 810 attr='emails_add',
782 811 route_name='edit_user_emails_add', request_method='POST')
783 812
784 813 config.add_route(
785 814 name='edit_user_emails_delete',
786 815 pattern='/users/{user_id:\d+}/edit/emails/delete',
787 816 user_route=True)
788 817 config.add_view(
789 818 UsersView,
790 819 attr='emails_delete',
791 820 route_name='edit_user_emails_delete', request_method='POST')
792 821
793 822 # user IPs
794 823 config.add_route(
795 824 name='edit_user_ips',
796 825 pattern='/users/{user_id:\d+}/edit/ips',
797 826 user_route=True)
798 827 config.add_view(
799 828 UsersView,
800 829 attr='ips',
801 830 route_name='edit_user_ips', request_method='GET',
802 831 renderer='rhodecode:templates/admin/users/user_edit.mako')
803 832
804 833 config.add_route(
805 834 name='edit_user_ips_add',
806 835 pattern='/users/{user_id:\d+}/edit/ips/new',
807 836 user_route_with_default=True) # enabled for default user too
808 837 config.add_view(
809 838 UsersView,
810 839 attr='ips_add',
811 840 route_name='edit_user_ips_add', request_method='POST')
812 841
813 842 config.add_route(
814 843 name='edit_user_ips_delete',
815 844 pattern='/users/{user_id:\d+}/edit/ips/delete',
816 845 user_route_with_default=True) # enabled for default user too
817 846 config.add_view(
818 847 UsersView,
819 848 attr='ips_delete',
820 849 route_name='edit_user_ips_delete', request_method='POST')
821 850
822 851 # user perms
823 852 config.add_route(
824 853 name='edit_user_perms_summary',
825 854 pattern='/users/{user_id:\d+}/edit/permissions_summary',
826 855 user_route=True)
827 856 config.add_view(
828 857 UsersView,
829 858 attr='user_perms_summary',
830 859 route_name='edit_user_perms_summary', request_method='GET',
831 860 renderer='rhodecode:templates/admin/users/user_edit.mako')
832 861
833 862 config.add_route(
834 863 name='edit_user_perms_summary_json',
835 864 pattern='/users/{user_id:\d+}/edit/permissions_summary/json',
836 865 user_route=True)
837 866 config.add_view(
838 867 UsersView,
839 868 attr='user_perms_summary_json',
840 869 route_name='edit_user_perms_summary_json', request_method='GET',
841 870 renderer='json_ext')
842 871
843 872 # user user groups management
844 873 config.add_route(
845 874 name='edit_user_groups_management',
846 875 pattern='/users/{user_id:\d+}/edit/groups_management',
847 876 user_route=True)
848 877 config.add_view(
849 878 UsersView,
850 879 attr='groups_management',
851 880 route_name='edit_user_groups_management', request_method='GET',
852 881 renderer='rhodecode:templates/admin/users/user_edit.mako')
853 882
854 883 config.add_route(
855 884 name='edit_user_groups_management_updates',
856 885 pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates',
857 886 user_route=True)
858 887 config.add_view(
859 888 UsersView,
860 889 attr='groups_management_updates',
861 890 route_name='edit_user_groups_management_updates', request_method='POST')
862 891
863 892 # user audit logs
864 893 config.add_route(
865 894 name='edit_user_audit_logs',
866 895 pattern='/users/{user_id:\d+}/edit/audit', user_route=True)
867 896 config.add_view(
868 897 UsersView,
869 898 attr='user_audit_logs',
870 899 route_name='edit_user_audit_logs', request_method='GET',
871 900 renderer='rhodecode:templates/admin/users/user_edit.mako')
872 901
873 902 config.add_route(
874 903 name='edit_user_audit_logs_download',
875 904 pattern='/users/{user_id:\d+}/edit/audit/download', user_route=True)
876 905 config.add_view(
877 906 UsersView,
878 907 attr='user_audit_logs_download',
879 908 route_name='edit_user_audit_logs_download', request_method='GET',
880 909 renderer='string')
881 910
882 911 # user caches
883 912 config.add_route(
884 913 name='edit_user_caches',
885 914 pattern='/users/{user_id:\d+}/edit/caches',
886 915 user_route=True)
887 916 config.add_view(
888 917 UsersView,
889 918 attr='user_caches',
890 919 route_name='edit_user_caches', request_method='GET',
891 920 renderer='rhodecode:templates/admin/users/user_edit.mako')
892 921
893 922 config.add_route(
894 923 name='edit_user_caches_update',
895 924 pattern='/users/{user_id:\d+}/edit/caches/update',
896 925 user_route=True)
897 926 config.add_view(
898 927 UsersView,
899 928 attr='user_caches_update',
900 929 route_name='edit_user_caches_update', request_method='POST')
901 930
902 931 # user-groups admin
903 932 config.add_route(
904 933 name='user_groups',
905 934 pattern='/user_groups')
906 935 config.add_view(
907 936 AdminUserGroupsView,
908 937 attr='user_groups_list',
909 938 route_name='user_groups', request_method='GET',
910 939 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
911 940
912 941 config.add_route(
913 942 name='user_groups_data',
914 943 pattern='/user_groups_data')
915 944 config.add_view(
916 945 AdminUserGroupsView,
917 946 attr='user_groups_list_data',
918 947 route_name='user_groups_data', request_method='GET',
919 948 renderer='json_ext', xhr=True)
920 949
921 950 config.add_route(
922 951 name='user_groups_new',
923 952 pattern='/user_groups/new')
924 953 config.add_view(
925 954 AdminUserGroupsView,
926 955 attr='user_groups_new',
927 956 route_name='user_groups_new', request_method='GET',
928 957 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
929 958
930 959 config.add_route(
931 960 name='user_groups_create',
932 961 pattern='/user_groups/create')
933 962 config.add_view(
934 963 AdminUserGroupsView,
935 964 attr='user_groups_create',
936 965 route_name='user_groups_create', request_method='POST',
937 966 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
938 967
939 968 # repos admin
940 969 config.add_route(
941 970 name='repos',
942 971 pattern='/repos')
943 972 config.add_view(
944 973 AdminReposView,
945 974 attr='repository_list',
946 975 route_name='repos', request_method='GET',
947 976 renderer='rhodecode:templates/admin/repos/repos.mako')
948 977
949 978 config.add_route(
950 979 name='repos_data',
951 980 pattern='/repos_data')
952 981 config.add_view(
953 982 AdminReposView,
954 983 attr='repository_list_data',
955 984 route_name='repos_data', request_method='GET',
956 985 renderer='json_ext', xhr=True)
957 986
958 987 config.add_route(
959 988 name='repo_new',
960 989 pattern='/repos/new')
961 990 config.add_view(
962 991 AdminReposView,
963 992 attr='repository_new',
964 993 route_name='repo_new', request_method='GET',
965 994 renderer='rhodecode:templates/admin/repos/repo_add.mako')
966 995
967 996 config.add_route(
968 997 name='repo_create',
969 998 pattern='/repos/create')
970 999 config.add_view(
971 1000 AdminReposView,
972 1001 attr='repository_create',
973 1002 route_name='repo_create', request_method='POST',
974 1003 renderer='rhodecode:templates/admin/repos/repos.mako')
975 1004
976 1005 # repo groups admin
977 1006 config.add_route(
978 1007 name='repo_groups',
979 1008 pattern='/repo_groups')
980 1009 config.add_view(
981 1010 AdminRepoGroupsView,
982 1011 attr='repo_group_list',
983 1012 route_name='repo_groups', request_method='GET',
984 1013 renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
985 1014
986 1015 config.add_route(
987 1016 name='repo_groups_data',
988 1017 pattern='/repo_groups_data')
989 1018 config.add_view(
990 1019 AdminRepoGroupsView,
991 1020 attr='repo_group_list_data',
992 1021 route_name='repo_groups_data', request_method='GET',
993 1022 renderer='json_ext', xhr=True)
994 1023
995 1024 config.add_route(
996 1025 name='repo_group_new',
997 1026 pattern='/repo_group/new')
998 1027 config.add_view(
999 1028 AdminRepoGroupsView,
1000 1029 attr='repo_group_new',
1001 1030 route_name='repo_group_new', request_method='GET',
1002 1031 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1003 1032
1004 1033 config.add_route(
1005 1034 name='repo_group_create',
1006 1035 pattern='/repo_group/create')
1007 1036 config.add_view(
1008 1037 AdminRepoGroupsView,
1009 1038 attr='repo_group_create',
1010 1039 route_name='repo_group_create', request_method='POST',
1011 1040 renderer='rhodecode:templates/admin/repo_groups/repo_group_add.mako')
1012 1041
1013 1042
1014 1043 def includeme(config):
1015 1044 from rhodecode.apps._base.navigation import includeme as nav_includeme
1016 1045 from rhodecode.apps.admin.views.main_views import AdminMainView
1017 1046
1018 1047 # Create admin navigation registry and add it to the pyramid registry.
1019 1048 nav_includeme(config)
1020 1049
1021 1050 # main admin routes
1022 1051 config.add_route(
1023 1052 name='admin_home', pattern=ADMIN_PREFIX)
1024 1053 config.add_view(
1025 1054 AdminMainView,
1026 1055 attr='admin_main',
1027 1056 route_name='admin_home', request_method='GET',
1028 1057 renderer='rhodecode:templates/admin/main.mako')
1029 1058
1030 1059 # pr global redirect
1031 1060 config.add_route(
1032 1061 name='pull_requests_global_0', # backward compat
1033 1062 pattern=ADMIN_PREFIX + '/pull_requests/{pull_request_id:\d+}')
1034 1063 config.add_view(
1035 1064 AdminMainView,
1036 1065 attr='pull_requests',
1037 1066 route_name='pull_requests_global_0', request_method='GET')
1038 1067
1039 1068 config.add_route(
1040 1069 name='pull_requests_global_1', # backward compat
1041 1070 pattern=ADMIN_PREFIX + '/pull-requests/{pull_request_id:\d+}')
1042 1071 config.add_view(
1043 1072 AdminMainView,
1044 1073 attr='pull_requests',
1045 1074 route_name='pull_requests_global_1', request_method='GET')
1046 1075
1047 1076 config.add_route(
1048 1077 name='pull_requests_global',
1049 1078 pattern=ADMIN_PREFIX + '/pull-request/{pull_request_id:\d+}')
1050 1079 config.add_view(
1051 1080 AdminMainView,
1052 1081 attr='pull_requests',
1053 1082 route_name='pull_requests_global', request_method='GET')
1054 1083
1055 1084 config.include(admin_routes, route_prefix=ADMIN_PREFIX)
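The routes module above keeps a strict pairing of one config.add_route per URL pattern and one config.add_view per handler method, with the whole set mounted under ADMIN_PREFIX through config.include. The following is a minimal, self-contained Pyramid sketch of that same pairing, for illustration only: DummyAdminView and the literal '/_admin' prefix are assumptions, not RhodeCode code.

# Minimal sketch of the add_route/add_view pairing used above -- not RhodeCode code.
from wsgiref.simple_server import make_server
from pyramid.config import Configurator
from pyramid.response import Response


class DummyAdminView(object):
    """Hypothetical stand-in for a view class such as AdminAuditLogsView."""
    def __init__(self, request):
        self.request = request

    def audit_logs(self):
        return Response('audit logs placeholder')


def make_app():
    config = Configurator()
    # One route per URL pattern, one view per handler method (attr), as above.
    config.add_route('admin_audit_logs', '/_admin/audit_logs')
    config.add_view(DummyAdminView, attr='audit_logs',
                    route_name='admin_audit_logs', request_method='GET')
    return config.make_wsgi_app()


if __name__ == '__main__':
    make_server('127.0.0.1', 8080, make_app()).serve_forever()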
@@ -1,204 +1,208 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 # -*- coding: utf-8 -*-
21 21
22 22 # Copyright (C) 2016-2020 RhodeCode GmbH
23 23 #
24 24 # This program is free software: you can redistribute it and/or modify
25 25 # it under the terms of the GNU Affero General Public License, version 3
26 26 # (only), as published by the Free Software Foundation.
27 27 #
28 28 # This program is distributed in the hope that it will be useful,
29 29 # but WITHOUT ANY WARRANTY; without even the implied warranty of
30 30 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
31 31 # GNU General Public License for more details.
32 32 #
33 33 # You should have received a copy of the GNU Affero General Public License
34 34 # along with this program. If not, see <http://www.gnu.org/licenses/>.
35 35 #
36 36 # This program is dual-licensed. If you wish to learn more about the
37 37 # RhodeCode Enterprise Edition, including its added features, Support services,
38 38 # and proprietary license terms, please see https://rhodecode.com/licenses/
39 39
40 40 import pytest
41 41
42 42 from rhodecode.model.db import User
43 43 from rhodecode.tests import TestController, assert_session_flash
44 44 from rhodecode.lib import helpers as h
45 45
46 46
47 47 def route_path(name, params=None, **kwargs):
48 48 import urllib
49 49 from rhodecode.apps._base import ADMIN_PREFIX
50 50
51 51 base_url = {
52 52 'my_account_edit': ADMIN_PREFIX + '/my_account/edit',
53 53 'my_account_update': ADMIN_PREFIX + '/my_account/update',
54 54 'my_account_pullrequests': ADMIN_PREFIX + '/my_account/pull_requests',
55 55 'my_account_pullrequests_data': ADMIN_PREFIX + '/my_account/pull_requests/data',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 class TestMyAccountEdit(TestController):
64 64
65 65 def test_my_account_edit(self):
66 66 self.log_user()
67 67 response = self.app.get(route_path('my_account_edit'))
68 68
69 69 response.mustcontain('value="test_admin')
70 70
71 71 @pytest.mark.backends("git", "hg")
72 72 def test_my_account_my_pullrequests(self, pr_util):
73 73 self.log_user()
74 74 response = self.app.get(route_path('my_account_pullrequests'))
75 75 response.mustcontain('There are currently no open pull '
76 76 'requests requiring your participation.')
77 77
78 78 @pytest.mark.backends("git", "hg")
79 def test_my_account_my_pullrequests_data(self, pr_util, xhr_header):
79 @pytest.mark.parametrize('params, expected_title', [
80 ({'closed': 1}, 'Closed'),
81 ({'awaiting_my_review': 1}, 'Awaiting my review'),
82 ])
83 def test_my_account_my_pullrequests_data(self, pr_util, xhr_header, params, expected_title):
80 84 self.log_user()
81 85 response = self.app.get(route_path('my_account_pullrequests_data'),
82 86 extra_environ=xhr_header)
83 87 assert response.json == {
84 88 u'data': [], u'draw': None,
85 89 u'recordsFiltered': 0, u'recordsTotal': 0}
86 90
87 91 pr = pr_util.create_pull_request(title='TestMyAccountPR')
88 92 expected = {
89 93 'author_raw': 'RhodeCode Admin',
90 94 'name_raw': pr.pull_request_id
91 95 }
92 96 response = self.app.get(route_path('my_account_pullrequests_data'),
93 97 extra_environ=xhr_header)
94 98 assert response.json['recordsTotal'] == 1
95 99 assert response.json['data'][0]['author_raw'] == expected['author_raw']
96 100
97 101 assert response.json['data'][0]['author_raw'] == expected['author_raw']
98 102 assert response.json['data'][0]['name_raw'] == expected['name_raw']
99 103
100 104 @pytest.mark.parametrize(
101 105 "name, attrs", [
102 106 ('firstname', {'firstname': 'new_username'}),
103 107 ('lastname', {'lastname': 'new_username'}),
104 108 ('admin', {'admin': True}),
105 109 ('admin', {'admin': False}),
106 110 ('extern_type', {'extern_type': 'ldap'}),
107 111 ('extern_type', {'extern_type': None}),
108 112 # ('extern_name', {'extern_name': 'test'}),
109 113 # ('extern_name', {'extern_name': None}),
110 114 ('active', {'active': False}),
111 115 ('active', {'active': True}),
112 116 ('email', {'email': u'some@email.com'}),
113 117 ])
114 118 def test_my_account_update(self, name, attrs, user_util):
115 119 usr = user_util.create_user(password='qweqwe')
116 120 params = usr.get_api_data() # current user data
117 121 user_id = usr.user_id
118 122 self.log_user(
119 123 username=usr.username, password='qweqwe')
120 124
121 125 params.update({'password_confirmation': ''})
122 126 params.update({'new_password': ''})
123 127 params.update({'extern_type': u'rhodecode'})
124 128 params.update({'extern_name': u'rhodecode'})
125 129 params.update({'csrf_token': self.csrf_token})
126 130
127 131 params.update(attrs)
128 132 # my account page cannot set language param yet, only for admins
129 133 del params['language']
130 134 if name == 'email':
131 135 uem = user_util.create_additional_user_email(usr, attrs['email'])
132 136 email_before = User.get(user_id).email
133 137
134 138 response = self.app.post(route_path('my_account_update'), params)
135 139
136 140 assert_session_flash(
137 141 response, 'Your account was updated successfully')
138 142
139 143 del params['csrf_token']
140 144
141 145 updated_user = User.get(user_id)
142 146 updated_params = updated_user.get_api_data()
143 147 updated_params.update({'password_confirmation': ''})
144 148 updated_params.update({'new_password': ''})
145 149
146 150 params['last_login'] = updated_params['last_login']
147 151 params['last_activity'] = updated_params['last_activity']
148 152 # my account page cannot set language param yet, only for admins
149 153 # but we get this info from API anyway
150 154 params['language'] = updated_params['language']
151 155
152 156 if name == 'email':
153 157 params['emails'] = [attrs['email'], email_before]
154 158 if name == 'extern_type':
155 159 # cannot update this via form, expected value is original one
156 160 params['extern_type'] = "rhodecode"
157 161 if name == 'extern_name':
158 162 # cannot update this via form, expected value is original one
159 163 params['extern_name'] = str(user_id)
160 164 if name == 'active':
161 165 # my account cannot deactivate account
162 166 params['active'] = True
163 167 if name == 'admin':
164 168 # my account cannot make you an admin !
165 169 params['admin'] = False
166 170
167 171 assert params == updated_params
168 172
169 173 def test_my_account_update_err_email_not_exists_in_emails(self):
170 174 self.log_user()
171 175
172 176 new_email = 'test_regular@mail.com' # not in emails
173 177 params = {
174 178 'username': 'test_admin',
175 179 'new_password': 'test12',
176 180 'password_confirmation': 'test122',
177 181 'firstname': 'NewName',
178 182 'lastname': 'NewLastname',
179 183 'email': new_email,
180 184 'csrf_token': self.csrf_token,
181 185 }
182 186
183 187 response = self.app.post(route_path('my_account_update'),
184 188 params=params)
185 189
186 190 response.mustcontain('"test_regular@mail.com" is not one of test_admin@mail.com')
187 191
188 192 def test_my_account_update_bad_email_address(self):
189 193 self.log_user('test_regular2', 'test12')
190 194
191 195 new_email = 'newmail.pl'
192 196 params = {
193 197 'username': 'test_admin',
194 198 'new_password': 'test12',
195 199 'password_confirmation': 'test122',
196 200 'firstname': 'NewName',
197 201 'lastname': 'NewLastname',
198 202 'email': new_email,
199 203 'csrf_token': self.csrf_token,
200 204 }
201 205 response = self.app.post(route_path('my_account_update'),
202 206 params=params)
203 207
204 208 response.mustcontain('"newmail.pl" is not one of test_regular2@mail.com')
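The hunk above adds a (params, expected_title) parametrization to test_my_account_my_pullrequests_data, although the visible body does not show how those arguments are consumed. As a purely illustrative pytest sketch of how such pairs are usually threaded through (fake_pullrequests_data is a hypothetical stand-in, not the RhodeCode view):

# Illustrative pytest sketch -- not RhodeCode code.
import pytest


def fake_pullrequests_data(params):
    # Hypothetical stand-in for the my_account_pullrequests_data endpoint.
    if params.get('closed'):
        return {'title': 'Closed', 'data': []}
    if params.get('awaiting_my_review'):
        return {'title': 'Awaiting my review', 'data': []}
    return {'title': 'Open', 'data': []}


@pytest.mark.parametrize('params, expected_title', [
    ({'closed': 1}, 'Closed'),
    ({'awaiting_my_review': 1}, 'Awaiting my review'),
])
def test_pullrequests_data_title(params, expected_title):
    # The params dict becomes the query string; the title is asserted in the response.
    assert fake_pullrequests_data(params)['title'] == expected_title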
@@ -1,752 +1,783 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23 import string
24 24
25 25 import formencode
26 26 import formencode.htmlfill
27 27 import peppercorn
28 28 from pyramid.httpexceptions import HTTPFound, HTTPNotFound
29 29
30 30 from rhodecode.apps._base import BaseAppView, DataGridAppView
31 31 from rhodecode import forms
32 32 from rhodecode.lib import helpers as h
33 33 from rhodecode.lib import audit_logger
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.auth import (
36 36 LoginRequired, NotAnonymous, CSRFRequired,
37 37 HasRepoPermissionAny, HasRepoGroupPermissionAny, AuthUser)
38 38 from rhodecode.lib.channelstream import (
39 39 channelstream_request, ChannelstreamException)
40 40 from rhodecode.lib.utils2 import safe_int, md5, str2bool
41 41 from rhodecode.model.auth_token import AuthTokenModel
42 42 from rhodecode.model.comment import CommentsModel
43 43 from rhodecode.model.db import (
44 44 IntegrityError, or_, in_filter_generator,
45 45 Repository, UserEmailMap, UserApiKeys, UserFollowing,
46 PullRequest, UserBookmark, RepoGroup)
46 PullRequest, UserBookmark, RepoGroup, ChangesetStatus)
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.pull_request import PullRequestModel
49 49 from rhodecode.model.user import UserModel
50 50 from rhodecode.model.user_group import UserGroupModel
51 51 from rhodecode.model.validation_schema.schemas import user_schema
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class MyAccountView(BaseAppView, DataGridAppView):
57 57 ALLOW_SCOPED_TOKENS = False
58 58 """
59 59 This view has alternative version inside EE, if modified please take a look
60 60 in there as well.
61 61 """
62 62
63 63 def load_default_context(self):
64 64 c = self._get_local_tmpl_context()
65 65 c.user = c.auth_user.get_instance()
66 66 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
67 67 return c
68 68
69 69 @LoginRequired()
70 70 @NotAnonymous()
71 71 def my_account_profile(self):
72 72 c = self.load_default_context()
73 73 c.active = 'profile'
74 74 c.extern_type = c.user.extern_type
75 75 return self._get_template_context(c)
76 76
77 77 @LoginRequired()
78 78 @NotAnonymous()
79 79 def my_account_edit(self):
80 80 c = self.load_default_context()
81 81 c.active = 'profile_edit'
82 82 c.extern_type = c.user.extern_type
83 83 c.extern_name = c.user.extern_name
84 84
85 85 schema = user_schema.UserProfileSchema().bind(
86 86 username=c.user.username, user_emails=c.user.emails)
87 87 appstruct = {
88 88 'username': c.user.username,
89 89 'email': c.user.email,
90 90 'firstname': c.user.firstname,
91 91 'lastname': c.user.lastname,
92 92 'description': c.user.description,
93 93 }
94 94 c.form = forms.RcForm(
95 95 schema, appstruct=appstruct,
96 96 action=h.route_path('my_account_update'),
97 97 buttons=(forms.buttons.save, forms.buttons.reset))
98 98
99 99 return self._get_template_context(c)
100 100
101 101 @LoginRequired()
102 102 @NotAnonymous()
103 103 @CSRFRequired()
104 104 def my_account_update(self):
105 105 _ = self.request.translate
106 106 c = self.load_default_context()
107 107 c.active = 'profile_edit'
108 108 c.perm_user = c.auth_user
109 109 c.extern_type = c.user.extern_type
110 110 c.extern_name = c.user.extern_name
111 111
112 112 schema = user_schema.UserProfileSchema().bind(
113 113 username=c.user.username, user_emails=c.user.emails)
114 114 form = forms.RcForm(
115 115 schema, buttons=(forms.buttons.save, forms.buttons.reset))
116 116
117 117 controls = self.request.POST.items()
118 118 try:
119 119 valid_data = form.validate(controls)
120 120 skip_attrs = ['admin', 'active', 'extern_type', 'extern_name',
121 121 'new_password', 'password_confirmation']
122 122 if c.extern_type != "rhodecode":
123 123 # forbid updating username for external accounts
124 124 skip_attrs.append('username')
125 125 old_email = c.user.email
126 126 UserModel().update_user(
127 127 self._rhodecode_user.user_id, skip_attrs=skip_attrs,
128 128 **valid_data)
129 129 if old_email != valid_data['email']:
130 130 old = UserEmailMap.query() \
131 131 .filter(UserEmailMap.user == c.user)\
132 132 .filter(UserEmailMap.email == valid_data['email'])\
133 133 .first()
134 134 old.email = old_email
135 135 h.flash(_('Your account was updated successfully'), category='success')
136 136 Session().commit()
137 137 except forms.ValidationFailure as e:
138 138 c.form = e
139 139 return self._get_template_context(c)
140 140 except Exception:
141 141 log.exception("Exception updating user")
142 142 h.flash(_('Error occurred during update of user'),
143 143 category='error')
144 144 raise HTTPFound(h.route_path('my_account_profile'))
145 145
146 146 @LoginRequired()
147 147 @NotAnonymous()
148 148 def my_account_password(self):
149 149 c = self.load_default_context()
150 150 c.active = 'password'
151 151 c.extern_type = c.user.extern_type
152 152
153 153 schema = user_schema.ChangePasswordSchema().bind(
154 154 username=c.user.username)
155 155
156 156 form = forms.Form(
157 157 schema,
158 158 action=h.route_path('my_account_password_update'),
159 159 buttons=(forms.buttons.save, forms.buttons.reset))
160 160
161 161 c.form = form
162 162 return self._get_template_context(c)
163 163
164 164 @LoginRequired()
165 165 @NotAnonymous()
166 166 @CSRFRequired()
167 167 def my_account_password_update(self):
168 168 _ = self.request.translate
169 169 c = self.load_default_context()
170 170 c.active = 'password'
171 171 c.extern_type = c.user.extern_type
172 172
173 173 schema = user_schema.ChangePasswordSchema().bind(
174 174 username=c.user.username)
175 175
176 176 form = forms.Form(
177 177 schema, buttons=(forms.buttons.save, forms.buttons.reset))
178 178
179 179 if c.extern_type != 'rhodecode':
180 180 raise HTTPFound(self.request.route_path('my_account_password'))
181 181
182 182 controls = self.request.POST.items()
183 183 try:
184 184 valid_data = form.validate(controls)
185 185 UserModel().update_user(c.user.user_id, **valid_data)
186 186 c.user.update_userdata(force_password_change=False)
187 187 Session().commit()
188 188 except forms.ValidationFailure as e:
189 189 c.form = e
190 190 return self._get_template_context(c)
191 191
192 192 except Exception:
193 193 log.exception("Exception updating password")
194 194 h.flash(_('Error occurred during update of user password'),
195 195 category='error')
196 196 else:
197 197 instance = c.auth_user.get_instance()
198 198 self.session.setdefault('rhodecode_user', {}).update(
199 199 {'password': md5(instance.password)})
200 200 self.session.save()
201 201 h.flash(_("Successfully updated password"), category='success')
202 202
203 203 raise HTTPFound(self.request.route_path('my_account_password'))
204 204
205 205 @LoginRequired()
206 206 @NotAnonymous()
207 207 def my_account_auth_tokens(self):
208 208 _ = self.request.translate
209 209
210 210 c = self.load_default_context()
211 211 c.active = 'auth_tokens'
212 212 c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_)
213 213 c.role_values = [
214 214 (x, AuthTokenModel.cls._get_role_name(x))
215 215 for x in AuthTokenModel.cls.ROLES]
216 216 c.role_options = [(c.role_values, _("Role"))]
217 217 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
218 218 c.user.user_id, show_expired=True)
219 219 c.role_vcs = AuthTokenModel.cls.ROLE_VCS
220 220 return self._get_template_context(c)
221 221
222 222 @LoginRequired()
223 223 @NotAnonymous()
224 224 @CSRFRequired()
225 225 def my_account_auth_tokens_view(self):
226 226 _ = self.request.translate
227 227 c = self.load_default_context()
228 228
229 229 auth_token_id = self.request.POST.get('auth_token_id')
230 230
231 231 if auth_token_id:
232 232 token = UserApiKeys.get_or_404(auth_token_id)
233 233 if token.user.user_id != c.user.user_id:
234 234 raise HTTPNotFound()
235 235
236 236 return {
237 237 'auth_token': token.api_key
238 238 }
239 239
240 240 def maybe_attach_token_scope(self, token):
241 241 # implemented in EE edition
242 242 pass
243 243
244 244 @LoginRequired()
245 245 @NotAnonymous()
246 246 @CSRFRequired()
247 247 def my_account_auth_tokens_add(self):
248 248 _ = self.request.translate
249 249 c = self.load_default_context()
250 250
251 251 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
252 252 description = self.request.POST.get('description')
253 253 role = self.request.POST.get('role')
254 254
255 255 token = UserModel().add_auth_token(
256 256 user=c.user.user_id,
257 257 lifetime_minutes=lifetime, role=role, description=description,
258 258 scope_callback=self.maybe_attach_token_scope)
259 259 token_data = token.get_api_data()
260 260
261 261 audit_logger.store_web(
262 262 'user.edit.token.add', action_data={
263 263 'data': {'token': token_data, 'user': 'self'}},
264 264 user=self._rhodecode_user, )
265 265 Session().commit()
266 266
267 267 h.flash(_("Auth token successfully created"), category='success')
268 268 return HTTPFound(h.route_path('my_account_auth_tokens'))
269 269
270 270 @LoginRequired()
271 271 @NotAnonymous()
272 272 @CSRFRequired()
273 273 def my_account_auth_tokens_delete(self):
274 274 _ = self.request.translate
275 275 c = self.load_default_context()
276 276
277 277 del_auth_token = self.request.POST.get('del_auth_token')
278 278
279 279 if del_auth_token:
280 280 token = UserApiKeys.get_or_404(del_auth_token)
281 281 token_data = token.get_api_data()
282 282
283 283 AuthTokenModel().delete(del_auth_token, c.user.user_id)
284 284 audit_logger.store_web(
285 285 'user.edit.token.delete', action_data={
286 286 'data': {'token': token_data, 'user': 'self'}},
287 287 user=self._rhodecode_user,)
288 288 Session().commit()
289 289 h.flash(_("Auth token successfully deleted"), category='success')
290 290
291 291 return HTTPFound(h.route_path('my_account_auth_tokens'))
292 292
293 293 @LoginRequired()
294 294 @NotAnonymous()
295 295 def my_account_emails(self):
296 296 _ = self.request.translate
297 297
298 298 c = self.load_default_context()
299 299 c.active = 'emails'
300 300
301 301 c.user_email_map = UserEmailMap.query()\
302 302 .filter(UserEmailMap.user == c.user).all()
303 303
304 304 schema = user_schema.AddEmailSchema().bind(
305 305 username=c.user.username, user_emails=c.user.emails)
306 306
307 307 form = forms.RcForm(schema,
308 308 action=h.route_path('my_account_emails_add'),
309 309 buttons=(forms.buttons.save, forms.buttons.reset))
310 310
311 311 c.form = form
312 312 return self._get_template_context(c)
313 313
314 314 @LoginRequired()
315 315 @NotAnonymous()
316 316 @CSRFRequired()
317 317 def my_account_emails_add(self):
318 318 _ = self.request.translate
319 319 c = self.load_default_context()
320 320 c.active = 'emails'
321 321
322 322 schema = user_schema.AddEmailSchema().bind(
323 323 username=c.user.username, user_emails=c.user.emails)
324 324
325 325 form = forms.RcForm(
326 326 schema, action=h.route_path('my_account_emails_add'),
327 327 buttons=(forms.buttons.save, forms.buttons.reset))
328 328
329 329 controls = self.request.POST.items()
330 330 try:
331 331 valid_data = form.validate(controls)
332 332 UserModel().add_extra_email(c.user.user_id, valid_data['email'])
333 333 audit_logger.store_web(
334 334 'user.edit.email.add', action_data={
335 335 'data': {'email': valid_data['email'], 'user': 'self'}},
336 336 user=self._rhodecode_user,)
337 337 Session().commit()
338 338 except formencode.Invalid as error:
339 339 h.flash(h.escape(error.error_dict['email']), category='error')
340 340 except forms.ValidationFailure as e:
341 341 c.user_email_map = UserEmailMap.query() \
342 342 .filter(UserEmailMap.user == c.user).all()
343 343 c.form = e
344 344 return self._get_template_context(c)
345 345 except Exception:
346 346 log.exception("Exception adding email")
347 347 h.flash(_('Error occurred during adding email'),
348 348 category='error')
349 349 else:
350 350 h.flash(_("Successfully added email"), category='success')
351 351
352 352 raise HTTPFound(self.request.route_path('my_account_emails'))
353 353
354 354 @LoginRequired()
355 355 @NotAnonymous()
356 356 @CSRFRequired()
357 357 def my_account_emails_delete(self):
358 358 _ = self.request.translate
359 359 c = self.load_default_context()
360 360
361 361 del_email_id = self.request.POST.get('del_email_id')
362 362 if del_email_id:
363 363 email = UserEmailMap.get_or_404(del_email_id).email
364 364 UserModel().delete_extra_email(c.user.user_id, del_email_id)
365 365 audit_logger.store_web(
366 366 'user.edit.email.delete', action_data={
367 367 'data': {'email': email, 'user': 'self'}},
368 368 user=self._rhodecode_user,)
369 369 Session().commit()
370 370 h.flash(_("Email successfully deleted"),
371 371 category='success')
372 372 return HTTPFound(h.route_path('my_account_emails'))
373 373
374 374 @LoginRequired()
375 375 @NotAnonymous()
376 376 @CSRFRequired()
377 377 def my_account_notifications_test_channelstream(self):
378 378 message = 'Test message sent via Channelstream by user: {}, on {}'.format(
379 379 self._rhodecode_user.username, datetime.datetime.now())
380 380 payload = {
381 381 # 'channel': 'broadcast',
382 382 'type': 'message',
383 383 'timestamp': datetime.datetime.utcnow(),
384 384 'user': 'system',
385 385 'pm_users': [self._rhodecode_user.username],
386 386 'message': {
387 387 'message': message,
388 388 'level': 'info',
389 389 'topic': '/notifications'
390 390 }
391 391 }
392 392
393 393 registry = self.request.registry
394 394 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
395 395 channelstream_config = rhodecode_plugins.get('channelstream', {})
396 396
397 397 try:
398 398 channelstream_request(channelstream_config, [payload], '/message')
399 399 except ChannelstreamException as e:
400 400 log.exception('Failed to send channelstream data')
401 401 return {"response": 'ERROR: {}'.format(e.__class__.__name__)}
402 402 return {"response": 'Channelstream data sent. '
403 403 'You should see a new live message now.'}
404 404
405 405 def _load_my_repos_data(self, watched=False):
406 406
407 407 allowed_ids = [-1] + self._rhodecode_user.repo_acl_ids_from_stack(AuthUser.repo_read_perms)
408 408
409 409 if watched:
410 410 # repos user watch
411 411 repo_list = Session().query(
412 412 Repository
413 413 ) \
414 414 .join(
415 415 (UserFollowing, UserFollowing.follows_repo_id == Repository.repo_id)
416 416 ) \
417 417 .filter(
418 418 UserFollowing.user_id == self._rhodecode_user.user_id
419 419 ) \
420 420 .filter(or_(
421 421 # generate multiple IN to fix limitation problems
422 422 *in_filter_generator(Repository.repo_id, allowed_ids))
423 423 ) \
424 424 .order_by(Repository.repo_name) \
425 425 .all()
426 426
427 427 else:
428 428 # repos user is owner of
429 429 repo_list = Session().query(
430 430 Repository
431 431 ) \
432 432 .filter(
433 433 Repository.user_id == self._rhodecode_user.user_id
434 434 ) \
435 435 .filter(or_(
436 436 # generate multiple IN to fix limitation problems
437 437 *in_filter_generator(Repository.repo_id, allowed_ids))
438 438 ) \
439 439 .order_by(Repository.repo_name) \
440 440 .all()
441 441
442 442 _render = self.request.get_partial_renderer(
443 443 'rhodecode:templates/data_table/_dt_elements.mako')
444 444
445 445 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
446 446 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
447 447 short_name=False, admin=False)
448 448
449 449 repos_data = []
450 450 for repo in repo_list:
451 451 row = {
452 452 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
453 453 repo.private, repo.archived, repo.fork),
454 454 "name_raw": repo.repo_name.lower(),
455 455 }
456 456
457 457 repos_data.append(row)
458 458
459 459 # json used to render the grid
460 460 return json.dumps(repos_data)
461 461
462 462 @LoginRequired()
463 463 @NotAnonymous()
464 464 def my_account_repos(self):
465 465 c = self.load_default_context()
466 466 c.active = 'repos'
467 467
468 468 # json used to render the grid
469 469 c.data = self._load_my_repos_data()
470 470 return self._get_template_context(c)
471 471
472 472 @LoginRequired()
473 473 @NotAnonymous()
474 474 def my_account_watched(self):
475 475 c = self.load_default_context()
476 476 c.active = 'watched'
477 477
478 478 # json used to render the grid
479 479 c.data = self._load_my_repos_data(watched=True)
480 480 return self._get_template_context(c)
481 481
482 482 @LoginRequired()
483 483 @NotAnonymous()
484 484 def my_account_bookmarks(self):
485 485 c = self.load_default_context()
486 486 c.active = 'bookmarks'
487 487 c.bookmark_items = UserBookmark.get_bookmarks_for_user(
488 488 self._rhodecode_db_user.user_id, cache=False)
489 489 return self._get_template_context(c)
490 490
491 491 def _process_bookmark_entry(self, entry, user_id):
492 492 position = safe_int(entry.get('position'))
493 493 cur_position = safe_int(entry.get('cur_position'))
494 494 if position is None:
495 495 return
496 496
497 497 # check if this is an existing entry
498 498 is_new = False
499 499 db_entry = UserBookmark().get_by_position_for_user(cur_position, user_id)
500 500
501 501 if db_entry and str2bool(entry.get('remove')):
502 502 log.debug('Marked bookmark %s for deletion', db_entry)
503 503 Session().delete(db_entry)
504 504 return
505 505
506 506 if not db_entry:
507 507 # new
508 508 db_entry = UserBookmark()
509 509 is_new = True
510 510
511 511 should_save = False
512 512 default_redirect_url = ''
513 513
514 514 # save repo
515 515 if entry.get('bookmark_repo') and safe_int(entry.get('bookmark_repo')):
516 516 repo = Repository.get(entry['bookmark_repo'])
517 517 perm_check = HasRepoPermissionAny(
518 518 'repository.read', 'repository.write', 'repository.admin')
519 519 if repo and perm_check(repo_name=repo.repo_name):
520 520 db_entry.repository = repo
521 521 should_save = True
522 522 default_redirect_url = '${repo_url}'
523 523 # save repo group
524 524 elif entry.get('bookmark_repo_group') and safe_int(entry.get('bookmark_repo_group')):
525 525 repo_group = RepoGroup.get(entry['bookmark_repo_group'])
526 526 perm_check = HasRepoGroupPermissionAny(
527 527 'group.read', 'group.write', 'group.admin')
528 528
529 529 if repo_group and perm_check(group_name=repo_group.group_name):
530 530 db_entry.repository_group = repo_group
531 531 should_save = True
532 532 default_redirect_url = '${repo_group_url}'
533 533 # save generic info
534 534 elif entry.get('title') and entry.get('redirect_url'):
535 535 should_save = True
536 536
537 537 if should_save:
538 538 # mark user and position
539 539 db_entry.user_id = user_id
540 540 db_entry.position = position
541 541 db_entry.title = entry.get('title')
542 542 db_entry.redirect_url = entry.get('redirect_url') or default_redirect_url
543 543 log.debug('Saving bookmark %s, new:%s', db_entry, is_new)
544 544
545 545 Session().add(db_entry)
546 546
547 547 @LoginRequired()
548 548 @NotAnonymous()
549 549 @CSRFRequired()
550 550 def my_account_bookmarks_update(self):
551 551 _ = self.request.translate
552 552 c = self.load_default_context()
553 553 c.active = 'bookmarks'
554 554
555 555 controls = peppercorn.parse(self.request.POST.items())
556 556 user_id = c.user.user_id
557 557
558 558 # validate positions
559 559 positions = {}
560 560 for entry in controls.get('bookmarks', []):
561 561 position = safe_int(entry['position'])
562 562 if position is None:
563 563 continue
564 564
565 565 if position in positions:
566 566 h.flash(_("Position {} is defined twice. "
567 567 "Please correct this error.").format(position), category='error')
568 568 return HTTPFound(h.route_path('my_account_bookmarks'))
569 569
570 570 entry['position'] = position
571 571 entry['cur_position'] = safe_int(entry.get('cur_position'))
572 572 positions[position] = entry
573 573
574 574 try:
575 575 for entry in positions.values():
576 576 self._process_bookmark_entry(entry, user_id)
577 577
578 578 Session().commit()
579 579 h.flash(_("Update Bookmarks"), category='success')
580 580 except IntegrityError:
581 581 h.flash(_("Failed to update bookmarks. "
582 582 "Make sure a unique position is used."), category='error')
583 583
584 584 return HTTPFound(h.route_path('my_account_bookmarks'))
585 585
586 586 @LoginRequired()
587 587 @NotAnonymous()
588 588 def my_account_goto_bookmark(self):
589 589
590 590 bookmark_id = self.request.matchdict['bookmark_id']
591 591 user_bookmark = UserBookmark().query()\
592 592 .filter(UserBookmark.user_id == self.request.user.user_id) \
593 593 .filter(UserBookmark.position == bookmark_id).scalar()
594 594
595 595 redirect_url = h.route_path('my_account_bookmarks')
596 596 if not user_bookmark:
597 597 raise HTTPFound(redirect_url)
598 598
599 599 # repository set
600 600 if user_bookmark.repository:
601 601 repo_name = user_bookmark.repository.repo_name
602 602 base_redirect_url = h.route_path(
603 603 'repo_summary', repo_name=repo_name)
604 604 if user_bookmark.redirect_url and \
605 605 '${repo_url}' in user_bookmark.redirect_url:
606 606 redirect_url = string.Template(user_bookmark.redirect_url)\
607 607 .safe_substitute({'repo_url': base_redirect_url})
608 608 else:
609 609 redirect_url = base_redirect_url
610 610 # repository group set
611 611 elif user_bookmark.repository_group:
612 612 repo_group_name = user_bookmark.repository_group.group_name
613 613 base_redirect_url = h.route_path(
614 614 'repo_group_home', repo_group_name=repo_group_name)
615 615 if user_bookmark.redirect_url and \
616 616 '${repo_group_url}' in user_bookmark.redirect_url:
617 617 redirect_url = string.Template(user_bookmark.redirect_url)\
618 618 .safe_substitute({'repo_group_url': base_redirect_url})
619 619 else:
620 620 redirect_url = base_redirect_url
621 621 # custom URL set
622 622 elif user_bookmark.redirect_url:
623 623 server_url = h.route_url('home').rstrip('/')
624 624 redirect_url = string.Template(user_bookmark.redirect_url) \
625 625 .safe_substitute({'server_url': server_url})
626 626
627 627 log.debug('Redirecting bookmark %s to %s', user_bookmark, redirect_url)
628 628 raise HTTPFound(redirect_url)
629 629
630 630 @LoginRequired()
631 631 @NotAnonymous()
632 632 def my_account_perms(self):
633 633 c = self.load_default_context()
634 634 c.active = 'perms'
635 635
636 636 c.perm_user = c.auth_user
637 637 return self._get_template_context(c)
638 638
639 639 @LoginRequired()
640 640 @NotAnonymous()
641 641 def my_notifications(self):
642 642 c = self.load_default_context()
643 643 c.active = 'notifications'
644 644
645 645 return self._get_template_context(c)
646 646
647 647 @LoginRequired()
648 648 @NotAnonymous()
649 649 @CSRFRequired()
650 650 def my_notifications_toggle_visibility(self):
651 651 user = self._rhodecode_db_user
652 652 new_status = not user.user_data.get('notification_status', True)
653 653 user.update_userdata(notification_status=new_status)
654 654 Session().commit()
655 655 return user.user_data['notification_status']
656 656
657 def _get_pull_requests_list(self, statuses):
657 def _get_pull_requests_list(self, statuses, filter_type=None):
658 658 draw, start, limit = self._extract_chunk(self.request)
659 659 search_q, order_by, order_dir = self._extract_ordering(self.request)
660 660
661 661 _render = self.request.get_partial_renderer(
662 662 'rhodecode:templates/data_table/_dt_elements.mako')
663 663
664 if filter_type == 'awaiting_my_review':
665 pull_requests = PullRequestModel().get_im_participating_in_for_review(
666 user_id=self._rhodecode_user.user_id,
667 statuses=statuses, query=search_q,
668 offset=start, length=limit, order_by=order_by,
669 order_dir=order_dir)
670
671 pull_requests_total_count = PullRequestModel().count_im_participating_in_for_review(
672 user_id=self._rhodecode_user.user_id, statuses=statuses, query=search_q)
673 else:
664 674 pull_requests = PullRequestModel().get_im_participating_in(
665 675 user_id=self._rhodecode_user.user_id,
666 676 statuses=statuses, query=search_q,
667 677 offset=start, length=limit, order_by=order_by,
668 678 order_dir=order_dir)
669 679
670 680 pull_requests_total_count = PullRequestModel().count_im_participating_in(
671 681 user_id=self._rhodecode_user.user_id, statuses=statuses, query=search_q)
672 682
673 683 data = []
674 684 comments_model = CommentsModel()
675 685 for pr in pull_requests:
676 686 repo_id = pr.target_repo_id
677 687 comments_count = comments_model.get_all_comments(
678 688 repo_id, pull_request=pr, include_drafts=False, count_only=True)
679 689 owned = pr.user_id == self._rhodecode_user.user_id
680 690
691 review_statuses = pr.reviewers_statuses(user=self._rhodecode_db_user)
692 my_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
693 if review_statuses and review_statuses[4]:
694 _review_obj, _user, _reasons, _mandatory, statuses = review_statuses
695 my_review_status = statuses[0][1].status
696
681 697 data.append({
682 698 'target_repo': _render('pullrequest_target_repo',
683 699 pr.target_repo.repo_name),
684 700 'name': _render('pullrequest_name',
685 701 pr.pull_request_id, pr.pull_request_state,
686 702 pr.work_in_progress, pr.target_repo.repo_name,
687 703 short=True),
688 704 'name_raw': pr.pull_request_id,
689 705 'status': _render('pullrequest_status',
690 706 pr.calculated_review_status()),
707 'my_status': _render('pullrequest_status',
708 my_review_status),
691 709 'title': _render('pullrequest_title', pr.title, pr.description),
692 710 'description': h.escape(pr.description),
693 711 'updated_on': _render('pullrequest_updated_on',
694 712 h.datetime_to_time(pr.updated_on),
695 713 pr.versions_count),
696 714 'updated_on_raw': h.datetime_to_time(pr.updated_on),
697 715 'created_on': _render('pullrequest_updated_on',
698 716 h.datetime_to_time(pr.created_on)),
699 717 'created_on_raw': h.datetime_to_time(pr.created_on),
700 718 'state': pr.pull_request_state,
701 719 'author': _render('pullrequest_author',
702 720 pr.author.full_contact, ),
703 721 'author_raw': pr.author.full_name,
704 722 'comments': _render('pullrequest_comments', comments_count),
705 723 'comments_raw': comments_count,
706 724 'closed': pr.is_closed(),
707 725 'owned': owned
708 726 })
709 727
710 728 # json used to render the grid
711 729 data = ({
712 730 'draw': draw,
713 731 'data': data,
714 732 'recordsTotal': pull_requests_total_count,
715 733 'recordsFiltered': pull_requests_total_count,
716 734 })
717 735 return data
718 736
719 737 @LoginRequired()
720 738 @NotAnonymous()
721 739 def my_account_pullrequests(self):
722 740 c = self.load_default_context()
723 741 c.active = 'pullrequests'
724 742 req_get = self.request.GET
725 743
726 c.closed = str2bool(req_get.get('pr_show_closed'))
744 c.closed = str2bool(req_get.get('closed'))
745 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
746
747 c.selected_filter = 'all'
748 if c.closed:
749 c.selected_filter = 'all_closed'
750 if c.awaiting_my_review:
751 c.selected_filter = 'awaiting_my_review'
727 752
728 753 return self._get_template_context(c)
729 754
730 755 @LoginRequired()
731 756 @NotAnonymous()
732 757 def my_account_pullrequests_data(self):
733 758 self.load_default_context()
734 759 req_get = self.request.GET
760
761 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
735 762 closed = str2bool(req_get.get('closed'))
736 763
737 764 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
738 765 if closed:
739 766 statuses += [PullRequest.STATUS_CLOSED]
740 767
741 data = self._get_pull_requests_list(statuses=statuses)
768 filter_type = \
769 'awaiting_my_review' if awaiting_my_review \
770 else None
771
772 data = self._get_pull_requests_list(statuses=statuses, filter_type=filter_type)
742 773 return data
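
As a rough illustration of the request handling just above, here is a minimal sketch of how the GET flags are mapped onto a status list and an optional filter_type. The tiny str2bool below is only a stand-in for rhodecode.lib.utils2.str2bool, and the lowercase status strings merely mirror the PullRequest.STATUS_* constant names.

def str2bool(value):
    # simplified stand-in for rhodecode.lib.utils2.str2bool
    return str(value).lower() in ('true', 'yes', 'on', '1')


def build_pr_query_args(req_get):
    """Translate request GET flags into the arguments passed to the PR listing."""
    statuses = ['new', 'open']                  # PullRequest.STATUS_NEW / STATUS_OPEN
    if str2bool(req_get.get('closed')):
        statuses.append('closed')               # PullRequest.STATUS_CLOSED
    filter_type = 'awaiting_my_review' if str2bool(req_get.get('awaiting_my_review')) else None
    return statuses, filter_type


assert build_pr_query_args({'closed': '1', 'awaiting_my_review': 'true'}) == (
    ['new', 'open', 'closed'], 'awaiting_my_review')
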
743 774
744 775 @LoginRequired()
745 776 @NotAnonymous()
746 777 def my_account_user_group_membership(self):
747 778 c = self.load_default_context()
748 779 c.active = 'user_group_membership'
749 780 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
750 781 for group in self._rhodecode_db_user.group_member]
751 782 c.user_groups = json.dumps(groups)
752 783 return self._get_template_context(c)
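
The main additions in this file are the 'awaiting_my_review' branch of _get_pull_requests_list() and the new 'my_status' grid column, which derives a per-user review status from the 5-tuple returned by pr.reviewers_statuses(). A self-contained sketch of that derivation follows; the namedtuple and the literal status values are stand-ins, not RhodeCode's real model objects.

from collections import namedtuple

Status = namedtuple('Status', ['status'])
STATUS_NOT_REVIEWED = 'not_reviewed'  # stand-in for ChangesetStatus.STATUS_NOT_REVIEWED


def my_review_status_from(review_statuses):
    """Fall back to 'not reviewed' unless the 5th element carries recorded statuses."""
    my_review_status = STATUS_NOT_REVIEWED
    if review_statuses and review_statuses[4]:
        _review_obj, _user, _reasons, _mandatory, statuses = review_statuses
        my_review_status = statuses[0][1].status  # first entry, as in the handler above
    return my_review_status


# e.g. a reviewer whose recorded vote is "approved"
example = (object(), object(), [], False, [(1, Status('approved'))])
assert my_review_status_from(example) == 'approved'
assert my_review_status_from(None) == STATUS_NOT_REVIEWED
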
@@ -1,84 +1,84 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 from rhodecode.model.db import Repository
23 23
24 24
25 25 def route_path(name, params=None, **kwargs):
26 26 import urllib
27 27
28 28 base_url = {
29 29 'pullrequest_show_all': '/{repo_name}/pull-request',
30 30 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
31 31 }[name].format(**kwargs)
32 32
33 33 if params:
34 34 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
35 35 return base_url
36 36
37 37
38 38 @pytest.mark.backends("git", "hg")
39 39 @pytest.mark.usefixtures('autologin_user', 'app')
40 40 class TestPullRequestList(object):
41 41
42 42 @pytest.mark.parametrize('params, expected_title', [
43 43 ({'source': 0, 'closed': 1}, 'Closed'),
44 ({'source': 0, 'my': 1}, 'Opened by me'),
44 ({'source': 0, 'my': 1}, 'Created by me'),
45 45 ({'source': 0, 'awaiting_review': 1}, 'Awaiting review'),
46 46 ({'source': 0, 'awaiting_my_review': 1}, 'Awaiting my review'),
47 47 ({'source': 1}, 'From this repo'),
48 48 ])
49 49 def test_showing_list_page(self, backend, pr_util, params, expected_title):
50 50 pull_request = pr_util.create_pull_request()
51 51
52 52 response = self.app.get(
53 53 route_path('pullrequest_show_all',
54 54 repo_name=pull_request.target_repo.repo_name,
55 55 params=params))
56 56
57 57 assert_response = response.assert_response()
58 58
59 59 element = assert_response.get_element('.title .active')
60 60 element_text = element.text_content()
61 61 assert expected_title == element_text
62 62
63 63 def test_showing_list_page_data(self, backend, pr_util, xhr_header):
64 64 pull_request = pr_util.create_pull_request()
65 65 response = self.app.get(
66 66 route_path('pullrequest_show_all_data',
67 67 repo_name=pull_request.target_repo.repo_name),
68 68 extra_environ=xhr_header)
69 69
70 70 assert response.json['recordsTotal'] == 1
71 71 assert response.json['data'][0]['description'] == 'Description'
72 72
73 73 def test_description_is_escaped_on_index_page(self, backend, pr_util, xhr_header):
74 74 xss_description = "<script>alert('Hi!')</script>"
75 75 pull_request = pr_util.create_pull_request(description=xss_description)
76 76
77 77 response = self.app.get(
78 78 route_path('pullrequest_show_all_data',
79 79 repo_name=pull_request.target_repo.repo_name),
80 80 extra_environ=xhr_header)
81 81
82 82 assert response.json['recordsTotal'] == 1
83 83 assert response.json['data'][0]['description'] == \
84 84 "&lt;script&gt;alert(&#39;Hi!&#39;)&lt;/script&gt;"
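
The route_path() helper at the top of this test module builds its query strings with urllib.urlencode, which is the Python 2 location of that function. Below is a Python 3 flavoured sketch of the same helper, with 'some-repo' used as a purely illustrative repository name.

from urllib.parse import urlencode  # Python 3 home of urllib.urlencode


def route_path(name, params=None, **kwargs):
    base_url = {
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urlencode(params))
    return base_url


# e.g. the "Awaiting my review" listing used in the parametrized test above
print(route_path('pullrequest_show_all',
                 params={'source': 0, 'awaiting_my_review': 1},
                 repo_name='some-repo'))
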
@@ -1,1861 +1,1868 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 29
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.db import (
50 50 func, false, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
51 51 PullRequestReviewers)
52 52 from rhodecode.model.forms import PullRequestForm
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
55 55 from rhodecode.model.scm import ScmModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
61 61
62 62 def load_default_context(self):
63 63 c = self._get_local_tmpl_context(include_app_defaults=True)
64 64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
65 65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
66 66 # backward compat.: for OLD PRs we use a plain renderer
67 67 c.renderer = 'plain'
68 68 return c
69 69
70 70 def _get_pull_requests_list(
71 71 self, repo_name, source, filter_type, opened_by, statuses):
72 72
73 73 draw, start, limit = self._extract_chunk(self.request)
74 74 search_q, order_by, order_dir = self._extract_ordering(self.request)
75 75 _render = self.request.get_partial_renderer(
76 76 'rhodecode:templates/data_table/_dt_elements.mako')
77 77
78 78 # pagination
79 79
80 80 if filter_type == 'awaiting_review':
81 81 pull_requests = PullRequestModel().get_awaiting_review(
82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
83 statuses=statuses, offset=start, length=limit,
84 order_by=order_by, order_dir=order_dir)
82 repo_name,
83 search_q=search_q, statuses=statuses,
84 offset=start, length=limit, order_by=order_by, order_dir=order_dir)
85 85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
86 repo_name, search_q=search_q, source=source, statuses=statuses,
87 opened_by=opened_by)
86 repo_name,
87 search_q=search_q, statuses=statuses)
88 88 elif filter_type == 'awaiting_my_review':
89 89 pull_requests = PullRequestModel().get_awaiting_my_review(
90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
91 user_id=self._rhodecode_user.user_id, statuses=statuses,
92 offset=start, length=limit, order_by=order_by,
93 order_dir=order_dir)
90 repo_name, self._rhodecode_user.user_id,
91 search_q=search_q, statuses=statuses,
92 offset=start, length=limit, order_by=order_by, order_dir=order_dir)
94 93 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
96 statuses=statuses, opened_by=opened_by)
94 repo_name, self._rhodecode_user.user_id,
95 search_q=search_q, statuses=statuses)
97 96 else:
98 97 pull_requests = PullRequestModel().get_all(
99 98 repo_name, search_q=search_q, source=source, opened_by=opened_by,
100 99 statuses=statuses, offset=start, length=limit,
101 100 order_by=order_by, order_dir=order_dir)
102 101 pull_requests_total_count = PullRequestModel().count_all(
103 102 repo_name, search_q=search_q, source=source, statuses=statuses,
104 103 opened_by=opened_by)
105 104
106 105 data = []
107 106 comments_model = CommentsModel()
108 107 for pr in pull_requests:
109 108 comments_count = comments_model.get_all_comments(
110 109 self.db_repo.repo_id, pull_request=pr,
111 110 include_drafts=False, count_only=True)
112 111
112 review_statuses = pr.reviewers_statuses(user=self._rhodecode_db_user)
113 my_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
114 if review_statuses and review_statuses[4]:
115 _review_obj, _user, _reasons, _mandatory, statuses = review_statuses
116 my_review_status = statuses[0][1].status
117
113 118 data.append({
114 119 'name': _render('pullrequest_name',
115 120 pr.pull_request_id, pr.pull_request_state,
116 121 pr.work_in_progress, pr.target_repo.repo_name,
117 122 short=True),
118 123 'name_raw': pr.pull_request_id,
119 124 'status': _render('pullrequest_status',
120 125 pr.calculated_review_status()),
126 'my_status': _render('pullrequest_status',
127 my_review_status),
121 128 'title': _render('pullrequest_title', pr.title, pr.description),
122 129 'description': h.escape(pr.description),
123 130 'updated_on': _render('pullrequest_updated_on',
124 131 h.datetime_to_time(pr.updated_on),
125 132 pr.versions_count),
126 133 'updated_on_raw': h.datetime_to_time(pr.updated_on),
127 134 'created_on': _render('pullrequest_updated_on',
128 135 h.datetime_to_time(pr.created_on)),
129 136 'created_on_raw': h.datetime_to_time(pr.created_on),
130 137 'state': pr.pull_request_state,
131 138 'author': _render('pullrequest_author',
132 139 pr.author.full_contact, ),
133 140 'author_raw': pr.author.full_name,
134 141 'comments': _render('pullrequest_comments', comments_count),
135 142 'comments_raw': comments_count,
136 143 'closed': pr.is_closed(),
137 144 })
138 145
139 146 data = ({
140 147 'draw': draw,
141 148 'data': data,
142 149 'recordsTotal': pull_requests_total_count,
143 150 'recordsFiltered': pull_requests_total_count,
144 151 })
145 152 return data
146 153
147 154 @LoginRequired()
148 155 @HasRepoPermissionAnyDecorator(
149 156 'repository.read', 'repository.write', 'repository.admin')
150 157 def pull_request_list(self):
151 158 c = self.load_default_context()
152 159
153 160 req_get = self.request.GET
154 161 c.source = str2bool(req_get.get('source'))
155 162 c.closed = str2bool(req_get.get('closed'))
156 163 c.my = str2bool(req_get.get('my'))
157 164 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 165 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159 166
160 167 c.active = 'open'
161 168 if c.my:
162 169 c.active = 'my'
163 170 if c.closed:
164 171 c.active = 'closed'
165 172 if c.awaiting_review and not c.source:
166 173 c.active = 'awaiting'
167 174 if c.source and not c.awaiting_review:
168 175 c.active = 'source'
169 176 if c.awaiting_my_review:
170 177 c.active = 'awaiting_my'
171 178
172 179 return self._get_template_context(c)
173 180
174 181 @LoginRequired()
175 182 @HasRepoPermissionAnyDecorator(
176 183 'repository.read', 'repository.write', 'repository.admin')
177 184 def pull_request_list_data(self):
178 185 self.load_default_context()
179 186
180 187 # additional filters
181 188 req_get = self.request.GET
182 189 source = str2bool(req_get.get('source'))
183 190 closed = str2bool(req_get.get('closed'))
184 191 my = str2bool(req_get.get('my'))
185 192 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 193 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 194
188 195 filter_type = 'awaiting_review' if awaiting_review \
189 196 else 'awaiting_my_review' if awaiting_my_review \
190 197 else None
191 198
192 199 opened_by = None
193 200 if my:
194 201 opened_by = [self._rhodecode_user.user_id]
195 202
196 203 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 204 if closed:
198 205 statuses = [PullRequest.STATUS_CLOSED]
199 206
200 207 data = self._get_pull_requests_list(
201 208 repo_name=self.db_repo_name, source=source,
202 209 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 210
204 211 return data
205 212
206 213 def _is_diff_cache_enabled(self, target_repo):
207 214 caching_enabled = self._get_general_setting(
208 215 target_repo, 'rhodecode_diff_cache')
209 216 log.debug('Diff caching enabled: %s', caching_enabled)
210 217 return caching_enabled
211 218
212 219 def _get_diffset(self, source_repo_name, source_repo,
213 220 ancestor_commit,
214 221 source_ref_id, target_ref_id,
215 222 target_commit, source_commit, diff_limit, file_limit,
216 223 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
217 224
218 225 target_commit_final = target_commit
219 226 source_commit_final = source_commit
220 227
221 228 if use_ancestor:
222 229 # we might want to not use it for versions
223 230 target_ref_id = ancestor_commit.raw_id
224 231 target_commit_final = ancestor_commit
225 232
226 233 vcs_diff = PullRequestModel().get_diff(
227 234 source_repo, source_ref_id, target_ref_id,
228 235 hide_whitespace_changes, diff_context)
229 236
230 237 diff_processor = diffs.DiffProcessor(
231 238 vcs_diff, format='newdiff', diff_limit=diff_limit,
232 239 file_limit=file_limit, show_full_diff=fulldiff)
233 240
234 241 _parsed = diff_processor.prepare()
235 242
236 243 diffset = codeblocks.DiffSet(
237 244 repo_name=self.db_repo_name,
238 245 source_repo_name=source_repo_name,
239 246 source_node_getter=codeblocks.diffset_node_getter(target_commit_final),
240 247 target_node_getter=codeblocks.diffset_node_getter(source_commit_final),
241 248 )
242 249 diffset = self.path_filter.render_patchset_filtered(
243 250 diffset, _parsed, target_ref_id, source_ref_id)
244 251
245 252 return diffset
246 253
247 254 def _get_range_diffset(self, source_scm, source_repo,
248 255 commit1, commit2, diff_limit, file_limit,
249 256 fulldiff, hide_whitespace_changes, diff_context):
250 257 vcs_diff = source_scm.get_diff(
251 258 commit1, commit2,
252 259 ignore_whitespace=hide_whitespace_changes,
253 260 context=diff_context)
254 261
255 262 diff_processor = diffs.DiffProcessor(
256 263 vcs_diff, format='newdiff', diff_limit=diff_limit,
257 264 file_limit=file_limit, show_full_diff=fulldiff)
258 265
259 266 _parsed = diff_processor.prepare()
260 267
261 268 diffset = codeblocks.DiffSet(
262 269 repo_name=source_repo.repo_name,
263 270 source_node_getter=codeblocks.diffset_node_getter(commit1),
264 271 target_node_getter=codeblocks.diffset_node_getter(commit2))
265 272
266 273 diffset = self.path_filter.render_patchset_filtered(
267 274 diffset, _parsed, commit1.raw_id, commit2.raw_id)
268 275
269 276 return diffset
270 277
271 278 def register_comments_vars(self, c, pull_request, versions, include_drafts=True):
272 279 comments_model = CommentsModel()
273 280
274 281 # GENERAL COMMENTS with versions #
275 282 q = comments_model._all_general_comments_of_pull_request(pull_request)
276 283 q = q.order_by(ChangesetComment.comment_id.asc())
277 284 if not include_drafts:
278 285 q = q.filter(ChangesetComment.draft == false())
279 286 general_comments = q
280 287
281 288 # pick comments we want to render at current version
282 289 c.comment_versions = comments_model.aggregate_comments(
283 290 general_comments, versions, c.at_version_num)
284 291
285 292 # INLINE COMMENTS with versions #
286 293 q = comments_model._all_inline_comments_of_pull_request(pull_request)
287 294 q = q.order_by(ChangesetComment.comment_id.asc())
288 295 if not include_drafts:
289 296 q = q.filter(ChangesetComment.draft == false())
290 297 inline_comments = q
291 298
292 299 c.inline_versions = comments_model.aggregate_comments(
293 300 inline_comments, versions, c.at_version_num, inline=True)
294 301
295 302 # Comments inline+general
296 303 if c.at_version:
297 304 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
298 305 c.comments = c.comment_versions[c.at_version_num]['display']
299 306 else:
300 307 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
301 308 c.comments = c.comment_versions[c.at_version_num]['until']
302 309
303 310 return general_comments, inline_comments
304 311
305 312 @LoginRequired()
306 313 @HasRepoPermissionAnyDecorator(
307 314 'repository.read', 'repository.write', 'repository.admin')
308 315 def pull_request_show(self):
309 316 _ = self.request.translate
310 317 c = self.load_default_context()
311 318
312 319 pull_request = PullRequest.get_or_404(
313 320 self.request.matchdict['pull_request_id'])
314 321 pull_request_id = pull_request.pull_request_id
315 322
316 323 c.state_progressing = pull_request.is_state_changing()
317 324 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
318 325
319 326 _new_state = {
320 327 'created': PullRequest.STATE_CREATED,
321 328 }.get(self.request.GET.get('force_state'))
322 329
323 330 if c.is_super_admin and _new_state:
324 331 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
325 332 h.flash(
326 333 _('Pull Request state was force changed to `{}`').format(_new_state),
327 334 category='success')
328 335 Session().commit()
329 336
330 337 raise HTTPFound(h.route_path(
331 338 'pullrequest_show', repo_name=self.db_repo_name,
332 339 pull_request_id=pull_request_id))
333 340
334 341 version = self.request.GET.get('version')
335 342 from_version = self.request.GET.get('from_version') or version
336 343 merge_checks = self.request.GET.get('merge_checks')
337 344 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
338 345 force_refresh = str2bool(self.request.GET.get('force_refresh'))
339 346 c.range_diff_on = self.request.GET.get('range-diff') == "1"
340 347
341 348 # fetch global flags of ignore ws or context lines
342 349 diff_context = diffs.get_diff_context(self.request)
343 350 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
344 351
345 352 (pull_request_latest,
346 353 pull_request_at_ver,
347 354 pull_request_display_obj,
348 355 at_version) = PullRequestModel().get_pr_version(
349 356 pull_request_id, version=version)
350 357
351 358 pr_closed = pull_request_latest.is_closed()
352 359
353 360 if pr_closed and (version or from_version):
354 361 # not allow to browse versions for closed PR
355 362 raise HTTPFound(h.route_path(
356 363 'pullrequest_show', repo_name=self.db_repo_name,
357 364 pull_request_id=pull_request_id))
358 365
359 366 versions = pull_request_display_obj.versions()
360 367
361 368 c.commit_versions = PullRequestModel().pr_commits_versions(versions)
362 369
363 370 # used to store per-commit range diffs
364 371 c.changes = collections.OrderedDict()
365 372
366 373 c.at_version = at_version
367 374 c.at_version_num = (at_version
368 375 if at_version and at_version != PullRequest.LATEST_VER
369 376 else None)
370 377
371 378 c.at_version_index = ChangesetComment.get_index_from_version(
372 379 c.at_version_num, versions)
373 380
374 381 (prev_pull_request_latest,
375 382 prev_pull_request_at_ver,
376 383 prev_pull_request_display_obj,
377 384 prev_at_version) = PullRequestModel().get_pr_version(
378 385 pull_request_id, version=from_version)
379 386
380 387 c.from_version = prev_at_version
381 388 c.from_version_num = (prev_at_version
382 389 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
383 390 else None)
384 391 c.from_version_index = ChangesetComment.get_index_from_version(
385 392 c.from_version_num, versions)
386 393
387 394 # define if we're in COMPARE mode or VIEW at version mode
388 395 compare = at_version != prev_at_version
389 396
390 397 # the repo_name this pull request was opened against,
391 398 # i.e. the target_repo must match
392 399 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
393 400 log.warning('Mismatch between the current repo: %s, and target %s',
394 401 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
395 402 raise HTTPNotFound()
396 403
397 404 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
398 405
399 406 c.pull_request = pull_request_display_obj
400 407 c.renderer = pull_request_at_ver.description_renderer or c.renderer
401 408 c.pull_request_latest = pull_request_latest
402 409
403 410 # inject latest version
404 411 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
405 412 c.versions = versions + [latest_ver]
406 413
407 414 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
408 415 c.allowed_to_change_status = False
409 416 c.allowed_to_update = False
410 417 c.allowed_to_merge = False
411 418 c.allowed_to_delete = False
412 419 c.allowed_to_comment = False
413 420 c.allowed_to_close = False
414 421 else:
415 422 can_change_status = PullRequestModel().check_user_change_status(
416 423 pull_request_at_ver, self._rhodecode_user)
417 424 c.allowed_to_change_status = can_change_status and not pr_closed
418 425
419 426 c.allowed_to_update = PullRequestModel().check_user_update(
420 427 pull_request_latest, self._rhodecode_user) and not pr_closed
421 428 c.allowed_to_merge = PullRequestModel().check_user_merge(
422 429 pull_request_latest, self._rhodecode_user) and not pr_closed
423 430 c.allowed_to_delete = PullRequestModel().check_user_delete(
424 431 pull_request_latest, self._rhodecode_user) and not pr_closed
425 432 c.allowed_to_comment = not pr_closed
426 433 c.allowed_to_close = c.allowed_to_merge and not pr_closed
427 434
428 435 c.forbid_adding_reviewers = False
429 436
430 437 if pull_request_latest.reviewer_data and \
431 438 'rules' in pull_request_latest.reviewer_data:
432 439 rules = pull_request_latest.reviewer_data['rules'] or {}
433 440 try:
434 441 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
435 442 except Exception:
436 443 pass
437 444
438 445 # check merge capabilities
439 446 _merge_check = MergeCheck.validate(
440 447 pull_request_latest, auth_user=self._rhodecode_user,
441 448 translator=self.request.translate,
442 449 force_shadow_repo_refresh=force_refresh)
443 450
444 451 c.pr_merge_errors = _merge_check.error_details
445 452 c.pr_merge_possible = not _merge_check.failed
446 453 c.pr_merge_message = _merge_check.merge_msg
447 454 c.pr_merge_source_commit = _merge_check.source_commit
448 455 c.pr_merge_target_commit = _merge_check.target_commit
449 456
450 457 c.pr_merge_info = MergeCheck.get_merge_conditions(
451 458 pull_request_latest, translator=self.request.translate)
452 459
453 460 c.pull_request_review_status = _merge_check.review_status
454 461 if merge_checks:
455 462 self.request.override_renderer = \
456 463 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
457 464 return self._get_template_context(c)
458 465
459 466 c.reviewers_count = pull_request.reviewers_count
460 467 c.observers_count = pull_request.observers_count
461 468
462 469 # reviewers and statuses
463 470 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
464 471 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
465 472 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
466 473
467 474 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
468 475 member_reviewer = h.reviewer_as_json(
469 476 member, reasons=reasons, mandatory=mandatory,
470 477 role=review_obj.role,
471 478 user_group=review_obj.rule_user_group_data()
472 479 )
473 480
474 481 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
475 482 member_reviewer['review_status'] = current_review_status
476 483 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
477 484 member_reviewer['allowed_to_update'] = c.allowed_to_update
478 485 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
479 486
480 487 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
481 488
482 489 for observer_obj, member in pull_request_at_ver.observers():
483 490 member_observer = h.reviewer_as_json(
484 491 member, reasons=[], mandatory=False,
485 492 role=observer_obj.role,
486 493 user_group=observer_obj.rule_user_group_data()
487 494 )
488 495 member_observer['allowed_to_update'] = c.allowed_to_update
489 496 c.pull_request_set_observers_data_json['observers'].append(member_observer)
490 497
491 498 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
492 499
493 500 general_comments, inline_comments = \
494 501 self.register_comments_vars(c, pull_request_latest, versions)
495 502
496 503 # TODOs
497 504 c.unresolved_comments = CommentsModel() \
498 505 .get_pull_request_unresolved_todos(pull_request_latest)
499 506 c.resolved_comments = CommentsModel() \
500 507 .get_pull_request_resolved_todos(pull_request_latest)
501 508
502 509 # Drafts
503 510 c.draft_comments = CommentsModel().get_pull_request_drafts(
504 511 self._rhodecode_db_user.user_id,
505 512 pull_request_latest)
506 513
507 514 # if we use version, then do not show later comments
508 515 # than current version
509 516 display_inline_comments = collections.defaultdict(
510 517 lambda: collections.defaultdict(list))
511 518 for co in inline_comments:
512 519 if c.at_version_num:
513 520 # pick comments that are at least UPTO given version, so we
514 521 # don't render comments for higher version
515 522 should_render = co.pull_request_version_id and \
516 523 co.pull_request_version_id <= c.at_version_num
517 524 else:
518 525 # showing all, for 'latest'
519 526 should_render = True
520 527
521 528 if should_render:
522 529 display_inline_comments[co.f_path][co.line_no].append(co)
523 530
524 531 # load diff data into template context, if we use compare mode then
525 532 # diff is calculated based on changes between versions of PR
526 533
527 534 source_repo = pull_request_at_ver.source_repo
528 535 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
529 536
530 537 target_repo = pull_request_at_ver.target_repo
531 538 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
532 539
533 540 if compare:
534 541 # in compare switch the diff base to latest commit from prev version
535 542 target_ref_id = prev_pull_request_display_obj.revisions[0]
536 543
537 544 # despite opening commits for bookmarks/branches/tags, we always
538 545 # convert this to rev to prevent changes after bookmark or branch change
539 546 c.source_ref_type = 'rev'
540 547 c.source_ref = source_ref_id
541 548
542 549 c.target_ref_type = 'rev'
543 550 c.target_ref = target_ref_id
544 551
545 552 c.source_repo = source_repo
546 553 c.target_repo = target_repo
547 554
548 555 c.commit_ranges = []
549 556 source_commit = EmptyCommit()
550 557 target_commit = EmptyCommit()
551 558 c.missing_requirements = False
552 559
553 560 source_scm = source_repo.scm_instance()
554 561 target_scm = target_repo.scm_instance()
555 562
556 563 shadow_scm = None
557 564 try:
558 565 shadow_scm = pull_request_latest.get_shadow_repo()
559 566 except Exception:
560 567 log.debug('Failed to get shadow repo', exc_info=True)
561 568 # try first the existing source_repo, and then shadow
562 569 # repo if we can obtain one
563 570 commits_source_repo = source_scm
564 571 if shadow_scm:
565 572 commits_source_repo = shadow_scm
566 573
567 574 c.commits_source_repo = commits_source_repo
568 575 c.ancestor = None # set it to None, to hide it from PR view
569 576
570 577 # empty version means latest, so we keep this to prevent
571 578 # double caching
572 579 version_normalized = version or PullRequest.LATEST_VER
573 580 from_version_normalized = from_version or PullRequest.LATEST_VER
574 581
575 582 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
576 583 cache_file_path = diff_cache_exist(
577 584 cache_path, 'pull_request', pull_request_id, version_normalized,
578 585 from_version_normalized, source_ref_id, target_ref_id,
579 586 hide_whitespace_changes, diff_context, c.fulldiff)
580 587
581 588 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
582 589 force_recache = self.get_recache_flag()
583 590
584 591 cached_diff = None
585 592 if caching_enabled:
586 593 cached_diff = load_cached_diff(cache_file_path)
587 594
588 595 has_proper_commit_cache = (
589 596 cached_diff and cached_diff.get('commits')
590 597 and len(cached_diff.get('commits', [])) == 5
591 598 and cached_diff.get('commits')[0]
592 599 and cached_diff.get('commits')[3])
593 600
594 601 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
595 602 diff_commit_cache = \
596 603 (ancestor_commit, commit_cache, missing_requirements,
597 604 source_commit, target_commit) = cached_diff['commits']
598 605 else:
599 606 # NOTE(marcink): we reach potentially unreachable errors when a PR has
600 607 # merge errors resulting in potentially hidden commits in the shadow repo.
601 608 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
602 609 and _merge_check.merge_response
603 610 maybe_unreachable = maybe_unreachable \
604 611 and _merge_check.merge_response.metadata.get('unresolved_files')
605 612 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
606 613 diff_commit_cache = \
607 614 (ancestor_commit, commit_cache, missing_requirements,
608 615 source_commit, target_commit) = self.get_commits(
609 616 commits_source_repo,
610 617 pull_request_at_ver,
611 618 source_commit,
612 619 source_ref_id,
613 620 source_scm,
614 621 target_commit,
615 622 target_ref_id,
616 623 target_scm,
617 624 maybe_unreachable=maybe_unreachable)
618 625
619 626 # register our commit range
620 627 for comm in commit_cache.values():
621 628 c.commit_ranges.append(comm)
622 629
623 630 c.missing_requirements = missing_requirements
624 631 c.ancestor_commit = ancestor_commit
625 632 c.statuses = source_repo.statuses(
626 633 [x.raw_id for x in c.commit_ranges])
627 634
628 635 # auto collapse if we have more than limit
629 636 collapse_limit = diffs.DiffProcessor._collapse_commits_over
630 637 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
631 638 c.compare_mode = compare
632 639
633 640 # diff_limit is the old behavior: when hit, it cuts off the whole diff,
634 641 # whereas file_limit only hides the
635 642 # big files from the front-end
636 643 diff_limit = c.visual.cut_off_limit_diff
637 644 file_limit = c.visual.cut_off_limit_file
638 645
639 646 c.missing_commits = False
640 647 if (c.missing_requirements
641 648 or isinstance(source_commit, EmptyCommit)
642 649 or source_commit == target_commit):
643 650
644 651 c.missing_commits = True
645 652 else:
646 653 c.inline_comments = display_inline_comments
647 654
648 655 use_ancestor = True
649 656 if from_version_normalized != version_normalized:
650 657 use_ancestor = False
651 658
652 659 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
653 660 if not force_recache and has_proper_diff_cache:
654 661 c.diffset = cached_diff['diff']
655 662 else:
656 663 try:
657 664 c.diffset = self._get_diffset(
658 665 c.source_repo.repo_name, commits_source_repo,
659 666 c.ancestor_commit,
660 667 source_ref_id, target_ref_id,
661 668 target_commit, source_commit,
662 669 diff_limit, file_limit, c.fulldiff,
663 670 hide_whitespace_changes, diff_context,
664 671 use_ancestor=use_ancestor
665 672 )
666 673
667 674 # save cached diff
668 675 if caching_enabled:
669 676 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
670 677 except CommitDoesNotExistError:
671 678 log.exception('Failed to generate diffset')
672 679 c.missing_commits = True
673 680
674 681 if not c.missing_commits:
675 682
676 683 c.limited_diff = c.diffset.limited_diff
677 684
678 685 # calculate removed files that are bound to comments
679 686 comment_deleted_files = [
680 687 fname for fname in display_inline_comments
681 688 if fname not in c.diffset.file_stats]
682 689
683 690 c.deleted_files_comments = collections.defaultdict(dict)
684 691 for fname, per_line_comments in display_inline_comments.items():
685 692 if fname in comment_deleted_files:
686 693 c.deleted_files_comments[fname]['stats'] = 0
687 694 c.deleted_files_comments[fname]['comments'] = list()
688 695 for lno, comments in per_line_comments.items():
689 696 c.deleted_files_comments[fname]['comments'].extend(comments)
690 697
691 698 # maybe calculate the range diff
692 699 if c.range_diff_on:
693 700 # TODO(marcink): set whitespace/context
694 701 context_lcl = 3
695 702 ign_whitespace_lcl = False
696 703
697 704 for commit in c.commit_ranges:
698 705 commit2 = commit
699 706 commit1 = commit.first_parent
700 707
701 708 range_diff_cache_file_path = diff_cache_exist(
702 709 cache_path, 'diff', commit.raw_id,
703 710 ign_whitespace_lcl, context_lcl, c.fulldiff)
704 711
705 712 cached_diff = None
706 713 if caching_enabled:
707 714 cached_diff = load_cached_diff(range_diff_cache_file_path)
708 715
709 716 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
710 717 if not force_recache and has_proper_diff_cache:
711 718 diffset = cached_diff['diff']
712 719 else:
713 720 diffset = self._get_range_diffset(
714 721 commits_source_repo, source_repo,
715 722 commit1, commit2, diff_limit, file_limit,
716 723 c.fulldiff, ign_whitespace_lcl, context_lcl
717 724 )
718 725
719 726 # save cached diff
720 727 if caching_enabled:
721 728 cache_diff(range_diff_cache_file_path, diffset, None)
722 729
723 730 c.changes[commit.raw_id] = diffset
724 731
725 732 # this is a hack to properly display links: when creating a PR, the
726 733 # compare view and others use a different notation, and
727 734 # compare_commits.mako renders links based on the target_repo.
728 735 # We need to swap that here to generate the links properly on the HTML side
729 736 c.target_repo = c.source_repo
730 737
731 738 c.commit_statuses = ChangesetStatus.STATUSES
732 739
733 740 c.show_version_changes = not pr_closed
734 741 if c.show_version_changes:
735 742 cur_obj = pull_request_at_ver
736 743 prev_obj = prev_pull_request_at_ver
737 744
738 745 old_commit_ids = prev_obj.revisions
739 746 new_commit_ids = cur_obj.revisions
740 747 commit_changes = PullRequestModel()._calculate_commit_id_changes(
741 748 old_commit_ids, new_commit_ids)
742 749 c.commit_changes_summary = commit_changes
743 750
744 751 # calculate the diff for commits between versions
745 752 c.commit_changes = []
746 753
747 754 def mark(cs, fw):
748 755 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
749 756
750 757 for c_type, raw_id in mark(commit_changes.added, 'a') \
751 758 + mark(commit_changes.removed, 'r') \
752 759 + mark(commit_changes.common, 'c'):
753 760
754 761 if raw_id in commit_cache:
755 762 commit = commit_cache[raw_id]
756 763 else:
757 764 try:
758 765 commit = commits_source_repo.get_commit(raw_id)
759 766 except CommitDoesNotExistError:
760 767 # in case extraction fails, still use a "dummy" commit
761 768 # for display in the commit diff
762 769 commit = h.AttributeDict(
763 770 {'raw_id': raw_id,
764 771 'message': 'EMPTY or MISSING COMMIT'})
765 772 c.commit_changes.append([c_type, commit])
766 773
767 774 # current user review statuses for each version
768 775 c.review_versions = {}
769 776 is_reviewer = PullRequestModel().is_user_reviewer(
770 777 pull_request, self._rhodecode_user)
771 778 if is_reviewer:
772 779 for co in general_comments:
773 780 if co.author.user_id == self._rhodecode_user.user_id:
774 781 status = co.status_change
775 782 if status:
776 783 _ver_pr = status[0].comment.pull_request_version_id
777 784 c.review_versions[_ver_pr] = status[0]
778 785
779 786 return self._get_template_context(c)
780 787
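The block above follows a load-or-compute-then-store pattern: a cache key is derived from the PR version and refs, a cached diff is loaded when caching is enabled, and the diff is recomputed and stored otherwise. A minimal standalone sketch of that pattern, using a pickle file as an illustrative stand-in for the diff_cache_exist/load_cached_diff/cache_diff helpers used above:

# Illustrative sketch only -- the file-based cache below stands in for
# RhodeCode's diff cache helpers; it is not the project's actual API.
import os
import pickle

def load_or_compute(cache_file_path, compute, caching_enabled=True, force_recache=False):
    if caching_enabled and not force_recache and os.path.exists(cache_file_path):
        with open(cache_file_path, 'rb') as f:
            return pickle.load(f)
    result = compute()
    if caching_enabled:
        with open(cache_file_path, 'wb') as f:
            pickle.dump(result, f)
    return result

def compute_diff():
    # stand-in for the expensive diffset/commit computation
    return {'diff': 'example-diffset', 'commits': ['c1', 'c2']}

cached = load_or_compute('/tmp/pr_diff_cache.pkl', compute_diff)
print(cached['diff'])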
781 788 def get_commits(
782 789 self, commits_source_repo, pull_request_at_ver, source_commit,
783 790 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
784 791 maybe_unreachable=False):
785 792
786 793 commit_cache = collections.OrderedDict()
787 794 missing_requirements = False
788 795
789 796 try:
790 797 pre_load = ["author", "date", "message", "branch", "parents"]
791 798
792 799 pull_request_commits = pull_request_at_ver.revisions
793 800 log.debug('Loading %s commits from %s',
794 801 len(pull_request_commits), commits_source_repo)
795 802
796 803 for rev in pull_request_commits:
797 804 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
798 805 maybe_unreachable=maybe_unreachable)
799 806 commit_cache[comm.raw_id] = comm
800 807
801 808 # Order matters here: we first need to get the target, and then
802 809 # the source
803 810 target_commit = commits_source_repo.get_commit(
804 811 commit_id=safe_str(target_ref_id))
805 812
806 813 source_commit = commits_source_repo.get_commit(
807 814 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
808 815 except CommitDoesNotExistError:
809 816 log.warning('Failed to get commit from `{}` repo'.format(
810 817 commits_source_repo), exc_info=True)
811 818 except RepositoryRequirementError:
812 819 log.warning('Failed to get all required data from repo', exc_info=True)
813 820 missing_requirements = True
814 821
815 822 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
816 823
817 824 try:
818 825 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
819 826 except Exception:
820 827 ancestor_commit = None
821 828
822 829 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
823 830
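get_commits() returns a 5-tuple whose second element is an ordered mapping of raw commit id to commit object; the earlier cache-validity check (length of the cached 'commits' tuple == 5) relies on exactly that shape. A small sketch of building such an ordered cache; FakeRepo/FakeCommit are illustrative stubs, not RhodeCode classes:

# Illustrative stubs, not RhodeCode classes.
import collections

class FakeCommit(object):
    def __init__(self, raw_id):
        self.raw_id = raw_id

class FakeRepo(object):
    def get_commit(self, commit_id):
        return FakeCommit(commit_id)

def build_commit_cache(repo, revisions):
    commit_cache = collections.OrderedDict()
    for rev in revisions:
        commit = repo.get_commit(commit_id=rev)
        commit_cache[commit.raw_id] = commit
    return commit_cache

cache = build_commit_cache(FakeRepo(), ['c1', 'c2', 'c3'])
assert list(cache) == ['c1', 'c2', 'c3']  # insertion order is preserved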
824 831 def assure_not_empty_repo(self):
825 832 _ = self.request.translate
826 833
827 834 try:
828 835 self.db_repo.scm_instance().get_commit()
829 836 except EmptyRepositoryError:
830 837 h.flash(h.literal(_('There are no commits yet')),
831 838 category='warning')
832 839 raise HTTPFound(
833 840 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
834 841
835 842 @LoginRequired()
836 843 @NotAnonymous()
837 844 @HasRepoPermissionAnyDecorator(
838 845 'repository.read', 'repository.write', 'repository.admin')
839 846 def pull_request_new(self):
840 847 _ = self.request.translate
841 848 c = self.load_default_context()
842 849
843 850 self.assure_not_empty_repo()
844 851 source_repo = self.db_repo
845 852
846 853 commit_id = self.request.GET.get('commit')
847 854 branch_ref = self.request.GET.get('branch')
848 855 bookmark_ref = self.request.GET.get('bookmark')
849 856
850 857 try:
851 858 source_repo_data = PullRequestModel().generate_repo_data(
852 859 source_repo, commit_id=commit_id,
853 860 branch=branch_ref, bookmark=bookmark_ref,
854 861 translator=self.request.translate)
855 862 except CommitDoesNotExistError as e:
856 863 log.exception(e)
857 864 h.flash(_('Commit does not exist'), 'error')
858 865 raise HTTPFound(
859 866 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
860 867
861 868 default_target_repo = source_repo
862 869
863 870 if source_repo.parent and c.has_origin_repo_read_perm:
864 871 parent_vcs_obj = source_repo.parent.scm_instance()
865 872 if parent_vcs_obj and not parent_vcs_obj.is_empty():
866 873 # change default if we have a parent repo
867 874 default_target_repo = source_repo.parent
868 875
869 876 target_repo_data = PullRequestModel().generate_repo_data(
870 877 default_target_repo, translator=self.request.translate)
871 878
872 879 selected_source_ref = source_repo_data['refs']['selected_ref']
873 880 title_source_ref = ''
874 881 if selected_source_ref:
875 882 title_source_ref = selected_source_ref.split(':', 2)[1]
876 883 c.default_title = PullRequestModel().generate_pullrequest_title(
877 884 source=source_repo.repo_name,
878 885 source_ref=title_source_ref,
879 886 target=default_target_repo.repo_name
880 887 )
881 888
882 889 c.default_repo_data = {
883 890 'source_repo_name': source_repo.repo_name,
884 891 'source_refs_json': json.dumps(source_repo_data),
885 892 'target_repo_name': default_target_repo.repo_name,
886 893 'target_refs_json': json.dumps(target_repo_data),
887 894 }
888 895 c.default_source_ref = selected_source_ref
889 896
890 897 return self._get_template_context(c)
891 898
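pull_request_new() derives a default title from the selected source ref, which arrives as a 'type:name:commit_id' string and is split so that only the name part is used. A hedged sketch of that derivation; the exact title format below is illustrative, not necessarily what generate_pullrequest_title() produces:

# Illustrative sketch of deriving a default PR title from the selected ref.
def generate_title(source, source_ref, target):
    # the real format lives in PullRequestModel.generate_pullrequest_title()
    return '{source}#{ref} to {target}'.format(source=source, ref=source_ref, target=target)

selected_source_ref = 'branch:feature-x:abcdef0123456789'
title_source_ref = selected_source_ref.split(':', 2)[1]  # -> 'feature-x'
print(generate_title('my-repo', title_source_ref, 'upstream-repo'))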
892 899 @LoginRequired()
893 900 @NotAnonymous()
894 901 @HasRepoPermissionAnyDecorator(
895 902 'repository.read', 'repository.write', 'repository.admin')
896 903 def pull_request_repo_refs(self):
897 904 self.load_default_context()
898 905 target_repo_name = self.request.matchdict['target_repo_name']
899 906 repo = Repository.get_by_repo_name(target_repo_name)
900 907 if not repo:
901 908 raise HTTPNotFound()
902 909
903 910 target_perm = HasRepoPermissionAny(
904 911 'repository.read', 'repository.write', 'repository.admin')(
905 912 target_repo_name)
906 913 if not target_perm:
907 914 raise HTTPNotFound()
908 915
909 916 return PullRequestModel().generate_repo_data(
910 917 repo, translator=self.request.translate)
911 918
912 919 @LoginRequired()
913 920 @NotAnonymous()
914 921 @HasRepoPermissionAnyDecorator(
915 922 'repository.read', 'repository.write', 'repository.admin')
916 923 def pullrequest_repo_targets(self):
917 924 _ = self.request.translate
918 925 filter_query = self.request.GET.get('query')
919 926
920 927 # get the parents
921 928 parent_target_repos = []
922 929 if self.db_repo.parent:
923 930 parents_query = Repository.query() \
924 931 .order_by(func.length(Repository.repo_name)) \
925 932 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
926 933
927 934 if filter_query:
928 935 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
929 936 parents_query = parents_query.filter(
930 937 Repository.repo_name.ilike(ilike_expression))
931 938 parents = parents_query.limit(20).all()
932 939
933 940 for parent in parents:
934 941 parent_vcs_obj = parent.scm_instance()
935 942 if parent_vcs_obj and not parent_vcs_obj.is_empty():
936 943 parent_target_repos.append(parent)
937 944
938 945 # get other forks, and repo itself
939 946 query = Repository.query() \
940 947 .order_by(func.length(Repository.repo_name)) \
941 948 .filter(
942 949 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
943 950 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
944 951 ) \
945 952 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
946 953
947 954 if filter_query:
948 955 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
949 956 query = query.filter(Repository.repo_name.ilike(ilike_expression))
950 957
951 958 limit = max(20 - len(parent_target_repos), 5) # not less than 5
952 959 target_repos = query.limit(limit).all()
953 960
954 961 all_target_repos = target_repos + parent_target_repos
955 962
956 963 repos = []
957 964 # This checks permissions to the repositories
958 965 for obj in ScmModel().get_repos(all_target_repos):
959 966 repos.append({
960 967 'id': obj['name'],
961 968 'text': obj['name'],
962 969 'type': 'repo',
963 970 'repo_id': obj['dbrepo']['repo_id'],
964 971 'repo_type': obj['dbrepo']['repo_type'],
965 972 'private': obj['dbrepo']['private'],
966 973
967 974 })
968 975
969 976 data = {
970 977 'more': False,
971 978 'results': [{
972 979 'text': _('Repositories'),
973 980 'children': repos
974 981 }] if repos else []
975 982 }
976 983 return data
977 984
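The target-repo picker above assembles parent forks first, then this repo and its own forks, excludes repos already collected from the parent, and keeps the second query's limit at no fewer than 5 results. The same assembly on plain data, with dicts standing in for Repository rows:

# Plain-data sketch of how the target-repo list is assembled above.
def combine_targets(parent_target_repos, other_repos):
    limit = max(20 - len(parent_target_repos), 5)  # not less than 5
    seen = set(r['repo_id'] for r in parent_target_repos)
    target_repos = [r for r in other_repos if r['repo_id'] not in seen][:limit]
    return target_repos + parent_target_repos

parents = [{'repo_id': 1, 'name': 'parent-fork'}]
others = [{'repo_id': 1, 'name': 'parent-fork'}, {'repo_id': 2, 'name': 'origin'}]
print([r['name'] for r in combine_targets(parents, others)])  # ['origin', 'parent-fork']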
978 985 @classmethod
979 986 def get_comment_ids(cls, post_data):
980 987 return filter(lambda e: e > 0, map(safe_int, aslist(post_data.get('comments'), ',')))
981 988
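get_comment_ids() turns a comma-separated 'comments' POST value into a list of positive integer ids, silently dropping anything that is not a positive number. A standalone equivalent, with plain Python in place of the aslist/safe_int helpers:

# Standalone equivalent of get_comment_ids(); plain Python instead of
# the aslist/safe_int helpers used above.
def parse_comment_ids(raw):
    ids = []
    for part in (raw or '').split(','):
        part = part.strip()
        if part.isdigit() and int(part) > 0:
            ids.append(int(part))
    return ids

print(parse_comment_ids('12, 0, abc, 34'))  # [12, 34]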
982 989 @LoginRequired()
983 990 @NotAnonymous()
984 991 @HasRepoPermissionAnyDecorator(
985 992 'repository.read', 'repository.write', 'repository.admin')
986 993 def pullrequest_comments(self):
987 994 self.load_default_context()
988 995
989 996 pull_request = PullRequest.get_or_404(
990 997 self.request.matchdict['pull_request_id'])
991 998 pull_request_id = pull_request.pull_request_id
992 999 version = self.request.GET.get('version')
993 1000
994 1001 _render = self.request.get_partial_renderer(
995 1002 'rhodecode:templates/base/sidebar.mako')
996 1003 c = _render.get_call_context()
997 1004
998 1005 (pull_request_latest,
999 1006 pull_request_at_ver,
1000 1007 pull_request_display_obj,
1001 1008 at_version) = PullRequestModel().get_pr_version(
1002 1009 pull_request_id, version=version)
1003 1010 versions = pull_request_display_obj.versions()
1004 1011 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1005 1012 c.versions = versions + [latest_ver]
1006 1013
1007 1014 c.at_version = at_version
1008 1015 c.at_version_num = (at_version
1009 1016 if at_version and at_version != PullRequest.LATEST_VER
1010 1017 else None)
1011 1018
1012 1019 self.register_comments_vars(c, pull_request_latest, versions, include_drafts=False)
1013 1020 all_comments = c.inline_comments_flat + c.comments
1014 1021
1015 1022 existing_ids = self.get_comment_ids(self.request.POST)
1016 1023 return _render('comments_table', all_comments, len(all_comments),
1017 1024 existing_ids=existing_ids)
1018 1025
1019 1026 @LoginRequired()
1020 1027 @NotAnonymous()
1021 1028 @HasRepoPermissionAnyDecorator(
1022 1029 'repository.read', 'repository.write', 'repository.admin')
1023 1030 def pullrequest_todos(self):
1024 1031 self.load_default_context()
1025 1032
1026 1033 pull_request = PullRequest.get_or_404(
1027 1034 self.request.matchdict['pull_request_id'])
1028 1035 pull_request_id = pull_request.pull_request_id
1029 1036 version = self.request.GET.get('version')
1030 1037
1031 1038 _render = self.request.get_partial_renderer(
1032 1039 'rhodecode:templates/base/sidebar.mako')
1033 1040 c = _render.get_call_context()
1034 1041 (pull_request_latest,
1035 1042 pull_request_at_ver,
1036 1043 pull_request_display_obj,
1037 1044 at_version) = PullRequestModel().get_pr_version(
1038 1045 pull_request_id, version=version)
1039 1046 versions = pull_request_display_obj.versions()
1040 1047 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1041 1048 c.versions = versions + [latest_ver]
1042 1049
1043 1050 c.at_version = at_version
1044 1051 c.at_version_num = (at_version
1045 1052 if at_version and at_version != PullRequest.LATEST_VER
1046 1053 else None)
1047 1054
1048 1055 c.unresolved_comments = CommentsModel() \
1049 1056 .get_pull_request_unresolved_todos(pull_request, include_drafts=False)
1050 1057 c.resolved_comments = CommentsModel() \
1051 1058 .get_pull_request_resolved_todos(pull_request, include_drafts=False)
1052 1059
1053 1060 all_comments = c.unresolved_comments + c.resolved_comments
1054 1061 existing_ids = self.get_comment_ids(self.request.POST)
1055 1062 return _render('comments_table', all_comments, len(c.unresolved_comments),
1056 1063 todo_comments=True, existing_ids=existing_ids)
1057 1064
1058 1065 @LoginRequired()
1059 1066 @NotAnonymous()
1060 1067 @HasRepoPermissionAnyDecorator(
1061 1068 'repository.read', 'repository.write', 'repository.admin')
1062 1069 def pullrequest_drafts(self):
1063 1070 self.load_default_context()
1064 1071
1065 1072 pull_request = PullRequest.get_or_404(
1066 1073 self.request.matchdict['pull_request_id'])
1067 1074 pull_request_id = pull_request.pull_request_id
1068 1075 version = self.request.GET.get('version')
1069 1076
1070 1077 _render = self.request.get_partial_renderer(
1071 1078 'rhodecode:templates/base/sidebar.mako')
1072 1079 c = _render.get_call_context()
1073 1080
1074 1081 (pull_request_latest,
1075 1082 pull_request_at_ver,
1076 1083 pull_request_display_obj,
1077 1084 at_version) = PullRequestModel().get_pr_version(
1078 1085 pull_request_id, version=version)
1079 1086 versions = pull_request_display_obj.versions()
1080 1087 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1081 1088 c.versions = versions + [latest_ver]
1082 1089
1083 1090 c.at_version = at_version
1084 1091 c.at_version_num = (at_version
1085 1092 if at_version and at_version != PullRequest.LATEST_VER
1086 1093 else None)
1087 1094
1088 1095 c.draft_comments = CommentsModel() \
1089 1096 .get_pull_request_drafts(self._rhodecode_db_user.user_id, pull_request)
1090 1097
1091 1098 all_comments = c.draft_comments
1092 1099
1093 1100 existing_ids = self.get_comment_ids(self.request.POST)
1094 1101 return _render('comments_table', all_comments, len(all_comments),
1095 1102 existing_ids=existing_ids, draft_comments=True)
1096 1103
1097 1104 @LoginRequired()
1098 1105 @NotAnonymous()
1099 1106 @HasRepoPermissionAnyDecorator(
1100 1107 'repository.read', 'repository.write', 'repository.admin')
1101 1108 @CSRFRequired()
1102 1109 def pull_request_create(self):
1103 1110 _ = self.request.translate
1104 1111 self.assure_not_empty_repo()
1105 1112 self.load_default_context()
1106 1113
1107 1114 controls = peppercorn.parse(self.request.POST.items())
1108 1115
1109 1116 try:
1110 1117 form = PullRequestForm(
1111 1118 self.request.translate, self.db_repo.repo_id)()
1112 1119 _form = form.to_python(controls)
1113 1120 except formencode.Invalid as errors:
1114 1121 if errors.error_dict.get('revisions'):
1115 1122 msg = 'Revisions: %s' % errors.error_dict['revisions']
1116 1123 elif errors.error_dict.get('pullrequest_title'):
1117 1124 msg = errors.error_dict.get('pullrequest_title')
1118 1125 else:
1119 1126 msg = _('Error creating pull request: {}').format(errors)
1120 1127 log.exception(msg)
1121 1128 h.flash(msg, 'error')
1122 1129
1123 1130 # would rather just go back to form ...
1124 1131 raise HTTPFound(
1125 1132 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1126 1133
1127 1134 source_repo = _form['source_repo']
1128 1135 source_ref = _form['source_ref']
1129 1136 target_repo = _form['target_repo']
1130 1137 target_ref = _form['target_ref']
1131 1138 commit_ids = _form['revisions'][::-1]
1132 1139 common_ancestor_id = _form['common_ancestor']
1133 1140
1134 1141 # find the ancestor for this pr
1135 1142 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1136 1143 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1137 1144
1138 1145 if not (source_db_repo and target_db_repo):
1139 1146 h.flash(_('Source repository or target repository not found.'), category='error')
1140 1147 raise HTTPFound(
1141 1148 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1142 1149
1143 1150 # re-check permissions here
1144 1151 # for the source_repo we must have read permissions
1145 1152
1146 1153 source_perm = HasRepoPermissionAny(
1147 1154 'repository.read', 'repository.write', 'repository.admin')(
1148 1155 source_db_repo.repo_name)
1149 1156 if not source_perm:
1150 1157 msg = _('Not enough permissions for source repo `{}`.'.format(
1151 1158 source_db_repo.repo_name))
1152 1159 h.flash(msg, category='error')
1153 1160 # copy the args back to redirect
1154 1161 org_query = self.request.GET.mixed()
1155 1162 raise HTTPFound(
1156 1163 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1157 1164 _query=org_query))
1158 1165
1159 1166 # for the target repo we must have read permissions, and later on
1160 1167 # we also want to check branch permissions here
1161 1168 target_perm = HasRepoPermissionAny(
1162 1169 'repository.read', 'repository.write', 'repository.admin')(
1163 1170 target_db_repo.repo_name)
1164 1171 if not target_perm:
1165 1172 msg = _('Not enough permissions for target repo `{}`.'.format(
1166 1173 target_db_repo.repo_name))
1167 1174 h.flash(msg, category='error')
1168 1175 # copy the args back to redirect
1169 1176 org_query = self.request.GET.mixed()
1170 1177 raise HTTPFound(
1171 1178 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1172 1179 _query=org_query))
1173 1180
1174 1181 source_scm = source_db_repo.scm_instance()
1175 1182 target_scm = target_db_repo.scm_instance()
1176 1183
1177 1184 source_ref_obj = unicode_to_reference(source_ref)
1178 1185 target_ref_obj = unicode_to_reference(target_ref)
1179 1186
1180 1187 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1181 1188 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1182 1189
1183 1190 ancestor = source_scm.get_common_ancestor(
1184 1191 source_commit.raw_id, target_commit.raw_id, target_scm)
1185 1192
1186 1193 # recalculate target ref based on ancestor
1187 1194 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1188 1195
1189 1196 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1190 1197 PullRequestModel().get_reviewer_functions()
1191 1198
1192 1199 # recalculate reviewers logic, to make sure we can validate this
1193 1200 reviewer_rules = get_default_reviewers_data(
1194 1201 self._rhodecode_db_user,
1195 1202 source_db_repo,
1196 1203 source_ref_obj,
1197 1204 target_db_repo,
1198 1205 target_ref_obj,
1199 1206 include_diff_info=False)
1200 1207
1201 1208 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1202 1209 observers = validate_observers(_form['observer_members'], reviewer_rules)
1203 1210
1204 1211 pullrequest_title = _form['pullrequest_title']
1205 1212 title_source_ref = source_ref_obj.name
1206 1213 if not pullrequest_title:
1207 1214 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1208 1215 source=source_repo,
1209 1216 source_ref=title_source_ref,
1210 1217 target=target_repo
1211 1218 )
1212 1219
1213 1220 description = _form['pullrequest_desc']
1214 1221 description_renderer = _form['description_renderer']
1215 1222
1216 1223 try:
1217 1224 pull_request = PullRequestModel().create(
1218 1225 created_by=self._rhodecode_user.user_id,
1219 1226 source_repo=source_repo,
1220 1227 source_ref=source_ref,
1221 1228 target_repo=target_repo,
1222 1229 target_ref=target_ref,
1223 1230 revisions=commit_ids,
1224 1231 common_ancestor_id=common_ancestor_id,
1225 1232 reviewers=reviewers,
1226 1233 observers=observers,
1227 1234 title=pullrequest_title,
1228 1235 description=description,
1229 1236 description_renderer=description_renderer,
1230 1237 reviewer_data=reviewer_rules,
1231 1238 auth_user=self._rhodecode_user
1232 1239 )
1233 1240 Session().commit()
1234 1241
1235 1242 h.flash(_('Successfully opened new pull request'),
1236 1243 category='success')
1237 1244 except Exception:
1238 1245 msg = _('Error occurred during creation of this pull request.')
1239 1246 log.exception(msg)
1240 1247 h.flash(msg, category='error')
1241 1248
1242 1249 # copy the args back to redirect
1243 1250 org_query = self.request.GET.mixed()
1244 1251 raise HTTPFound(
1245 1252 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1246 1253 _query=org_query))
1247 1254
1248 1255 raise HTTPFound(
1249 1256 h.route_path('pullrequest_show', repo_name=target_repo,
1250 1257 pull_request_id=pull_request.pull_request_id))
1251 1258
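Before the pull request is created, the target ref is rewritten so its commit id points at the common ancestor of the source and target commits. A sketch of that recalculation; Reference/parse_ref below are illustrative stand-ins for unicode_to_reference():

# Illustrative sketch: rewriting the target ref to point at the ancestor.
import collections

Reference = collections.namedtuple('Reference', 'type name commit_id')

def parse_ref(ref_string):
    ref_type, name, commit_id = ref_string.split(':', 2)
    return Reference(ref_type, name, commit_id)

target_ref_obj = parse_ref('branch:default:ffffffffffff')
ancestor = 'aaaaaaaaaaaa'  # would come from get_common_ancestor()
target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
print(target_ref)  # branch:default:aaaaaaaaaaaa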
1252 1259 @LoginRequired()
1253 1260 @NotAnonymous()
1254 1261 @HasRepoPermissionAnyDecorator(
1255 1262 'repository.read', 'repository.write', 'repository.admin')
1256 1263 @CSRFRequired()
1257 1264 def pull_request_update(self):
1258 1265 pull_request = PullRequest.get_or_404(
1259 1266 self.request.matchdict['pull_request_id'])
1260 1267 _ = self.request.translate
1261 1268
1262 1269 c = self.load_default_context()
1263 1270 redirect_url = None
1264 1271
1265 1272 if pull_request.is_closed():
1266 1273 log.debug('update: forbidden because pull request is closed')
1267 1274 msg = _(u'Cannot update closed pull requests.')
1268 1275 h.flash(msg, category='error')
1269 1276 return {'response': True,
1270 1277 'redirect_url': redirect_url}
1271 1278
1272 1279 is_state_changing = pull_request.is_state_changing()
1273 1280 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1274 1281
1275 1282 # only owner or admin can update it
1276 1283 allowed_to_update = PullRequestModel().check_user_update(
1277 1284 pull_request, self._rhodecode_user)
1278 1285
1279 1286 if allowed_to_update:
1280 1287 controls = peppercorn.parse(self.request.POST.items())
1281 1288 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1282 1289
1283 1290 if 'review_members' in controls:
1284 1291 self._update_reviewers(
1285 1292 c,
1286 1293 pull_request, controls['review_members'],
1287 1294 pull_request.reviewer_data,
1288 1295 PullRequestReviewers.ROLE_REVIEWER)
1289 1296 elif 'observer_members' in controls:
1290 1297 self._update_reviewers(
1291 1298 c,
1292 1299 pull_request, controls['observer_members'],
1293 1300 pull_request.reviewer_data,
1294 1301 PullRequestReviewers.ROLE_OBSERVER)
1295 1302 elif str2bool(self.request.POST.get('update_commits', 'false')):
1296 1303 if is_state_changing:
1297 1304 log.debug('commits update: forbidden because pull request is in state %s',
1298 1305 pull_request.pull_request_state)
1299 1306 msg = _(u'Cannot update pull request commits in state other than `{}`. '
1300 1307 u'Current state is: `{}`').format(
1301 1308 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1302 1309 h.flash(msg, category='error')
1303 1310 return {'response': True,
1304 1311 'redirect_url': redirect_url}
1305 1312
1306 1313 self._update_commits(c, pull_request)
1307 1314 if force_refresh:
1308 1315 redirect_url = h.route_path(
1309 1316 'pullrequest_show', repo_name=self.db_repo_name,
1310 1317 pull_request_id=pull_request.pull_request_id,
1311 1318 _query={"force_refresh": 1})
1312 1319 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1313 1320 self._edit_pull_request(pull_request)
1314 1321 else:
1315 1322 log.error('Unhandled update data.')
1316 1323 raise HTTPBadRequest()
1317 1324
1318 1325 return {'response': True,
1319 1326 'redirect_url': redirect_url}
1320 1327 raise HTTPForbidden()
1321 1328
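pull_request_update() dispatches on which POST fields are present: reviewer members, observer members, a commits update, or a title/description edit; anything else is treated as bad request data. A compact sketch of that dispatch, where string labels stand in for the real handler calls, ValueError stands in for HTTPBadRequest, and str2bool is a simplified stand-in:

# Dispatch sketch; labels and ValueError are stand-ins for the real handlers.
def str2bool(value):
    return str(value).lower() in ('true', '1', 'yes', 'on')

def dispatch_update(post):
    if 'review_members' in post:
        return 'update_reviewers'
    if 'observer_members' in post:
        return 'update_observers'
    if str2bool(post.get('update_commits', 'false')):
        return 'update_commits'
    if str2bool(post.get('edit_pull_request', 'false')):
        return 'edit_pull_request'
    raise ValueError('Unhandled update data.')

print(dispatch_update({'update_commits': 'true'}))  # update_commits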
1322 1329 def _edit_pull_request(self, pull_request):
1323 1330 """
1324 1331 Edit title and description
1325 1332 """
1326 1333 _ = self.request.translate
1327 1334
1328 1335 try:
1329 1336 PullRequestModel().edit(
1330 1337 pull_request,
1331 1338 self.request.POST.get('title'),
1332 1339 self.request.POST.get('description'),
1333 1340 self.request.POST.get('description_renderer'),
1334 1341 self._rhodecode_user)
1335 1342 except ValueError:
1336 1343 msg = _(u'Cannot update closed pull requests.')
1337 1344 h.flash(msg, category='error')
1338 1345 return
1339 1346 else:
1340 1347 Session().commit()
1341 1348
1342 1349 msg = _(u'Pull request title & description updated.')
1343 1350 h.flash(msg, category='success')
1344 1351 return
1345 1352
1346 1353 def _update_commits(self, c, pull_request):
1347 1354 _ = self.request.translate
1348 1355
1349 1356 with pull_request.set_state(PullRequest.STATE_UPDATING):
1350 1357 resp = PullRequestModel().update_commits(
1351 1358 pull_request, self._rhodecode_db_user)
1352 1359
1353 1360 if resp.executed:
1354 1361
1355 1362 if resp.target_changed and resp.source_changed:
1356 1363 changed = 'target and source repositories'
1357 1364 elif resp.target_changed and not resp.source_changed:
1358 1365 changed = 'target repository'
1359 1366 elif not resp.target_changed and resp.source_changed:
1360 1367 changed = 'source repository'
1361 1368 else:
1362 1369 changed = 'nothing'
1363 1370
1364 1371 msg = _(u'Pull request updated to "{source_commit_id}" with '
1365 1372 u'{count_added} added, {count_removed} removed commits. '
1366 1373 u'Source of changes: {change_source}.')
1367 1374 msg = msg.format(
1368 1375 source_commit_id=pull_request.source_ref_parts.commit_id,
1369 1376 count_added=len(resp.changes.added),
1370 1377 count_removed=len(resp.changes.removed),
1371 1378 change_source=changed)
1372 1379 h.flash(msg, category='success')
1373 1380 channelstream.pr_update_channelstream_push(
1374 1381 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1375 1382 else:
1376 1383 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1377 1384 warning_reasons = [
1378 1385 UpdateFailureReason.NO_CHANGE,
1379 1386 UpdateFailureReason.WRONG_REF_TYPE,
1380 1387 ]
1381 1388 category = 'warning' if resp.reason in warning_reasons else 'error'
1382 1389 h.flash(msg, category=category)
1383 1390
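After a successful commits update, the flash message reports what changed (target, source, both, or nothing) together with the added/removed counts. Composing that message in isolation, with plain values in place of the update response object:

# Sketch of composing the update summary message above.
def describe_change(target_changed, source_changed):
    if target_changed and source_changed:
        return 'target and source repositories'
    if target_changed:
        return 'target repository'
    if source_changed:
        return 'source repository'
    return 'nothing'

msg = ('Pull request updated to "{source_commit_id}" with {count_added} added, '
       '{count_removed} removed commits. Source of changes: {change_source}.')
print(msg.format(source_commit_id='abc123', count_added=2, count_removed=0,
                 change_source=describe_change(True, False)))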
1384 1391 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1385 1392 _ = self.request.translate
1386 1393
1387 1394 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1388 1395 PullRequestModel().get_reviewer_functions()
1389 1396
1390 1397 if role == PullRequestReviewers.ROLE_REVIEWER:
1391 1398 try:
1392 1399 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1393 1400 except ValueError as e:
1394 1401 log.error('Reviewers Validation: {}'.format(e))
1395 1402 h.flash(e, category='error')
1396 1403 return
1397 1404
1398 1405 old_calculated_status = pull_request.calculated_review_status()
1399 1406 PullRequestModel().update_reviewers(
1400 1407 pull_request, reviewers, self._rhodecode_db_user)
1401 1408
1402 1409 Session().commit()
1403 1410
1404 1411 msg = _('Pull request reviewers updated.')
1405 1412 h.flash(msg, category='success')
1406 1413 channelstream.pr_update_channelstream_push(
1407 1414 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1408 1415
1409 1416 # trigger status changed if change in reviewers changes the status
1410 1417 calculated_status = pull_request.calculated_review_status()
1411 1418 if old_calculated_status != calculated_status:
1412 1419 PullRequestModel().trigger_pull_request_hook(
1413 1420 pull_request, self._rhodecode_user, 'review_status_change',
1414 1421 data={'status': calculated_status})
1415 1422
1416 1423 elif role == PullRequestReviewers.ROLE_OBSERVER:
1417 1424 try:
1418 1425 observers = validate_observers(review_members, reviewer_rules)
1419 1426 except ValueError as e:
1420 1427 log.error('Observers Validation: {}'.format(e))
1421 1428 h.flash(e, category='error')
1422 1429 return
1423 1430
1424 1431 PullRequestModel().update_observers(
1425 1432 pull_request, observers, self._rhodecode_db_user)
1426 1433
1427 1434 Session().commit()
1428 1435 msg = _('Pull request observers updated.')
1429 1436 h.flash(msg, category='success')
1430 1437 channelstream.pr_update_channelstream_push(
1431 1438 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1432 1439
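_update_reviewers() computes the calculated review status before and after the change and only fires the review_status_change hook when the value actually moved. The same fire-only-on-change pattern in isolation; the callables here are stand-ins, not RhodeCode APIs:

# Sketch of the "trigger the hook only if the status changed" pattern.
def apply_and_notify(calc_status, apply_change, trigger_hook):
    old_status = calc_status()
    apply_change()
    new_status = calc_status()
    if old_status != new_status:
        trigger_hook(new_status)

events = []
state = {'status': 'under_review'}
apply_and_notify(
    calc_status=lambda: state['status'],
    apply_change=lambda: state.update(status='approved'),
    trigger_hook=lambda status: events.append(('review_status_change', status)))
print(events)  # [('review_status_change', 'approved')]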
1433 1440 @LoginRequired()
1434 1441 @NotAnonymous()
1435 1442 @HasRepoPermissionAnyDecorator(
1436 1443 'repository.read', 'repository.write', 'repository.admin')
1437 1444 @CSRFRequired()
1438 1445 def pull_request_merge(self):
1439 1446 """
1440 1447 Merge will perform a server-side merge of the specified
1441 1448 pull request, if the pull request is approved and mergeable.
1442 1449 After successful merging, the pull request is automatically
1443 1450 closed, with a relevant comment.
1444 1451 """
1445 1452 pull_request = PullRequest.get_or_404(
1446 1453 self.request.matchdict['pull_request_id'])
1447 1454 _ = self.request.translate
1448 1455
1449 1456 if pull_request.is_state_changing():
1450 1457 log.debug('show: forbidden because pull request is in state %s',
1451 1458 pull_request.pull_request_state)
1452 1459 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1453 1460 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1454 1461 pull_request.pull_request_state)
1455 1462 h.flash(msg, category='error')
1456 1463 raise HTTPFound(
1457 1464 h.route_path('pullrequest_show',
1458 1465 repo_name=pull_request.target_repo.repo_name,
1459 1466 pull_request_id=pull_request.pull_request_id))
1460 1467
1461 1468 self.load_default_context()
1462 1469
1463 1470 with pull_request.set_state(PullRequest.STATE_UPDATING):
1464 1471 check = MergeCheck.validate(
1465 1472 pull_request, auth_user=self._rhodecode_user,
1466 1473 translator=self.request.translate)
1467 1474 merge_possible = not check.failed
1468 1475
1469 1476 for err_type, error_msg in check.errors:
1470 1477 h.flash(error_msg, category=err_type)
1471 1478
1472 1479 if merge_possible:
1473 1480 log.debug("Pre-conditions checked, trying to merge.")
1474 1481 extras = vcs_operation_context(
1475 1482 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1476 1483 username=self._rhodecode_db_user.username, action='push',
1477 1484 scm=pull_request.target_repo.repo_type)
1478 1485 with pull_request.set_state(PullRequest.STATE_UPDATING):
1479 1486 self._merge_pull_request(
1480 1487 pull_request, self._rhodecode_db_user, extras)
1481 1488 else:
1482 1489 log.debug("Pre-conditions failed, NOT merging.")
1483 1490
1484 1491 raise HTTPFound(
1485 1492 h.route_path('pullrequest_show',
1486 1493 repo_name=pull_request.target_repo.repo_name,
1487 1494 pull_request_id=pull_request.pull_request_id))
1488 1495
1489 1496 def _merge_pull_request(self, pull_request, user, extras):
1490 1497 _ = self.request.translate
1491 1498 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1492 1499
1493 1500 if merge_resp.executed:
1494 1501 log.debug("The merge was successful, closing the pull request.")
1495 1502 PullRequestModel().close_pull_request(
1496 1503 pull_request.pull_request_id, user)
1497 1504 Session().commit()
1498 1505 msg = _('Pull request was successfully merged and closed.')
1499 1506 h.flash(msg, category='success')
1500 1507 else:
1501 1508 log.debug(
1502 1509 "The merge was not successful. Merge response: %s", merge_resp)
1503 1510 msg = merge_resp.merge_status_message
1504 1511 h.flash(msg, category='error')
1505 1512
1506 1513 @LoginRequired()
1507 1514 @NotAnonymous()
1508 1515 @HasRepoPermissionAnyDecorator(
1509 1516 'repository.read', 'repository.write', 'repository.admin')
1510 1517 @CSRFRequired()
1511 1518 def pull_request_delete(self):
1512 1519 _ = self.request.translate
1513 1520
1514 1521 pull_request = PullRequest.get_or_404(
1515 1522 self.request.matchdict['pull_request_id'])
1516 1523 self.load_default_context()
1517 1524
1518 1525 pr_closed = pull_request.is_closed()
1519 1526 allowed_to_delete = PullRequestModel().check_user_delete(
1520 1527 pull_request, self._rhodecode_user) and not pr_closed
1521 1528
1522 1529 # only the owner can delete it!
1523 1530 if allowed_to_delete:
1524 1531 PullRequestModel().delete(pull_request, self._rhodecode_user)
1525 1532 Session().commit()
1526 1533 h.flash(_('Successfully deleted pull request'),
1527 1534 category='success')
1528 1535 raise HTTPFound(h.route_path('pullrequest_show_all',
1529 1536 repo_name=self.db_repo_name))
1530 1537
1531 1538 log.warning('user %s tried to delete pull request without access',
1532 1539 self._rhodecode_user)
1533 1540 raise HTTPNotFound()
1534 1541
1535 1542 def _pull_request_comments_create(self, pull_request, comments):
1536 1543 _ = self.request.translate
1537 1544 data = {}
1538 1545 if not comments:
1539 1546 return
1540 1547 pull_request_id = pull_request.pull_request_id
1541 1548
1542 1549 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
1543 1550
1544 1551 for entry in comments:
1545 1552 c = self.load_default_context()
1546 1553 comment_type = entry['comment_type']
1547 1554 text = entry['text']
1548 1555 status = entry['status']
1549 1556 is_draft = str2bool(entry['is_draft'])
1550 1557 resolves_comment_id = entry['resolves_comment_id']
1551 1558 close_pull_request = entry['close_pull_request']
1552 1559 f_path = entry['f_path']
1553 1560 line_no = entry['line']
1554 1561 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
1555 1562
1556 1563 # the logic here works as follows: if we submit a close-PR
1557 1564 # comment, use the `close_pull_request_with_comment` function,
1558 1565 # else handle the regular comment logic
1559 1566
1560 1567 if close_pull_request:
1561 1568 # only owner or admin or person with write permissions
1562 1569 allowed_to_close = PullRequestModel().check_user_update(
1563 1570 pull_request, self._rhodecode_user)
1564 1571 if not allowed_to_close:
1565 1572 log.debug('comment: forbidden because not allowed to close '
1566 1573 'pull request %s', pull_request_id)
1567 1574 raise HTTPForbidden()
1568 1575
1569 1576 # This also triggers `review_status_change`
1570 1577 comment, status = PullRequestModel().close_pull_request_with_comment(
1571 1578 pull_request, self._rhodecode_user, self.db_repo, message=text,
1572 1579 auth_user=self._rhodecode_user)
1573 1580 Session().flush()
1574 1581 is_inline = comment.is_inline
1575 1582
1576 1583 PullRequestModel().trigger_pull_request_hook(
1577 1584 pull_request, self._rhodecode_user, 'comment',
1578 1585 data={'comment': comment})
1579 1586
1580 1587 else:
1581 1588 # regular comment case, could be inline, or one with status.
1582 1589 # for that one we check also permissions
1583 1590 # Additionally, ensure that if a draft is somehow sent we're unable to change the status
1584 1591 allowed_to_change_status = PullRequestModel().check_user_change_status(
1585 1592 pull_request, self._rhodecode_user) and not is_draft
1586 1593
1587 1594 if status and allowed_to_change_status:
1588 1595 message = (_('Status change %(transition_icon)s %(status)s')
1589 1596 % {'transition_icon': '>',
1590 1597 'status': ChangesetStatus.get_status_lbl(status)})
1591 1598 text = text or message
1592 1599
1593 1600 comment = CommentsModel().create(
1594 1601 text=text,
1595 1602 repo=self.db_repo.repo_id,
1596 1603 user=self._rhodecode_user.user_id,
1597 1604 pull_request=pull_request,
1598 1605 f_path=f_path,
1599 1606 line_no=line_no,
1600 1607 status_change=(ChangesetStatus.get_status_lbl(status)
1601 1608 if status and allowed_to_change_status else None),
1602 1609 status_change_type=(status
1603 1610 if status and allowed_to_change_status else None),
1604 1611 comment_type=comment_type,
1605 1612 is_draft=is_draft,
1606 1613 resolves_comment_id=resolves_comment_id,
1607 1614 auth_user=self._rhodecode_user,
1608 1615 send_email=not is_draft, # skip notification for draft comments
1609 1616 )
1610 1617 is_inline = comment.is_inline
1611 1618
1612 1619 if allowed_to_change_status:
1613 1620 # calculate old status before we change it
1614 1621 old_calculated_status = pull_request.calculated_review_status()
1615 1622
1616 1623 # get status if set !
1617 1624 if status:
1618 1625 ChangesetStatusModel().set_status(
1619 1626 self.db_repo.repo_id,
1620 1627 status,
1621 1628 self._rhodecode_user.user_id,
1622 1629 comment,
1623 1630 pull_request=pull_request
1624 1631 )
1625 1632
1626 1633 Session().flush()
1627 1634 # this refresh is required to get access to relationships
1628 1635 # loaded on the comment
1629 1636 Session().refresh(comment)
1630 1637
1631 1638 # skip notifications for drafts
1632 1639 if not is_draft:
1633 1640 PullRequestModel().trigger_pull_request_hook(
1634 1641 pull_request, self._rhodecode_user, 'comment',
1635 1642 data={'comment': comment})
1636 1643
1637 1644 # we now calculate the status of pull request, and based on that
1638 1645 # calculation we set the commits status
1639 1646 calculated_status = pull_request.calculated_review_status()
1640 1647 if old_calculated_status != calculated_status:
1641 1648 PullRequestModel().trigger_pull_request_hook(
1642 1649 pull_request, self._rhodecode_user, 'review_status_change',
1643 1650 data={'status': calculated_status})
1644 1651
1645 1652 comment_id = comment.comment_id
1646 1653 data[comment_id] = {
1647 1654 'target_id': target_elem_id
1648 1655 }
1649 1656 Session().flush()
1650 1657
1651 1658 c.co = comment
1652 1659 c.at_version_num = None
1653 1660 c.is_new = True
1654 1661 rendered_comment = render(
1655 1662 'rhodecode:templates/changeset/changeset_comment_block.mako',
1656 1663 self._get_template_context(c), self.request)
1657 1664
1658 1665 data[comment_id].update(comment.get_dict())
1659 1666 data[comment_id].update({'rendered_text': rendered_comment})
1660 1667
1661 1668 Session().commit()
1662 1669
1663 1670 # skip channelstream for draft comments
1664 1671 if not all_drafts:
1665 1672 comment_broadcast_channel = channelstream.comment_channel(
1666 1673 self.db_repo_name, pull_request_obj=pull_request)
1667 1674
1668 1675 comment_data = data
1669 1676 posted_comment_type = 'inline' if is_inline else 'general'
1670 1677 if len(data) == 1:
1671 1678 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
1672 1679 else:
1673 1680 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
1674 1681
1675 1682 channelstream.comment_channelstream_push(
1676 1683 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
1677 1684 comment_data=comment_data)
1678 1685
1679 1686 return data
1680 1687
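The bulk comment helper treats a batch as draft-only when every entry is a draft (that is what the length comparison computes), and pluralizes the channelstream message from the number of comments posted. Both checks as standalone functions; str2bool is again a simplified stand-in:

# Standalone sketch of the all-drafts check and the pluralized push message.
def str2bool(value):
    return str(value).lower() in ('true', '1', 'yes', 'on')

def is_all_drafts(comments):
    # equivalent to comparing the count of drafts to len(comments)
    return all(str2bool(c['is_draft']) for c in comments)

def push_message(count, is_inline):
    kind = 'inline' if is_inline else 'general'
    noun = 'comment' if count == 1 else 'comments'
    return 'posted {} new {} {}'.format(count, kind, noun)

print(is_all_drafts([{'is_draft': 'true'}, {'is_draft': 'false'}]))  # False
print(push_message(2, is_inline=True))  # posted 2 new inline comments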
1681 1688 @LoginRequired()
1682 1689 @NotAnonymous()
1683 1690 @HasRepoPermissionAnyDecorator(
1684 1691 'repository.read', 'repository.write', 'repository.admin')
1685 1692 @CSRFRequired()
1686 1693 def pull_request_comment_create(self):
1687 1694 _ = self.request.translate
1688 1695
1689 1696 pull_request = PullRequest.get_or_404(self.request.matchdict['pull_request_id'])
1690 1697
1691 1698 if pull_request.is_closed():
1692 1699 log.debug('comment: forbidden because pull request is closed')
1693 1700 raise HTTPForbidden()
1694 1701
1695 1702 allowed_to_comment = PullRequestModel().check_user_comment(
1696 1703 pull_request, self._rhodecode_user)
1697 1704 if not allowed_to_comment:
1698 1705 log.debug('comment: forbidden because pull request is from forbidden repo')
1699 1706 raise HTTPForbidden()
1700 1707
1701 1708 comment_data = {
1702 1709 'comment_type': self.request.POST.get('comment_type'),
1703 1710 'text': self.request.POST.get('text'),
1704 1711 'status': self.request.POST.get('changeset_status', None),
1705 1712 'is_draft': self.request.POST.get('draft'),
1706 1713 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
1707 1714 'close_pull_request': self.request.POST.get('close_pull_request'),
1708 1715 'f_path': self.request.POST.get('f_path'),
1709 1716 'line': self.request.POST.get('line'),
1710 1717 }
1711 1718 data = self._pull_request_comments_create(pull_request, [comment_data])
1712 1719
1713 1720 return data
1714 1721
1715 1722 @LoginRequired()
1716 1723 @NotAnonymous()
1717 1724 @HasRepoPermissionAnyDecorator(
1718 1725 'repository.read', 'repository.write', 'repository.admin')
1719 1726 @CSRFRequired()
1720 1727 def pull_request_comment_delete(self):
1721 1728 pull_request = PullRequest.get_or_404(
1722 1729 self.request.matchdict['pull_request_id'])
1723 1730
1724 1731 comment = ChangesetComment.get_or_404(
1725 1732 self.request.matchdict['comment_id'])
1726 1733 comment_id = comment.comment_id
1727 1734
1728 1735 if comment.immutable:
1729 1736 # don't allow deleting comments that are immutable
1730 1737 raise HTTPForbidden()
1731 1738
1732 1739 if pull_request.is_closed():
1733 1740 log.debug('comment: forbidden because pull request is closed')
1734 1741 raise HTTPForbidden()
1735 1742
1736 1743 if not comment:
1737 1744 log.debug('Comment with id:%s not found, skipping', comment_id)
1738 1745 # comment already deleted in another call probably
1739 1746 return True
1740 1747
1741 1748 if comment.pull_request.is_closed():
1742 1749 # don't allow deleting comments on closed pull request
1743 1750 raise HTTPForbidden()
1744 1751
1745 1752 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1746 1753 super_admin = h.HasPermissionAny('hg.admin')()
1747 1754 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1748 1755 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1749 1756 comment_repo_admin = is_repo_admin and is_repo_comment
1750 1757
1751 1758 if comment.draft and not comment_owner:
1752 1759 # We never allow deleting draft comments by anyone other than their owner
1753 1760 raise HTTPNotFound()
1754 1761
1755 1762 if super_admin or comment_owner or comment_repo_admin:
1756 1763 old_calculated_status = comment.pull_request.calculated_review_status()
1757 1764 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1758 1765 Session().commit()
1759 1766 calculated_status = comment.pull_request.calculated_review_status()
1760 1767 if old_calculated_status != calculated_status:
1761 1768 PullRequestModel().trigger_pull_request_hook(
1762 1769 comment.pull_request, self._rhodecode_user, 'review_status_change',
1763 1770 data={'status': calculated_status})
1764 1771 return True
1765 1772 else:
1766 1773 log.warning('No permissions for user %s to delete comment_id: %s',
1767 1774 self._rhodecode_db_user, comment_id)
1768 1775 raise HTTPNotFound()
1769 1776
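Comment deletion above reduces to two rules: a draft may only be deleted by its owner, and otherwise a super admin, the comment owner, or a repo admin of the repo the comment belongs to may delete it. The same rules as a standalone predicate:

# Sketch of the delete-permission rules as a pure predicate.
def can_delete_comment(is_super_admin, is_comment_owner,
                       is_repo_admin, is_repo_comment, is_draft):
    if is_draft and not is_comment_owner:
        # drafts are never deletable by anyone but their owner
        return False
    comment_repo_admin = is_repo_admin and is_repo_comment
    return is_super_admin or is_comment_owner or comment_repo_admin

print(can_delete_comment(False, True, False, True, is_draft=True))   # True
print(can_delete_comment(True, False, False, False, is_draft=True))  # False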
1770 1777 @LoginRequired()
1771 1778 @NotAnonymous()
1772 1779 @HasRepoPermissionAnyDecorator(
1773 1780 'repository.read', 'repository.write', 'repository.admin')
1774 1781 @CSRFRequired()
1775 1782 def pull_request_comment_edit(self):
1776 1783 self.load_default_context()
1777 1784
1778 1785 pull_request = PullRequest.get_or_404(
1779 1786 self.request.matchdict['pull_request_id']
1780 1787 )
1781 1788 comment = ChangesetComment.get_or_404(
1782 1789 self.request.matchdict['comment_id']
1783 1790 )
1784 1791 comment_id = comment.comment_id
1785 1792
1786 1793 if comment.immutable:
1787 1794 # don't allow editing comments that are immutable
1788 1795 raise HTTPForbidden()
1789 1796
1790 1797 if pull_request.is_closed():
1791 1798 log.debug('comment: forbidden because pull request is closed')
1792 1799 raise HTTPForbidden()
1793 1800
1794 1801 if comment.pull_request.is_closed():
1795 1802 # don't allow editing comments on a closed pull request
1796 1803 raise HTTPForbidden()
1797 1804
1798 1805 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1799 1806 super_admin = h.HasPermissionAny('hg.admin')()
1800 1807 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1801 1808 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1802 1809 comment_repo_admin = is_repo_admin and is_repo_comment
1803 1810
1804 1811 if super_admin or comment_owner or comment_repo_admin:
1805 1812 text = self.request.POST.get('text')
1806 1813 version = self.request.POST.get('version')
1807 1814 if text == comment.text:
1808 1815 log.warning(
1809 1816 'Comment(PR): '
1810 1817 'Trying to create new version '
1811 1818 'with the same comment body for comment_id: {}'.format(
1812 1819 comment_id,
1813 1820 )
1814 1821 )
1815 1822 raise HTTPNotFound()
1816 1823
1817 1824 if version.isdigit():
1818 1825 version = int(version)
1819 1826 else:
1820 1827 log.warning(
1821 1828 'Comment(PR): Wrong version type {} {} '
1822 1829 'for comment {}'.format(
1823 1830 version,
1824 1831 type(version),
1825 1832 comment_id,
1826 1833 )
1827 1834 )
1828 1835 raise HTTPNotFound()
1829 1836
1830 1837 try:
1831 1838 comment_history = CommentsModel().edit(
1832 1839 comment_id=comment_id,
1833 1840 text=text,
1834 1841 auth_user=self._rhodecode_user,
1835 1842 version=version,
1836 1843 )
1837 1844 except CommentVersionMismatch:
1838 1845 raise HTTPConflict()
1839 1846
1840 1847 if not comment_history:
1841 1848 raise HTTPNotFound()
1842 1849
1843 1850 Session().commit()
1844 1851 if not comment.draft:
1845 1852 PullRequestModel().trigger_pull_request_hook(
1846 1853 pull_request, self._rhodecode_user, 'comment_edit',
1847 1854 data={'comment': comment})
1848 1855
1849 1856 return {
1850 1857 'comment_history_id': comment_history.comment_history_id,
1851 1858 'comment_id': comment.comment_id,
1852 1859 'comment_version': comment_history.version,
1853 1860 'comment_author_username': comment_history.author.username,
1854 1861 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1855 1862 'comment_created_on': h.age_component(comment_history.created_on,
1856 1863 time_is_local=True),
1857 1864 }
1858 1865 else:
1859 1866 log.warning('No permissions for user %s to edit comment_id: %s',
1860 1867 self._rhodecode_db_user, comment_id)
1861 1868 raise HTTPNotFound()
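Comment editing above rejects a request when the submitted text is unchanged or the version is not a plain integer, and a version conflict surfaces as HTTP 409. A hedged sketch of those validation steps; the stored-version comparison and ValueError are illustrative stand-ins for what CommentsModel().edit() and CommentVersionMismatch do internally:

# Illustrative sketch of the edit validation; ValueError stands in for
# the HTTP errors and CommentVersionMismatch raised in the view above.
def validate_edit(current_text, new_text, raw_version, stored_version):
    if new_text == current_text:
        raise ValueError('new version has the same comment body')
    if not str(raw_version).isdigit():
        raise ValueError('wrong version type: %r' % (raw_version,))
    version = int(raw_version)
    if version != stored_version:
        raise ValueError('comment version mismatch')
    return version

print(validate_edit('old text', 'new text', '3', stored_version=3))  # 3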
The remaining modified files in this pull request were too big to display and their content was truncated.
General Comments (1)
Status: Under Review
author:

Auto status change to "Under Review"

Merge is not currently possible because of the failed checks below.

  • User `default` not allowed to perform merge.
  • Pull request reviewer approval is pending.