##// END OF EJS Templates
merged with stable
super-admin -
r4738:610dc89a merge default
parent child Browse files
Show More
@@ -1,80 +1,81 b''
1 1bd3e92b7e2e2d2024152b34bb88dff1db544a71 v4.0.0
1 1bd3e92b7e2e2d2024152b34bb88dff1db544a71 v4.0.0
2 170c5398320ea6cddd50955e88d408794c21d43a v4.0.1
2 170c5398320ea6cddd50955e88d408794c21d43a v4.0.1
3 c3fe200198f5aa34cf2e4066df2881a9cefe3704 v4.1.0
3 c3fe200198f5aa34cf2e4066df2881a9cefe3704 v4.1.0
4 7fd5c850745e2ea821fb4406af5f4bff9b0a7526 v4.1.1
4 7fd5c850745e2ea821fb4406af5f4bff9b0a7526 v4.1.1
5 41c87da28a179953df86061d817bc35533c66dd2 v4.1.2
5 41c87da28a179953df86061d817bc35533c66dd2 v4.1.2
6 baaf9f5bcea3bae0ef12ae20c8b270482e62abb6 v4.2.0
6 baaf9f5bcea3bae0ef12ae20c8b270482e62abb6 v4.2.0
7 32a70c7e56844a825f61df496ee5eaf8c3c4e189 v4.2.1
7 32a70c7e56844a825f61df496ee5eaf8c3c4e189 v4.2.1
8 fa695cdb411d294679ac081d595ac654e5613b03 v4.3.0
8 fa695cdb411d294679ac081d595ac654e5613b03 v4.3.0
9 0e4dc11b58cad833c513fe17bac39e6850edf959 v4.3.1
9 0e4dc11b58cad833c513fe17bac39e6850edf959 v4.3.1
10 8a876f48f5cb1d018b837db28ff928500cb32cfb v4.4.0
10 8a876f48f5cb1d018b837db28ff928500cb32cfb v4.4.0
11 8dd86b410b1aac086ffdfc524ef300f896af5047 v4.4.1
11 8dd86b410b1aac086ffdfc524ef300f896af5047 v4.4.1
12 d2514226abc8d3b4f6fb57765f47d1b6fb360a05 v4.4.2
12 d2514226abc8d3b4f6fb57765f47d1b6fb360a05 v4.4.2
13 27d783325930af6dad2741476c0d0b1b7c8415c2 v4.5.0
13 27d783325930af6dad2741476c0d0b1b7c8415c2 v4.5.0
14 7f2016f352abcbdba4a19d4039c386e9629449da v4.5.1
14 7f2016f352abcbdba4a19d4039c386e9629449da v4.5.1
15 416fec799314c70a5c780fb28b3357b08869333a v4.5.2
15 416fec799314c70a5c780fb28b3357b08869333a v4.5.2
16 27c3b85fafc83143e6678fbc3da69e1615bcac55 v4.6.0
16 27c3b85fafc83143e6678fbc3da69e1615bcac55 v4.6.0
17 5ad13deb9118c2a5243d4032d4d9cc174e5872db v4.6.1
17 5ad13deb9118c2a5243d4032d4d9cc174e5872db v4.6.1
18 2be921e01fa24bb102696ada596f87464c3666f6 v4.7.0
18 2be921e01fa24bb102696ada596f87464c3666f6 v4.7.0
19 7198bdec29c2872c974431d55200d0398354cdb1 v4.7.1
19 7198bdec29c2872c974431d55200d0398354cdb1 v4.7.1
20 bd1c8d230fe741c2dfd7100a0ef39fd0774fd581 v4.7.2
20 bd1c8d230fe741c2dfd7100a0ef39fd0774fd581 v4.7.2
21 9731914f89765d9628dc4dddc84bc9402aa124c8 v4.8.0
21 9731914f89765d9628dc4dddc84bc9402aa124c8 v4.8.0
22 c5a2b7d0e4bbdebc4a62d7b624befe375207b659 v4.9.0
22 c5a2b7d0e4bbdebc4a62d7b624befe375207b659 v4.9.0
23 d9aa3b27ac9f7e78359775c75fedf7bfece232f1 v4.9.1
23 d9aa3b27ac9f7e78359775c75fedf7bfece232f1 v4.9.1
24 4ba4d74981cec5d6b28b158f875a2540952c2f74 v4.10.0
24 4ba4d74981cec5d6b28b158f875a2540952c2f74 v4.10.0
25 0a6821cbd6b0b3c21503002f88800679fa35ab63 v4.10.1
25 0a6821cbd6b0b3c21503002f88800679fa35ab63 v4.10.1
26 434ad90ec8d621f4416074b84f6e9ce03964defb v4.10.2
26 434ad90ec8d621f4416074b84f6e9ce03964defb v4.10.2
27 68baee10e698da2724c6e0f698c03a6abb993bf2 v4.10.3
27 68baee10e698da2724c6e0f698c03a6abb993bf2 v4.10.3
28 00821d3afd1dce3f4767cc353f84a17f7d5218a1 v4.10.4
28 00821d3afd1dce3f4767cc353f84a17f7d5218a1 v4.10.4
29 22f6744ad8cc274311825f63f953e4dee2ea5cb9 v4.10.5
29 22f6744ad8cc274311825f63f953e4dee2ea5cb9 v4.10.5
30 96eb24bea2f5f9258775245e3f09f6fa0a4dda01 v4.10.6
30 96eb24bea2f5f9258775245e3f09f6fa0a4dda01 v4.10.6
31 3121217a812c956d7dd5a5875821bd73e8002a32 v4.11.0
31 3121217a812c956d7dd5a5875821bd73e8002a32 v4.11.0
32 fa98b454715ac5b912f39e84af54345909a2a805 v4.11.1
32 fa98b454715ac5b912f39e84af54345909a2a805 v4.11.1
33 3982abcfdcc229a723cebe52d3a9bcff10bba08e v4.11.2
33 3982abcfdcc229a723cebe52d3a9bcff10bba08e v4.11.2
34 33195f145db9172f0a8f1487e09207178a6ab065 v4.11.3
34 33195f145db9172f0a8f1487e09207178a6ab065 v4.11.3
35 194c74f33e32bbae6fc4d71ec5a999cff3c13605 v4.11.4
35 194c74f33e32bbae6fc4d71ec5a999cff3c13605 v4.11.4
36 8fbd8b0c3ddc2fa4ac9e4ca16942a03eb593df2d v4.11.5
36 8fbd8b0c3ddc2fa4ac9e4ca16942a03eb593df2d v4.11.5
37 f0609aa5d5d05a1ca2f97c3995542236131c9d8a v4.11.6
37 f0609aa5d5d05a1ca2f97c3995542236131c9d8a v4.11.6
38 b5b30547d90d2e088472a70c84878f429ffbf40d v4.12.0
38 b5b30547d90d2e088472a70c84878f429ffbf40d v4.12.0
39 9072253aa8894d20c00b4a43dc61c2168c1eff94 v4.12.1
39 9072253aa8894d20c00b4a43dc61c2168c1eff94 v4.12.1
40 6a517543ea9ef9987d74371bd2a315eb0b232dc9 v4.12.2
40 6a517543ea9ef9987d74371bd2a315eb0b232dc9 v4.12.2
41 7fc0731b024c3114be87865eda7ab621cc957e32 v4.12.3
41 7fc0731b024c3114be87865eda7ab621cc957e32 v4.12.3
42 6d531c0b068c6eda62dddceedc9f845ecb6feb6f v4.12.4
42 6d531c0b068c6eda62dddceedc9f845ecb6feb6f v4.12.4
43 3d6bf2d81b1564830eb5e83396110d2a9a93eb1e v4.13.0
43 3d6bf2d81b1564830eb5e83396110d2a9a93eb1e v4.13.0
44 5468fc89e708bd90e413cd0d54350017abbdbc0e v4.13.1
44 5468fc89e708bd90e413cd0d54350017abbdbc0e v4.13.1
45 610d621550521c314ee97b3d43473ac0bcf06fb8 v4.13.2
45 610d621550521c314ee97b3d43473ac0bcf06fb8 v4.13.2
46 7dc62c090881fb5d03268141e71e0940d7c3295d v4.13.3
46 7dc62c090881fb5d03268141e71e0940d7c3295d v4.13.3
47 9151328c1c46b72ba6f00d7640d9141e75aa1ca2 v4.14.0
47 9151328c1c46b72ba6f00d7640d9141e75aa1ca2 v4.14.0
48 a47eeac5dfa41fa6779d90452affba4091c3ade8 v4.14.1
48 a47eeac5dfa41fa6779d90452affba4091c3ade8 v4.14.1
49 4b34ce0d2c3c10510626b3b65044939bb7a2cddf v4.15.0
49 4b34ce0d2c3c10510626b3b65044939bb7a2cddf v4.15.0
50 14502561d22e6b70613674cd675ae9a604b7989f v4.15.1
50 14502561d22e6b70613674cd675ae9a604b7989f v4.15.1
51 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
51 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
52 797744642eca86640ed20bef2cd77445780abaec v4.16.0
52 797744642eca86640ed20bef2cd77445780abaec v4.16.0
53 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
53 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
54 5d8057df561c4b6b81b6401aed7d2f911e6e77f7 v4.16.2
54 5d8057df561c4b6b81b6401aed7d2f911e6e77f7 v4.16.2
55 13acfc008896ef4c62546bab5074e8f6f89b4fa7 v4.17.0
55 13acfc008896ef4c62546bab5074e8f6f89b4fa7 v4.17.0
56 45b9b610976f483877142fe75321808ce9ebac59 v4.17.1
56 45b9b610976f483877142fe75321808ce9ebac59 v4.17.1
57 ad5bd0c4bd322fdbd04bb825a3d027e08f7a3901 v4.17.2
57 ad5bd0c4bd322fdbd04bb825a3d027e08f7a3901 v4.17.2
58 037f5794b55a6236d68f6485a485372dde6566e0 v4.17.3
58 037f5794b55a6236d68f6485a485372dde6566e0 v4.17.3
59 83bc3100cfd6094c1d04f475ddb299b7dc3d0b33 v4.17.4
59 83bc3100cfd6094c1d04f475ddb299b7dc3d0b33 v4.17.4
60 e3de8c95baf8cc9109ca56aee8193a2cb6a54c8a v4.17.4
60 e3de8c95baf8cc9109ca56aee8193a2cb6a54c8a v4.17.4
61 f37a3126570477543507f0bc9d245ce75546181a v4.18.0
61 f37a3126570477543507f0bc9d245ce75546181a v4.18.0
62 71d8791463e87b64c1a18475de330ee600d37561 v4.18.1
62 71d8791463e87b64c1a18475de330ee600d37561 v4.18.1
63 4bd6b75dac1d25c64885d4d49385e5533f21c525 v4.18.2
63 4bd6b75dac1d25c64885d4d49385e5533f21c525 v4.18.2
64 12ed92fe57f2e9fc7b71dc0b65e26c2da5c7085f v4.18.3
64 12ed92fe57f2e9fc7b71dc0b65e26c2da5c7085f v4.18.3
65 ddef396a6567117de531d67d44c739cbbfc3eebb v4.19.0
65 ddef396a6567117de531d67d44c739cbbfc3eebb v4.19.0
66 c0c65acd73914bf4368222d510afe1161ab8c07c v4.19.1
66 c0c65acd73914bf4368222d510afe1161ab8c07c v4.19.1
67 7ac623a4a2405917e2af660d645ded662011e40d v4.19.2
67 7ac623a4a2405917e2af660d645ded662011e40d v4.19.2
68 ef7ffda65eeb90c3ba88590a6cb816ef9b0bc232 v4.19.3
68 ef7ffda65eeb90c3ba88590a6cb816ef9b0bc232 v4.19.3
69 3e635489bb7961df93b01e42454ad1a8730ae968 v4.20.0
69 3e635489bb7961df93b01e42454ad1a8730ae968 v4.20.0
70 7e2eb896a02ca7cd2cd9f0f853ef3dac3f0039e3 v4.20.1
70 7e2eb896a02ca7cd2cd9f0f853ef3dac3f0039e3 v4.20.1
71 8bb5fece08ab65986225b184e46f53d2a71729cb v4.21.0
71 8bb5fece08ab65986225b184e46f53d2a71729cb v4.21.0
72 90734aac31ee4563bbe665a43ff73190cc762275 v4.22.0
72 90734aac31ee4563bbe665a43ff73190cc762275 v4.22.0
73 a9655707f7cf4146affc51c12fe5ed8e02898a57 v4.23.0
73 a9655707f7cf4146affc51c12fe5ed8e02898a57 v4.23.0
74 56310d93b33b97535908ef9c7b0985b89bb7fad2 v4.23.1
74 56310d93b33b97535908ef9c7b0985b89bb7fad2 v4.23.1
75 7637c38528fa38c1eabc1fde6a869c20995a0da7 v4.23.2
75 7637c38528fa38c1eabc1fde6a869c20995a0da7 v4.23.2
76 6aeb4ac3ef7f0ac699c914740dad3688c9495e83 v4.24.0
76 6aeb4ac3ef7f0ac699c914740dad3688c9495e83 v4.24.0
77 6eaf953da06e468a4c4e5239d3d0e700bda6b163 v4.24.1
77 6eaf953da06e468a4c4e5239d3d0e700bda6b163 v4.24.1
78 f8161cbc2d94a935d3c395a0e758d9a094287169 v4.25.0
78 f8161cbc2d94a935d3c395a0e758d9a094287169 v4.25.0
79 77fe47b5b39338e71b2c040de2c0359b529b6251 v4.25.1
79 77fe47b5b39338e71b2c040de2c0359b529b6251 v4.25.1
80 27475bd8a718b9a00a37a8563c4927120865ad85 v4.25.2
80 27475bd8a718b9a00a37a8563c4927120865ad85 v4.25.2
81 b4ba10dcb4ab67d02b8c5cff32a3827f6c4fdedb v4.26.0
@@ -1,33 +1,34 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:rc_tools_pinned]
7 [task:rc_tools_pinned]
8 done = true
8 done = true
9
9
10 [task:fixes_on_stable]
10 [task:fixes_on_stable]
11 done = true
11 done = true
12
12
13 [task:pip2nix_generated]
13 [task:pip2nix_generated]
14 done = true
14 done = true
15
15
16 [task:changelog_updated]
16 [task:changelog_updated]
17 done = true
17 done = true
18
18
19 [task:generate_api_docs]
19 [task:generate_api_docs]
20 done = true
20 done = true
21
21
22 [task:updated_translation]
23 done = true
24
22 [release]
25 [release]
23 state = prepared
26 state = prepared
24 version = 4.25.2
27 version = 4.26.0
25
26 [task:updated_translation]
27
28
28 [task:generate_js_routes]
29 [task:generate_js_routes]
29
30
30 [task:updated_trial_license]
31 [task:updated_trial_license]
31
32
32 [task:generate_oss_licenses]
33 [task:generate_oss_licenses]
33
34
@@ -1,104 +1,118 b''
1 .. _repo-admin-tasks:
1 .. _repo-admin-tasks:
2
2
3 Common Admin Tasks for Repositories
3 Common Admin Tasks for Repositories
4 -----------------------------------
4 -----------------------------------
5
5
6
6
7 Manually Force Delete Repository
7 Manually Force Delete Repository
8 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
8 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
9
9
10 In case of attached forks or pull-requests repositories should be archived.
10 In case of attached forks or pull-requests repositories should be archived.
11 Here is how to force delete a repository and remove all dependent objects
11 Here is how to force delete a repository and remove all dependent objects
12
12
13
13
14 .. code-block:: bash
14 .. code-block:: bash
15 :dedent: 1
15 :dedent: 1
16
16
17 # starts the ishell interactive prompt
17 # starts the ishell interactive prompt
18 $ rccontrol ishell enterprise-1
18 $ rccontrol ishell enterprise-1
19
19
20 .. code-block:: python
20 .. code-block:: python
21 :dedent: 1
21 :dedent: 1
22
22
23 In [4]: from rhodecode.model.repo import RepoModel
23 In [4]: from rhodecode.model.repo import RepoModel
24 In [3]: repo = Repository.get_by_repo_name('test_repos/repo_with_prs')
24 In [3]: repo = Repository.get_by_repo_name('test_repos/repo_with_prs')
25 In [5]: RepoModel().delete(repo, forks='detach', pull_requests='delete')
25 In [5]: RepoModel().delete(repo, forks='detach', pull_requests='delete')
26 In [6]: Session().commit()
26 In [6]: Session().commit()
27
27
28
28
29 Below is a fully automated example to force delete repositories reading from a
29 Below is a fully automated example to force delete repositories reading from a
30 file where each line is a repository name. This can be executed via simple CLI command
30 file where each line is a repository name. This can be executed via simple CLI command
31 without entering the interactive shell.
31 without entering the interactive shell.
32
32
33 Save the below content as a file named `repo_delete_task.py`
33 Save the below content as a file named `repo_delete_task.py`
34
34
35
35
36 .. code-block:: python
36 .. code-block:: python
37 :dedent: 1
37 :dedent: 1
38
38
39 from rhodecode.model.db import *
39 from rhodecode.model.db import *
40 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.repo import RepoModel
41 with open('delete_repos.txt', 'rb') as f:
41 with open('delete_repos.txt', 'rb') as f:
42 # read all lines from file
42 # read all lines from file
43 repos = f.readlines()
43 repos = f.readlines()
44 for repo_name in repos:
44 for repo_name in repos:
45 repo_name = repo_name.strip() # cleanup the name just in case
45 repo_name = repo_name.strip() # cleanup the name just in case
46 repo = Repository.get_by_repo_name(repo_name)
46 repo = Repository.get_by_repo_name(repo_name)
47 if not repo:
47 if not repo:
48 raise Exception('Repo with name {} not found'.format(repo_name))
48 raise Exception('Repo with name {} not found'.format(repo_name))
49 RepoModel().delete(repo, forks='detach', pull_requests='delete')
49 RepoModel().delete(repo, forks='detach', pull_requests='delete')
50 Session().commit()
50 Session().commit()
51 print('Removed repository {}'.format(repo_name))
51 print('Removed repository {}'.format(repo_name))
52
52
53
53
54 The code above will read the names of repositories from a file called `delete_repos.txt`
54 The code above will read the names of repositories from a file called `delete_repos.txt`
55 Each lines should represent a single name e.g `repo_name_1` or `repo_group/repo_name_2`
55 Each lines should represent a single name e.g `repo_name_1` or `repo_group/repo_name_2`
56
56
57 Run this line from CLI to execute the code from the `repo_delete_task.py` file and
57 Run this line from CLI to execute the code from the `repo_delete_task.py` file and
58 exit the ishell after the execution::
58 exit the ishell after the execution::
59
59
60 echo "%run repo_delete_task.py" | rccontrol ishell enterprise-1
60 echo "%run repo_delete_task.py" | rccontrol ishell enterprise-1
61
61
62
62
63
63
64
64
65 Bulk edit permissions for all repositories or groups
65 Bulk edit permissions for all repositories or groups
66 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
66 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
67
67
68 In case when a permissions should be applied in bulk here are two ways to apply
68 In case when a permissions should be applied in bulk here are two ways to apply
69 the permissions onto *all* repositories and/or repository groups.
69 the permissions onto *all* repositories and/or repository groups.
70
70
71 1) Start by running the interactive ishell interface
71 1) Start by running the interactive ishell interface
72
72
73 .. code-block:: bash
73 .. code-block:: bash
74 :dedent: 1
74 :dedent: 1
75
75
76 # starts the ishell interactive prompt
76 # starts the ishell interactive prompt
77 $ rccontrol ishell enterprise-1
77 $ rccontrol ishell enterprise-1
78
78
79
79
80 2a) Add user called 'admin' into all repositories with write permission.
80 2a) Add user called 'admin' into all repositories with write permission.
81 Permissions can be also `repository.read`, `repository.admin`, `repository.none`
81 Permissions can be also `repository.read`, `repository.admin`, `repository.none`
82
82
83 .. code-block:: python
83 .. code-block:: python
84 :dedent: 1
84 :dedent: 1
85
85
86 In [1]: from rhodecode.model.repo import RepoModel
86 In [1]: from rhodecode.model.repo import RepoModel
87 In [2]: user = User.get_by_username('admin')
87 In [2]: user = User.get_by_username('admin')
88 In [3]: permission_name = 'repository.write'
88 In [3]: permission_name = 'repository.write'
89 In [4]: for repo in Repository.get_all():
89 In [4]: for repo in Repository.get_all():
90 ...: RepoModel().grant_user_permission(repo, user, permission_name)
90 ...: RepoModel().grant_user_permission(repo, user, permission_name)
91 ...: Session().commit()
91 ...: Session().commit()
92
92
93 2b) Add user called 'admin' into all repository groups with write permission.
93 2b) Add user called 'admin' into all repository groups with write permission.
94 Permissions can be also can be `group.read`, `group.admin`, `group.none`
94 Permissions can be also can be `group.read`, `group.admin`, `group.none`
95
95
96 .. code-block:: python
96 .. code-block:: python
97 :dedent: 1
97 :dedent: 1
98
98
99 In [1]: from rhodecode.model.repo import RepoModel
99 In [1]: from rhodecode.model.repo import RepoModel
100 In [2]: user = User.get_by_username('admin')
100 In [2]: user = User.get_by_username('admin')
101 In [3]: permission_name = 'group.write'
101 In [3]: permission_name = 'group.write'
102 In [4]: for repo_group in RepoGroup.get_all():
102 In [4]: for repo_group in RepoGroup.get_all():
103 ...: RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
103 ...: RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
104 ...: Session().commit() No newline at end of file
104 ...: Session().commit()
105
106
107 Delete a problematic pull request
108 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
109
110 .. code-block:: python
111 :dedent: 1
112
113 In [1]: from rhodecode.model.pull_request import PullRequestModel
114 In [2]: pullrequest_id = 123
115 In [3]: pr = PullRequest.get(pullrequest_id)
116 In [4]: super_admin = User.get_first_super_admin()
117 In [5]: PullRequestModel().delete(pr, super_admin)
118 In [6]: Session().commit()
@@ -1,56 +1,64 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from rhodecode.apps._base import ADMIN_PREFIX
21 from rhodecode.apps._base import ADMIN_PREFIX
22
22
23
23
24 def admin_routes(config):
24 def admin_routes(config):
25 from rhodecode.apps.ops.views import OpsView
25 from rhodecode.apps.ops.views import OpsView
26
26
27 config.add_route(
27 config.add_route(
28 name='ops_ping',
28 name='ops_ping',
29 pattern='/ping')
29 pattern='/ping')
30 config.add_view(
30 config.add_view(
31 OpsView,
31 OpsView,
32 attr='ops_ping',
32 attr='ops_ping',
33 route_name='ops_ping', request_method='GET',
33 route_name='ops_ping', request_method='GET',
34 renderer='json_ext')
34 renderer='json_ext')
35
35
36 config.add_route(
36 config.add_route(
37 name='ops_error_test',
37 name='ops_error_test',
38 pattern='/error')
38 pattern='/error')
39 config.add_view(
39 config.add_view(
40 OpsView,
40 OpsView,
41 attr='ops_error_test',
41 attr='ops_error_test',
42 route_name='ops_error_test', request_method='GET',
42 route_name='ops_error_test', request_method='GET',
43 renderer='json_ext')
43 renderer='json_ext')
44
44
45 config.add_route(
45 config.add_route(
46 name='ops_redirect_test',
46 name='ops_redirect_test',
47 pattern='/redirect')
47 pattern='/redirect')
48 config.add_view(
48 config.add_view(
49 OpsView,
49 OpsView,
50 attr='ops_redirect_test',
50 attr='ops_redirect_test',
51 route_name='ops_redirect_test', request_method='GET',
51 route_name='ops_redirect_test', request_method='GET',
52 renderer='json_ext')
52 renderer='json_ext')
53
53
54 config.add_route(
55 name='ops_healthcheck',
56 pattern='/status')
57 config.add_view(
58 OpsView,
59 attr='ops_healthcheck',
60 route_name='ops_healthcheck', request_method='GET',
61 renderer='json_ext')
54
62
55 def includeme(config):
63 def includeme(config):
56 config.include(admin_routes, route_prefix=ADMIN_PREFIX + '/ops')
64 config.include(admin_routes, route_prefix=ADMIN_PREFIX + '/ops')
@@ -1,74 +1,97 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import logging
22 import logging
23
23
24
24
25 from pyramid.httpexceptions import HTTPFound
25 from pyramid.httpexceptions import HTTPFound
26
26
27 from rhodecode.apps._base import BaseAppView
27 from rhodecode.apps._base import BaseAppView
28 from rhodecode.lib import helpers as h
28 from rhodecode.lib import helpers as h
29 from rhodecode.lib.auth import LoginRequired
30 from rhodecode.model.db import UserApiKeys
29
31
30 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
31
33
32
34
33 class OpsView(BaseAppView):
35 class OpsView(BaseAppView):
34
36
35 def load_default_context(self):
37 def load_default_context(self):
36 c = self._get_local_tmpl_context()
38 c = self._get_local_tmpl_context()
37 c.user = c.auth_user.get_instance()
39 c.user = c.auth_user.get_instance()
38
40
39 return c
41 return c
40
42
41 def ops_ping(self):
43 def ops_ping(self):
42 data = {
44 data = {
43 'instance': self.request.registry.settings.get('instance_id'),
45 'instance': self.request.registry.settings.get('instance_id'),
44 }
46 }
45 if getattr(self.request, 'user'):
47 if getattr(self.request, 'user'):
46 caller_name = 'anonymous'
48 caller_name = 'anonymous'
47 if self.request.user.user_id:
49 if self.request.user.user_id:
48 caller_name = self.request.user.username
50 caller_name = self.request.user.username
49
51
50 data.update({
52 data.update({
51 'caller_ip': self.request.user.ip_addr,
53 'caller_ip': self.request.user.ip_addr,
52 'caller_name': caller_name,
54 'caller_name': caller_name,
53 })
55 })
54 return {'ok': data}
56 return {'ok': data}
55
57
56 def ops_error_test(self):
58 def ops_error_test(self):
57 """
59 """
58 Test exception handling and emails on errors
60 Test exception handling and emails on errors
59 """
61 """
60
62
61 class TestException(Exception):
63 class TestException(Exception):
62 pass
64 pass
63 # add timeout so we add some sort of rate limiter
65 # add timeout so we add some sort of rate limiter
64 time.sleep(2)
66 time.sleep(2)
65 msg = ('RhodeCode Enterprise test exception. '
67 msg = ('RhodeCode Enterprise test exception. '
66 'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
68 'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
67 raise TestException(msg)
69 raise TestException(msg)
68
70
69 def ops_redirect_test(self):
71 def ops_redirect_test(self):
70 """
72 """
71 Test redirect handling
73 Test redirect handling
72 """
74 """
73 redirect_to = self.request.GET.get('to') or h.route_path('home')
75 redirect_to = self.request.GET.get('to') or h.route_path('home')
74 raise HTTPFound(redirect_to)
76 raise HTTPFound(redirect_to)
77
78 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
79 def ops_healthcheck(self):
80 from rhodecode.lib.system_info import load_system_info
81
82 vcsserver_info = load_system_info('vcs_server')
83 if vcsserver_info:
84 vcsserver_info = vcsserver_info['human_value']
85
86 db_info = load_system_info('database_info')
87 if db_info:
88 db_info = db_info['human_value']
89
90 health_spec = {
91 'caller_ip': self.request.user.ip_addr,
92 'vcsserver': vcsserver_info,
93 'db': db_info,
94 }
95
96 return {'healthcheck': health_spec}
97
@@ -1,69 +1,72 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import rhodecode
24 import rhodecode
25 from zope.cachedescriptors.property import Lazy as LazyProperty
25 from zope.cachedescriptors.property import Lazy as LazyProperty
26 from rhodecode.lib.celerylib.loader import (
26 from rhodecode.lib.celerylib.loader import (
27 celery_app, RequestContextTask, get_logger)
27 celery_app, RequestContextTask, get_logger)
28
28
29 async_task = celery_app.task
29 async_task = celery_app.task
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 class ResultWrapper(object):
35 class ResultWrapper(object):
36 def __init__(self, task):
36 def __init__(self, task):
37 self.task = task
37 self.task = task
38
38
39 @LazyProperty
39 @LazyProperty
40 def result(self):
40 def result(self):
41 return self.task
41 return self.task
42
42
43
43
44 def run_task(task, *args, **kwargs):
44 def run_task(task, *args, **kwargs):
45 log.debug('Got task `%s` for execution', task)
45 log.debug('Got task `%s` for execution', task)
46 if task is None:
47 raise ValueError('Got non-existing task for execution')
48
46 if rhodecode.CELERY_ENABLED:
49 if rhodecode.CELERY_ENABLED:
47 celery_is_up = False
50 celery_is_up = False
48 try:
51 try:
49 t = task.apply_async(args=args, kwargs=kwargs)
52 t = task.apply_async(args=args, kwargs=kwargs)
50 celery_is_up = True
53 celery_is_up = True
51 log.debug('executing task %s:%s in async mode', t.task_id, task)
54 log.debug('executing task %s:%s in async mode', t.task_id, task)
52 return t
55 return t
53
56
54 except socket.error as e:
57 except socket.error as e:
55 if isinstance(e, IOError) and e.errno == 111:
58 if isinstance(e, IOError) and e.errno == 111:
56 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
59 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
57 else:
60 else:
58 log.exception("Exception while connecting to celeryd.")
61 log.exception("Exception while connecting to celeryd.")
59 except KeyError as e:
62 except KeyError as e:
60 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
63 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
61 except Exception as e:
64 except Exception as e:
62 log.exception(
65 log.exception(
63 "Exception while trying to run task asynchronous. "
66 "Exception while trying to run task asynchronous. "
64 "Fallback to sync execution.")
67 "Fallback to sync execution.")
65
68
66 else:
69 else:
67 log.debug('executing task %s:%s in sync mode', 'TASK', task)
70 log.debug('executing task %s:%s in sync mode', 'TASK', task)
68
71
69 return ResultWrapper(task(*args, **kwargs))
72 return ResultWrapper(task(*args, **kwargs))
@@ -1,407 +1,410 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 RhodeCode task modules, containing all task that suppose to be run
22 RhodeCode task modules, containing all task that suppose to be run
23 by celery daemon
23 by celery daemon
24 """
24 """
25
25
26 import os
26 import os
27 import time
27 import time
28
28
29 from pyramid import compat
29 from pyramid import compat
30 from pyramid_mailer.mailer import Mailer
30 from pyramid_mailer.mailer import Mailer
31 from pyramid_mailer.message import Message
31 from pyramid_mailer.message import Message
32 from email.utils import formatdate
32 from email.utils import formatdate
33
33
34 import rhodecode
34 import rhodecode
35 from rhodecode.lib import audit_logger
35 from rhodecode.lib import audit_logger
36 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
36 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
37 from rhodecode.lib import hooks_base
37 from rhodecode.lib import hooks_base
38 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
38 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
39 from rhodecode.model.db import (
39 from rhodecode.model.db import (
40 Session, IntegrityError, true, Repository, RepoGroup, User)
40 Session, IntegrityError, true, Repository, RepoGroup, User)
41 from rhodecode.model.permission import PermissionModel
41 from rhodecode.model.permission import PermissionModel
42
42
43
43
44 @async_task(ignore_result=True, base=RequestContextTask)
44 @async_task(ignore_result=True, base=RequestContextTask)
45 def send_email(recipients, subject, body='', html_body='', email_config=None,
45 def send_email(recipients, subject, body='', html_body='', email_config=None,
46 extra_headers=None):
46 extra_headers=None):
47 """
47 """
48 Sends an email with defined parameters from the .ini files.
48 Sends an email with defined parameters from the .ini files.
49
49
50 :param recipients: list of recipients, it this is empty the defined email
50 :param recipients: list of recipients, it this is empty the defined email
51 address from field 'email_to' is used instead
51 address from field 'email_to' is used instead
52 :param subject: subject of the mail
52 :param subject: subject of the mail
53 :param body: body of the mail
53 :param body: body of the mail
54 :param html_body: html version of body
54 :param html_body: html version of body
55 :param email_config: specify custom configuration for mailer
55 :param email_config: specify custom configuration for mailer
56 :param extra_headers: specify custom headers
56 :param extra_headers: specify custom headers
57 """
57 """
58 log = get_logger(send_email)
58 log = get_logger(send_email)
59
59
60 email_config = email_config or rhodecode.CONFIG
60 email_config = email_config or rhodecode.CONFIG
61
61
62 mail_server = email_config.get('smtp_server') or None
62 mail_server = email_config.get('smtp_server') or None
63 if mail_server is None:
63 if mail_server is None:
64 log.error("SMTP server information missing. Sending email failed. "
64 log.error("SMTP server information missing. Sending email failed. "
65 "Make sure that `smtp_server` variable is configured "
65 "Make sure that `smtp_server` variable is configured "
66 "inside the .ini file")
66 "inside the .ini file")
67 return False
67 return False
68
68
69 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
69 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
70
70
71 if recipients:
71 if recipients:
72 if isinstance(recipients, compat.string_types):
72 if isinstance(recipients, compat.string_types):
73 recipients = recipients.split(',')
73 recipients = recipients.split(',')
74 else:
74 else:
75 # if recipients are not defined we send to email_config + all admins
75 # if recipients are not defined we send to email_config + all admins
76 admins = []
76 admins = []
77 for u in User.query().filter(User.admin == true()).all():
77 for u in User.query().filter(User.admin == true()).all():
78 if u.email:
78 if u.email:
79 admins.append(u.email)
79 admins.append(u.email)
80 recipients = []
80 recipients = []
81 config_email = email_config.get('email_to')
81 config_email = email_config.get('email_to')
82 if config_email:
82 if config_email:
83 recipients += [config_email]
83 recipients += [config_email]
84 recipients += admins
84 recipients += admins
85
85
86 # translate our LEGACY config into the one that pyramid_mailer supports
86 # translate our LEGACY config into the one that pyramid_mailer supports
87 email_conf = dict(
87 email_conf = dict(
88 host=mail_server,
88 host=mail_server,
89 port=email_config.get('smtp_port', 25),
89 port=email_config.get('smtp_port', 25),
90 username=email_config.get('smtp_username'),
90 username=email_config.get('smtp_username'),
91 password=email_config.get('smtp_password'),
91 password=email_config.get('smtp_password'),
92
92
93 tls=str2bool(email_config.get('smtp_use_tls')),
93 tls=str2bool(email_config.get('smtp_use_tls')),
94 ssl=str2bool(email_config.get('smtp_use_ssl')),
94 ssl=str2bool(email_config.get('smtp_use_ssl')),
95
95
96 # SSL key file
96 # SSL key file
97 # keyfile='',
97 # keyfile='',
98
98
99 # SSL certificate file
99 # SSL certificate file
100 # certfile='',
100 # certfile='',
101
101
102 # Location of maildir
102 # Location of maildir
103 # queue_path='',
103 # queue_path='',
104
104
105 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
105 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
106
106
107 debug=str2bool(email_config.get('smtp_debug')),
107 debug=str2bool(email_config.get('smtp_debug')),
108 # /usr/sbin/sendmail Sendmail executable
108 # /usr/sbin/sendmail Sendmail executable
109 # sendmail_app='',
109 # sendmail_app='',
110
110
111 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
111 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
112 # sendmail_template='',
112 # sendmail_template='',
113 )
113 )
114
114
115 if extra_headers is None:
115 if extra_headers is None:
116 extra_headers = {}
116 extra_headers = {}
117
117
118 extra_headers.setdefault('Date', formatdate(time.time()))
118 extra_headers.setdefault('Date', formatdate(time.time()))
119
119
120 if 'thread_ids' in extra_headers:
120 if 'thread_ids' in extra_headers:
121 thread_ids = extra_headers.pop('thread_ids')
121 thread_ids = extra_headers.pop('thread_ids')
122 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
122 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
123
123
124 try:
124 try:
125 mailer = Mailer(**email_conf)
125 mailer = Mailer(**email_conf)
126
126
127 message = Message(subject=subject,
127 message = Message(subject=subject,
128 sender=email_conf['default_sender'],
128 sender=email_conf['default_sender'],
129 recipients=recipients,
129 recipients=recipients,
130 body=body, html=html_body,
130 body=body, html=html_body,
131 extra_headers=extra_headers)
131 extra_headers=extra_headers)
132 mailer.send_immediately(message)
132 mailer.send_immediately(message)
133
133
134 except Exception:
134 except Exception:
135 log.exception('Mail sending failed')
135 log.exception('Mail sending failed')
136 return False
136 return False
137 return True
137 return True
138
138
139
139
140 @async_task(ignore_result=True, base=RequestContextTask)
140 @async_task(ignore_result=True, base=RequestContextTask)
141 def create_repo(form_data, cur_user):
141 def create_repo(form_data, cur_user):
142 from rhodecode.model.repo import RepoModel
142 from rhodecode.model.repo import RepoModel
143 from rhodecode.model.user import UserModel
143 from rhodecode.model.user import UserModel
144 from rhodecode.model.scm import ScmModel
144 from rhodecode.model.scm import ScmModel
145 from rhodecode.model.settings import SettingsModel
145 from rhodecode.model.settings import SettingsModel
146
146
147 log = get_logger(create_repo)
147 log = get_logger(create_repo)
148
148
149 cur_user = UserModel()._get_user(cur_user)
149 cur_user = UserModel()._get_user(cur_user)
150 owner = cur_user
150 owner = cur_user
151
151
152 repo_name = form_data['repo_name']
152 repo_name = form_data['repo_name']
153 repo_name_full = form_data['repo_name_full']
153 repo_name_full = form_data['repo_name_full']
154 repo_type = form_data['repo_type']
154 repo_type = form_data['repo_type']
155 description = form_data['repo_description']
155 description = form_data['repo_description']
156 private = form_data['repo_private']
156 private = form_data['repo_private']
157 clone_uri = form_data.get('clone_uri')
157 clone_uri = form_data.get('clone_uri')
158 repo_group = safe_int(form_data['repo_group'])
158 repo_group = safe_int(form_data['repo_group'])
159 copy_fork_permissions = form_data.get('copy_permissions')
159 copy_fork_permissions = form_data.get('copy_permissions')
160 copy_group_permissions = form_data.get('repo_copy_permissions')
160 copy_group_permissions = form_data.get('repo_copy_permissions')
161 fork_of = form_data.get('fork_parent_id')
161 fork_of = form_data.get('fork_parent_id')
162 state = form_data.get('repo_state', Repository.STATE_PENDING)
162 state = form_data.get('repo_state', Repository.STATE_PENDING)
163
163
164 # repo creation defaults, private and repo_type are filled in form
164 # repo creation defaults, private and repo_type are filled in form
165 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
165 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
166 enable_statistics = form_data.get(
166 enable_statistics = form_data.get(
167 'enable_statistics', defs.get('repo_enable_statistics'))
167 'enable_statistics', defs.get('repo_enable_statistics'))
168 enable_locking = form_data.get(
168 enable_locking = form_data.get(
169 'enable_locking', defs.get('repo_enable_locking'))
169 'enable_locking', defs.get('repo_enable_locking'))
170 enable_downloads = form_data.get(
170 enable_downloads = form_data.get(
171 'enable_downloads', defs.get('repo_enable_downloads'))
171 'enable_downloads', defs.get('repo_enable_downloads'))
172
172
173 # set landing rev based on default branches for SCM
173 # set landing rev based on default branches for SCM
174 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
174 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
175
175
176 try:
176 try:
177 RepoModel()._create_repo(
177 RepoModel()._create_repo(
178 repo_name=repo_name_full,
178 repo_name=repo_name_full,
179 repo_type=repo_type,
179 repo_type=repo_type,
180 description=description,
180 description=description,
181 owner=owner,
181 owner=owner,
182 private=private,
182 private=private,
183 clone_uri=clone_uri,
183 clone_uri=clone_uri,
184 repo_group=repo_group,
184 repo_group=repo_group,
185 landing_rev=landing_ref,
185 landing_rev=landing_ref,
186 fork_of=fork_of,
186 fork_of=fork_of,
187 copy_fork_permissions=copy_fork_permissions,
187 copy_fork_permissions=copy_fork_permissions,
188 copy_group_permissions=copy_group_permissions,
188 copy_group_permissions=copy_group_permissions,
189 enable_statistics=enable_statistics,
189 enable_statistics=enable_statistics,
190 enable_locking=enable_locking,
190 enable_locking=enable_locking,
191 enable_downloads=enable_downloads,
191 enable_downloads=enable_downloads,
192 state=state
192 state=state
193 )
193 )
194 Session().commit()
194 Session().commit()
195
195
196 # now create this repo on Filesystem
196 # now create this repo on Filesystem
197 RepoModel()._create_filesystem_repo(
197 RepoModel()._create_filesystem_repo(
198 repo_name=repo_name,
198 repo_name=repo_name,
199 repo_type=repo_type,
199 repo_type=repo_type,
200 repo_group=RepoModel()._get_repo_group(repo_group),
200 repo_group=RepoModel()._get_repo_group(repo_group),
201 clone_uri=clone_uri,
201 clone_uri=clone_uri,
202 )
202 )
203 repo = Repository.get_by_repo_name(repo_name_full)
203 repo = Repository.get_by_repo_name(repo_name_full)
204 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
204 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
205
205
206 # update repo commit caches initially
206 # update repo commit caches initially
207 repo.update_commit_cache()
207 repo.update_commit_cache()
208
208
209 # set new created state
209 # set new created state
210 repo.set_state(Repository.STATE_CREATED)
210 repo.set_state(Repository.STATE_CREATED)
211 repo_id = repo.repo_id
211 repo_id = repo.repo_id
212 repo_data = repo.get_api_data()
212 repo_data = repo.get_api_data()
213
213
214 audit_logger.store(
214 audit_logger.store(
215 'repo.create', action_data={'data': repo_data},
215 'repo.create', action_data={'data': repo_data},
216 user=cur_user,
216 user=cur_user,
217 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
217 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
218
218
219 Session().commit()
219 Session().commit()
220
220
221 PermissionModel().trigger_permission_flush()
221 PermissionModel().trigger_permission_flush()
222
222
223 except Exception as e:
223 except Exception as e:
224 log.warning('Exception occurred when creating repository, '
224 log.warning('Exception occurred when creating repository, '
225 'doing cleanup...', exc_info=True)
225 'doing cleanup...', exc_info=True)
226 if isinstance(e, IntegrityError):
226 if isinstance(e, IntegrityError):
227 Session().rollback()
227 Session().rollback()
228
228
229 # rollback things manually !
229 # rollback things manually !
230 repo = Repository.get_by_repo_name(repo_name_full)
230 repo = Repository.get_by_repo_name(repo_name_full)
231 if repo:
231 if repo:
232 Repository.delete(repo.repo_id)
232 Repository.delete(repo.repo_id)
233 Session().commit()
233 Session().commit()
234 RepoModel()._delete_filesystem_repo(repo)
234 RepoModel()._delete_filesystem_repo(repo)
235 log.info('Cleanup of repo %s finished', repo_name_full)
235 log.info('Cleanup of repo %s finished', repo_name_full)
236 raise
236 raise
237
237
238 return True
238 return True
239
239
240
240
241 @async_task(ignore_result=True, base=RequestContextTask)
241 @async_task(ignore_result=True, base=RequestContextTask)
242 def create_repo_fork(form_data, cur_user):
242 def create_repo_fork(form_data, cur_user):
243 """
243 """
244 Creates a fork of repository using internal VCS methods
244 Creates a fork of repository using internal VCS methods
245 """
245 """
246 from rhodecode.model.repo import RepoModel
246 from rhodecode.model.repo import RepoModel
247 from rhodecode.model.user import UserModel
247 from rhodecode.model.user import UserModel
248
248
249 log = get_logger(create_repo_fork)
249 log = get_logger(create_repo_fork)
250
250
251 cur_user = UserModel()._get_user(cur_user)
251 cur_user = UserModel()._get_user(cur_user)
252 owner = cur_user
252 owner = cur_user
253
253
254 repo_name = form_data['repo_name'] # fork in this case
254 repo_name = form_data['repo_name'] # fork in this case
255 repo_name_full = form_data['repo_name_full']
255 repo_name_full = form_data['repo_name_full']
256 repo_type = form_data['repo_type']
256 repo_type = form_data['repo_type']
257 description = form_data['description']
257 description = form_data['description']
258 private = form_data['private']
258 private = form_data['private']
259 clone_uri = form_data.get('clone_uri')
259 clone_uri = form_data.get('clone_uri')
260 repo_group = safe_int(form_data['repo_group'])
260 repo_group = safe_int(form_data['repo_group'])
261 landing_ref = form_data['landing_rev']
261 landing_ref = form_data['landing_rev']
262 copy_fork_permissions = form_data.get('copy_permissions')
262 copy_fork_permissions = form_data.get('copy_permissions')
263 fork_id = safe_int(form_data.get('fork_parent_id'))
263 fork_id = safe_int(form_data.get('fork_parent_id'))
264
264
265 try:
265 try:
266 fork_of = RepoModel()._get_repo(fork_id)
266 fork_of = RepoModel()._get_repo(fork_id)
267 RepoModel()._create_repo(
267 RepoModel()._create_repo(
268 repo_name=repo_name_full,
268 repo_name=repo_name_full,
269 repo_type=repo_type,
269 repo_type=repo_type,
270 description=description,
270 description=description,
271 owner=owner,
271 owner=owner,
272 private=private,
272 private=private,
273 clone_uri=clone_uri,
273 clone_uri=clone_uri,
274 repo_group=repo_group,
274 repo_group=repo_group,
275 landing_rev=landing_ref,
275 landing_rev=landing_ref,
276 fork_of=fork_of,
276 fork_of=fork_of,
277 copy_fork_permissions=copy_fork_permissions
277 copy_fork_permissions=copy_fork_permissions
278 )
278 )
279
279
280 Session().commit()
280 Session().commit()
281
281
282 base_path = Repository.base_path()
282 base_path = Repository.base_path()
283 source_repo_path = os.path.join(base_path, fork_of.repo_name)
283 source_repo_path = os.path.join(base_path, fork_of.repo_name)
284
284
285 # now create this repo on Filesystem
285 # now create this repo on Filesystem
286 RepoModel()._create_filesystem_repo(
286 RepoModel()._create_filesystem_repo(
287 repo_name=repo_name,
287 repo_name=repo_name,
288 repo_type=repo_type,
288 repo_type=repo_type,
289 repo_group=RepoModel()._get_repo_group(repo_group),
289 repo_group=RepoModel()._get_repo_group(repo_group),
290 clone_uri=source_repo_path,
290 clone_uri=source_repo_path,
291 )
291 )
292 repo = Repository.get_by_repo_name(repo_name_full)
292 repo = Repository.get_by_repo_name(repo_name_full)
293 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
293 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
294
294
295 # update repo commit caches initially
295 # update repo commit caches initially
296 config = repo._config
296 config = repo._config
297 config.set('extensions', 'largefiles', '')
297 config.set('extensions', 'largefiles', '')
298 repo.update_commit_cache(config=config)
298 repo.update_commit_cache(config=config)
299
299
300 # set new created state
300 # set new created state
301 repo.set_state(Repository.STATE_CREATED)
301 repo.set_state(Repository.STATE_CREATED)
302
302
303 repo_id = repo.repo_id
303 repo_id = repo.repo_id
304 repo_data = repo.get_api_data()
304 repo_data = repo.get_api_data()
305 audit_logger.store(
305 audit_logger.store(
306 'repo.fork', action_data={'data': repo_data},
306 'repo.fork', action_data={'data': repo_data},
307 user=cur_user,
307 user=cur_user,
308 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
308 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
309
309
310 Session().commit()
310 Session().commit()
311 except Exception as e:
311 except Exception as e:
312 log.warning('Exception occurred when forking repository, '
312 log.warning('Exception occurred when forking repository, '
313 'doing cleanup...', exc_info=True)
313 'doing cleanup...', exc_info=True)
314 if isinstance(e, IntegrityError):
314 if isinstance(e, IntegrityError):
315 Session().rollback()
315 Session().rollback()
316
316
317 # rollback things manually !
317 # rollback things manually !
318 repo = Repository.get_by_repo_name(repo_name_full)
318 repo = Repository.get_by_repo_name(repo_name_full)
319 if repo:
319 if repo:
320 Repository.delete(repo.repo_id)
320 Repository.delete(repo.repo_id)
321 Session().commit()
321 Session().commit()
322 RepoModel()._delete_filesystem_repo(repo)
322 RepoModel()._delete_filesystem_repo(repo)
323 log.info('Cleanup of repo %s finished', repo_name_full)
323 log.info('Cleanup of repo %s finished', repo_name_full)
324 raise
324 raise
325
325
326 return True
326 return True
327
327
328
328
329 @async_task(ignore_result=True)
329 @async_task(ignore_result=True)
330 def repo_maintenance(repoid):
330 def repo_maintenance(repoid):
331 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
331 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
332 log = get_logger(repo_maintenance)
332 log = get_logger(repo_maintenance)
333 repo = Repository.get_by_id_or_repo_name(repoid)
333 repo = Repository.get_by_id_or_repo_name(repoid)
334 if repo:
334 if repo:
335 maintenance = repo_maintenance_lib.RepoMaintenance()
335 maintenance = repo_maintenance_lib.RepoMaintenance()
336 tasks = maintenance.get_tasks_for_repo(repo)
336 tasks = maintenance.get_tasks_for_repo(repo)
337 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
337 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
338 executed_types = maintenance.execute(repo)
338 executed_types = maintenance.execute(repo)
339 log.debug('Got execution results %s', executed_types)
339 log.debug('Got execution results %s', executed_types)
340 else:
340 else:
341 log.debug('Repo `%s` not found or without a clone_url', repoid)
341 log.debug('Repo `%s` not found or without a clone_url', repoid)
342
342
343
343
344 @async_task(ignore_result=True)
344 @async_task(ignore_result=True)
345 def check_for_update(send_email_notification=True, email_recipients=None):
345 def check_for_update(send_email_notification=True, email_recipients=None):
346 from rhodecode.model.update import UpdateModel
346 from rhodecode.model.update import UpdateModel
347 from rhodecode.model.notification import EmailNotificationModel
347 from rhodecode.model.notification import EmailNotificationModel
348
348
349 log = get_logger(check_for_update)
349 log = get_logger(check_for_update)
350 update_url = UpdateModel().get_update_url()
350 update_url = UpdateModel().get_update_url()
351 cur_ver = rhodecode.__version__
351 cur_ver = rhodecode.__version__
352
352
353 try:
353 try:
354 data = UpdateModel().get_update_data(update_url)
354 data = UpdateModel().get_update_data(update_url)
355
355
356 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
356 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
357 latest_ver = data['versions'][0]['version']
357 latest_ver = data['versions'][0]['version']
358 UpdateModel().store_version(latest_ver)
358 UpdateModel().store_version(latest_ver)
359
359
360 if send_email_notification:
360 if send_email_notification:
361 log.debug('Send email notification is enabled. '
361 log.debug('Send email notification is enabled. '
362 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
362 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
363 if UpdateModel().is_outdated(current_ver, latest_ver):
363 if UpdateModel().is_outdated(current_ver, latest_ver):
364
364
365 email_kwargs = {
365 email_kwargs = {
366 'current_ver': current_ver,
366 'current_ver': current_ver,
367 'latest_ver': latest_ver,
367 'latest_ver': latest_ver,
368 }
368 }
369
369
370 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
370 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
371 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
371 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
372
372
373 email_recipients = aslist(email_recipients, sep=',') or \
373 email_recipients = aslist(email_recipients, sep=',') or \
374 [user.email for user in User.get_all_super_admins()]
374 [user.email for user in User.get_all_super_admins()]
375 run_task(send_email, email_recipients, subject,
375 run_task(send_email, email_recipients, subject,
376 email_body_plaintext, email_body)
376 email_body_plaintext, email_body)
377
377
378 except Exception:
378 except Exception:
379 pass
379 pass
380
380
381
381
382 @async_task(ignore_result=False)
382 @async_task(ignore_result=False)
383 def beat_check(*args, **kwargs):
383 def beat_check(*args, **kwargs):
384 log = get_logger(beat_check)
384 log = get_logger(beat_check)
385 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
385 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
386 return time.time()
386 return time.time()
387
387
388
388
389 @async_task(ignore_result=True)
389 def sync_last_update_for_objects(*args, **kwargs):
390 def sync_last_update(*args, **kwargs):
391
392 skip_repos = kwargs.get('skip_repos')
390 skip_repos = kwargs.get('skip_repos')
393 if not skip_repos:
391 if not skip_repos:
394 repos = Repository.query() \
392 repos = Repository.query() \
395 .order_by(Repository.group_id.asc())
393 .order_by(Repository.group_id.asc())
396
394
397 for repo in repos:
395 for repo in repos:
398 repo.update_commit_cache()
396 repo.update_commit_cache()
399
397
400 skip_groups = kwargs.get('skip_groups')
398 skip_groups = kwargs.get('skip_groups')
401 if not skip_groups:
399 if not skip_groups:
402 repo_groups = RepoGroup.query() \
400 repo_groups = RepoGroup.query() \
403 .filter(RepoGroup.group_parent_id == None)
401 .filter(RepoGroup.group_parent_id == None)
404
402
405 for root_gr in repo_groups:
403 for root_gr in repo_groups:
406 for repo_gr in reversed(root_gr.recursive_groups()):
404 for repo_gr in reversed(root_gr.recursive_groups()):
407 repo_gr.update_commit_cache()
405 repo_gr.update_commit_cache()
406
407
408 @async_task(ignore_result=True)
409 def sync_last_update(*args, **kwargs):
410 sync_last_update_for_objects(*args, **kwargs)
@@ -1,283 +1,284 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import gzip
21 import gzip
22 import shutil
22 import shutil
23 import logging
23 import logging
24 import tempfile
24 import tempfile
25 import urlparse
25 import urlparse
26
26
27 from webob.exc import HTTPNotFound
27 from webob.exc import HTTPNotFound
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
32 from rhodecode.lib.middleware.simplehg import SimpleHg
32 from rhodecode.lib.middleware.simplehg import SimpleHg
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
34 from rhodecode.model.settings import VcsSettingsModel
34 from rhodecode.model.settings import VcsSettingsModel
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38 VCS_TYPE_KEY = '_rc_vcs_type'
38 VCS_TYPE_KEY = '_rc_vcs_type'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
39 VCS_TYPE_SKIP = '_rc_vcs_skip'
40
40
41
41
42 def is_git(environ):
42 def is_git(environ):
43 """
43 """
44 Returns True if requests should be handled by GIT wsgi middleware
44 Returns True if requests should be handled by GIT wsgi middleware
45 """
45 """
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
46 is_git_path = GIT_PROTO_PAT.match(environ['PATH_INFO'])
47 log.debug(
47 log.debug(
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
48 'request path: `%s` detected as GIT PROTOCOL %s', environ['PATH_INFO'],
49 is_git_path is not None)
49 is_git_path is not None)
50
50
51 return is_git_path
51 return is_git_path
52
52
53
53
54 def is_hg(environ):
54 def is_hg(environ):
55 """
55 """
56 Returns True if requests target is mercurial server - header
56 Returns True if requests target is mercurial server - header
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
57 ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
58 """
58 """
59 is_hg_path = False
59 is_hg_path = False
60
60
61 http_accept = environ.get('HTTP_ACCEPT')
61 http_accept = environ.get('HTTP_ACCEPT')
62
62
63 if http_accept and http_accept.startswith('application/mercurial'):
63 if http_accept and http_accept.startswith('application/mercurial'):
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
64 query = urlparse.parse_qs(environ['QUERY_STRING'])
65 if 'cmd' in query:
65 if 'cmd' in query:
66 is_hg_path = True
66 is_hg_path = True
67
67
68 log.debug(
68 log.debug(
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
69 'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
70 is_hg_path)
70 is_hg_path)
71
71
72 return is_hg_path
72 return is_hg_path
73
73
74
74
75 def is_svn(environ):
75 def is_svn(environ):
76 """
76 """
77 Returns True if requests target is Subversion server
77 Returns True if requests target is Subversion server
78 """
78 """
79
79
80 http_dav = environ.get('HTTP_DAV', '')
80 http_dav = environ.get('HTTP_DAV', '')
81 magic_path_segment = rhodecode.CONFIG.get(
81 magic_path_segment = rhodecode.CONFIG.get(
82 'rhodecode_subversion_magic_path', '/!svn')
82 'rhodecode_subversion_magic_path', '/!svn')
83 is_svn_path = (
83 is_svn_path = (
84 'subversion' in http_dav or
84 'subversion' in http_dav or
85 magic_path_segment in environ['PATH_INFO']
85 magic_path_segment in environ['PATH_INFO']
86 or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
86 or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
87 )
87 )
88 log.debug(
88 log.debug(
89 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
89 'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
90 is_svn_path)
90 is_svn_path)
91
91
92 return is_svn_path
92 return is_svn_path
93
93
94
94
95 class GunzipMiddleware(object):
95 class GunzipMiddleware(object):
96 """
96 """
97 WSGI middleware that unzips gzip-encoded requests before
97 WSGI middleware that unzips gzip-encoded requests before
98 passing on to the underlying application.
98 passing on to the underlying application.
99 """
99 """
100
100
101 def __init__(self, application):
101 def __init__(self, application):
102 self.app = application
102 self.app = application
103
103
104 def __call__(self, environ, start_response):
104 def __call__(self, environ, start_response):
105 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
105 accepts_encoding_header = environ.get('HTTP_CONTENT_ENCODING', b'')
106
106
107 if b'gzip' in accepts_encoding_header:
107 if b'gzip' in accepts_encoding_header:
108 log.debug('gzip detected, now running gunzip wrapper')
108 log.debug('gzip detected, now running gunzip wrapper')
109 wsgi_input = environ['wsgi.input']
109 wsgi_input = environ['wsgi.input']
110
110
111 if not hasattr(environ['wsgi.input'], 'seek'):
111 if not hasattr(environ['wsgi.input'], 'seek'):
112 # The gzip implementation in the standard library of Python 2.x
112 # The gzip implementation in the standard library of Python 2.x
113 # requires the '.seek()' and '.tell()' methods to be available
113 # requires the '.seek()' and '.tell()' methods to be available
114 # on the input stream. Read the data into a temporary file to
114 # on the input stream. Read the data into a temporary file to
115 # work around this limitation.
115 # work around this limitation.
116
116
117 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
117 wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
118 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
118 shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
119 wsgi_input.seek(0)
119 wsgi_input.seek(0)
120
120
121 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
121 environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
122 # since we "Ungzipped" the content we say now it's no longer gzip
122 # since we "Ungzipped" the content we say now it's no longer gzip
123 # content encoding
123 # content encoding
124 del environ['HTTP_CONTENT_ENCODING']
124 del environ['HTTP_CONTENT_ENCODING']
125
125
126 # content length has changes ? or i'm not sure
126 # content length has changes ? or i'm not sure
127 if 'CONTENT_LENGTH' in environ:
127 if 'CONTENT_LENGTH' in environ:
128 del environ['CONTENT_LENGTH']
128 del environ['CONTENT_LENGTH']
129 else:
129 else:
130 log.debug('content not gzipped, gzipMiddleware passing '
130 log.debug('content not gzipped, gzipMiddleware passing '
131 'request further')
131 'request further')
132 return self.app(environ, start_response)
132 return self.app(environ, start_response)
133
133
134
134
135 def is_vcs_call(environ):
135 def is_vcs_call(environ):
136 if VCS_TYPE_KEY in environ:
136 if VCS_TYPE_KEY in environ:
137 raw_type = environ[VCS_TYPE_KEY]
137 raw_type = environ[VCS_TYPE_KEY]
138 return raw_type and raw_type != VCS_TYPE_SKIP
138 return raw_type and raw_type != VCS_TYPE_SKIP
139 return False
139 return False
140
140
141
141
142 def get_path_elem(route_path):
142 def get_path_elem(route_path):
143 if not route_path:
143 if not route_path:
144 return None
144 return None
145
145
146 cleaned_route_path = route_path.lstrip('/')
146 cleaned_route_path = route_path.lstrip('/')
147 if cleaned_route_path:
147 if cleaned_route_path:
148 cleaned_route_path_elems = cleaned_route_path.split('/')
148 cleaned_route_path_elems = cleaned_route_path.split('/')
149 if cleaned_route_path_elems:
149 if cleaned_route_path_elems:
150 return cleaned_route_path_elems[0]
150 return cleaned_route_path_elems[0]
151 return None
151 return None
152
152
153
153
154 def detect_vcs_request(environ, backends):
154 def detect_vcs_request(environ, backends):
155 checks = {
155 checks = {
156 'hg': (is_hg, SimpleHg),
156 'hg': (is_hg, SimpleHg),
157 'git': (is_git, SimpleGit),
157 'git': (is_git, SimpleGit),
158 'svn': (is_svn, SimpleSvn),
158 'svn': (is_svn, SimpleSvn),
159 }
159 }
160 handler = None
160 handler = None
161 # List of path views first chunk we don't do any checks
161 # List of path views first chunk we don't do any checks
162 white_list = [
162 white_list = [
163 # e.g /_file_store/download
163 # e.g /_file_store/download
164 '_file_store',
164 '_file_store',
165
165
166 # static files no detection
166 # static files no detection
167 '_static',
167 '_static',
168
168
169 # skip ops ping
169 # skip ops ping, status
170 '_admin/ops/ping',
170 '_admin/ops/ping',
171 '_admin/ops/status',
171
172
172 # full channelstream connect should be VCS skipped
173 # full channelstream connect should be VCS skipped
173 '_admin/channelstream/connect',
174 '_admin/channelstream/connect',
174 ]
175 ]
175
176
176 path_info = environ['PATH_INFO']
177 path_info = environ['PATH_INFO']
177
178
178 path_elem = get_path_elem(path_info)
179 path_elem = get_path_elem(path_info)
179
180
180 if path_elem in white_list:
181 if path_elem in white_list:
181 log.debug('path `%s` in whitelist, skipping...', path_info)
182 log.debug('path `%s` in whitelist, skipping...', path_info)
182 return handler
183 return handler
183
184
184 path_url = path_info.lstrip('/')
185 path_url = path_info.lstrip('/')
185 if path_url in white_list:
186 if path_url in white_list:
186 log.debug('full url path `%s` in whitelist, skipping...', path_url)
187 log.debug('full url path `%s` in whitelist, skipping...', path_url)
187 return handler
188 return handler
188
189
189 if VCS_TYPE_KEY in environ:
190 if VCS_TYPE_KEY in environ:
190 raw_type = environ[VCS_TYPE_KEY]
191 raw_type = environ[VCS_TYPE_KEY]
191 if raw_type == VCS_TYPE_SKIP:
192 if raw_type == VCS_TYPE_SKIP:
192 log.debug('got `skip` marker for vcs detection, skipping...')
193 log.debug('got `skip` marker for vcs detection, skipping...')
193 return handler
194 return handler
194
195
195 _check, handler = checks.get(raw_type) or [None, None]
196 _check, handler = checks.get(raw_type) or [None, None]
196 if handler:
197 if handler:
197 log.debug('got handler:%s from environ', handler)
198 log.debug('got handler:%s from environ', handler)
198
199
199 if not handler:
200 if not handler:
200 log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
201 log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
201 for vcs_type in backends:
202 for vcs_type in backends:
202 vcs_check, _handler = checks[vcs_type]
203 vcs_check, _handler = checks[vcs_type]
203 if vcs_check(environ):
204 if vcs_check(environ):
204 log.debug('vcs handler found %s', _handler)
205 log.debug('vcs handler found %s', _handler)
205 handler = _handler
206 handler = _handler
206 break
207 break
207
208
208 return handler
209 return handler
209
210
210
211
211 class VCSMiddleware(object):
212 class VCSMiddleware(object):
212
213
213 def __init__(self, app, registry, config, appenlight_client):
214 def __init__(self, app, registry, config, appenlight_client):
214 self.application = app
215 self.application = app
215 self.registry = registry
216 self.registry = registry
216 self.config = config
217 self.config = config
217 self.appenlight_client = appenlight_client
218 self.appenlight_client = appenlight_client
218 self.use_gzip = True
219 self.use_gzip = True
219 # order in which we check the middlewares, based on vcs.backends config
220 # order in which we check the middlewares, based on vcs.backends config
220 self.check_middlewares = config['vcs.backends']
221 self.check_middlewares = config['vcs.backends']
221
222
222 def vcs_config(self, repo_name=None):
223 def vcs_config(self, repo_name=None):
223 """
224 """
224 returns serialized VcsSettings
225 returns serialized VcsSettings
225 """
226 """
226 try:
227 try:
227 return VcsSettingsModel(
228 return VcsSettingsModel(
228 repo=repo_name).get_ui_settings_as_config_obj()
229 repo=repo_name).get_ui_settings_as_config_obj()
229 except Exception:
230 except Exception:
230 pass
231 pass
231
232
232 def wrap_in_gzip_if_enabled(self, app, config):
233 def wrap_in_gzip_if_enabled(self, app, config):
233 if self.use_gzip:
234 if self.use_gzip:
234 app = GunzipMiddleware(app)
235 app = GunzipMiddleware(app)
235 return app
236 return app
236
237
237 def _get_handler_app(self, environ):
238 def _get_handler_app(self, environ):
238 app = None
239 app = None
239 log.debug('VCSMiddleware: detecting vcs type.')
240 log.debug('VCSMiddleware: detecting vcs type.')
240 handler = detect_vcs_request(environ, self.check_middlewares)
241 handler = detect_vcs_request(environ, self.check_middlewares)
241 if handler:
242 if handler:
242 app = handler(self.config, self.registry)
243 app = handler(self.config, self.registry)
243
244
244 return app
245 return app
245
246
246 def __call__(self, environ, start_response):
247 def __call__(self, environ, start_response):
247 # check if we handle one of interesting protocols, optionally extract
248 # check if we handle one of interesting protocols, optionally extract
248 # specific vcsSettings and allow changes of how things are wrapped
249 # specific vcsSettings and allow changes of how things are wrapped
249 vcs_handler = self._get_handler_app(environ)
250 vcs_handler = self._get_handler_app(environ)
250 if vcs_handler:
251 if vcs_handler:
251 # translate the _REPO_ID into real repo NAME for usage
252 # translate the _REPO_ID into real repo NAME for usage
252 # in middleware
253 # in middleware
253 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
254 environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])
254
255
255 # Set acl, url and vcs repo names.
256 # Set acl, url and vcs repo names.
256 vcs_handler.set_repo_names(environ)
257 vcs_handler.set_repo_names(environ)
257
258
258 # register repo config back to the handler
259 # register repo config back to the handler
259 vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
260 vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
260 # maybe damaged/non existent settings. We still want to
261 # maybe damaged/non existent settings. We still want to
261 # pass that point to validate on is_valid_and_existing_repo
262 # pass that point to validate on is_valid_and_existing_repo
262 # and return proper HTTP Code back to client
263 # and return proper HTTP Code back to client
263 if vcs_conf:
264 if vcs_conf:
264 vcs_handler.repo_vcs_config = vcs_conf
265 vcs_handler.repo_vcs_config = vcs_conf
265
266
266 # check for type, presence in database and on filesystem
267 # check for type, presence in database and on filesystem
267 if not vcs_handler.is_valid_and_existing_repo(
268 if not vcs_handler.is_valid_and_existing_repo(
268 vcs_handler.acl_repo_name,
269 vcs_handler.acl_repo_name,
269 vcs_handler.base_path,
270 vcs_handler.base_path,
270 vcs_handler.SCM):
271 vcs_handler.SCM):
271 return HTTPNotFound()(environ, start_response)
272 return HTTPNotFound()(environ, start_response)
272
273
273 environ['REPO_NAME'] = vcs_handler.url_repo_name
274 environ['REPO_NAME'] = vcs_handler.url_repo_name
274
275
275 # Wrap handler in middlewares if they are enabled.
276 # Wrap handler in middlewares if they are enabled.
276 vcs_handler = self.wrap_in_gzip_if_enabled(
277 vcs_handler = self.wrap_in_gzip_if_enabled(
277 vcs_handler, self.config)
278 vcs_handler, self.config)
278 vcs_handler, _ = wrap_in_appenlight_if_enabled(
279 vcs_handler, _ = wrap_in_appenlight_if_enabled(
279 vcs_handler, self.config, self.appenlight_client)
280 vcs_handler, self.config, self.appenlight_client)
280
281
281 return vcs_handler(environ, start_response)
282 return vcs_handler(environ, start_response)
282
283
283 return self.application(environ, start_response)
284 return self.application(environ, start_response)
@@ -1,354 +1,363 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2020 RhodeCode GmbH
3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import errno
22 import errno
23 import logging
23 import logging
24
24
25 import msgpack
25 import msgpack
26 import gevent
26 import gevent
27 import redis
27 import redis
28
28
29 from dogpile.cache.api import CachedValue
29 from dogpile.cache.api import CachedValue
30 from dogpile.cache.backends import memory as memory_backend
30 from dogpile.cache.backends import memory as memory_backend
31 from dogpile.cache.backends import file as file_backend
31 from dogpile.cache.backends import file as file_backend
32 from dogpile.cache.backends import redis as redis_backend
32 from dogpile.cache.backends import redis as redis_backend
33 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
33 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
34 from dogpile.cache.util import memoized_property
34 from dogpile.cache.util import memoized_property
35
35
36 from pyramid.settings import asbool
36 from pyramid.settings import asbool
37
37
38 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
38 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
39
39
40
40
41 _default_max_size = 1024
41 _default_max_size = 1024
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 class LRUMemoryBackend(memory_backend.MemoryBackend):
46 class LRUMemoryBackend(memory_backend.MemoryBackend):
47 key_prefix = 'lru_mem_backend'
47 key_prefix = 'lru_mem_backend'
48 pickle_values = False
48 pickle_values = False
49
49
50 def __init__(self, arguments):
50 def __init__(self, arguments):
51 max_size = arguments.pop('max_size', _default_max_size)
51 max_size = arguments.pop('max_size', _default_max_size)
52
52
53 LRUDictClass = LRUDict
53 LRUDictClass = LRUDict
54 if arguments.pop('log_key_count', None):
54 if arguments.pop('log_key_count', None):
55 LRUDictClass = LRUDictDebug
55 LRUDictClass = LRUDictDebug
56
56
57 arguments['cache_dict'] = LRUDictClass(max_size)
57 arguments['cache_dict'] = LRUDictClass(max_size)
58 super(LRUMemoryBackend, self).__init__(arguments)
58 super(LRUMemoryBackend, self).__init__(arguments)
59
59
60 def delete(self, key):
60 def delete(self, key):
61 try:
61 try:
62 del self._cache[key]
62 del self._cache[key]
63 except KeyError:
63 except KeyError:
64 # we don't care if key isn't there at deletion
64 # we don't care if key isn't there at deletion
65 pass
65 pass
66
66
67 def delete_multi(self, keys):
67 def delete_multi(self, keys):
68 for key in keys:
68 for key in keys:
69 self.delete(key)
69 self.delete(key)
70
70
71
71
72 class PickleSerializer(object):
72 class PickleSerializer(object):
73
73
74 def _dumps(self, value, safe=False):
74 def _dumps(self, value, safe=False):
75 try:
75 try:
76 return compat.pickle.dumps(value)
76 return compat.pickle.dumps(value)
77 except Exception:
77 except Exception:
78 if safe:
78 if safe:
79 return NO_VALUE
79 return NO_VALUE
80 else:
80 else:
81 raise
81 raise
82
82
83 def _loads(self, value, safe=True):
83 def _loads(self, value, safe=True):
84 try:
84 try:
85 return compat.pickle.loads(value)
85 return compat.pickle.loads(value)
86 except Exception:
86 except Exception:
87 if safe:
87 if safe:
88 return NO_VALUE
88 return NO_VALUE
89 else:
89 else:
90 raise
90 raise
91
91
92
92
93 class MsgPackSerializer(object):
93 class MsgPackSerializer(object):
94
94
95 def _dumps(self, value, safe=False):
95 def _dumps(self, value, safe=False):
96 try:
96 try:
97 return msgpack.packb(value)
97 return msgpack.packb(value)
98 except Exception:
98 except Exception:
99 if safe:
99 if safe:
100 return NO_VALUE
100 return NO_VALUE
101 else:
101 else:
102 raise
102 raise
103
103
104 def _loads(self, value, safe=True):
104 def _loads(self, value, safe=True):
105 """
105 """
106 pickle maintained the `CachedValue` wrapper of the tuple
106 pickle maintained the `CachedValue` wrapper of the tuple
107 msgpack does not, so it must be added back in.
107 msgpack does not, so it must be added back in.
108 """
108 """
109 try:
109 try:
110 value = msgpack.unpackb(value, use_list=False)
110 value = msgpack.unpackb(value, use_list=False)
111 return CachedValue(*value)
111 return CachedValue(*value)
112 except Exception:
112 except Exception:
113 if safe:
113 if safe:
114 return NO_VALUE
114 return NO_VALUE
115 else:
115 else:
116 raise
116 raise
117
117
118
118
119 import fcntl
119 import fcntl
120 flock_org = fcntl.flock
120 flock_org = fcntl.flock
121
121
122
122
123 class CustomLockFactory(FileLock):
123 class CustomLockFactory(FileLock):
124
124
125 @memoized_property
125 @memoized_property
126 def _module(self):
126 def _module(self):
127
127
128 def gevent_flock(fd, operation):
128 def gevent_flock(fd, operation):
129 """
129 """
130 Gevent compatible flock
130 Gevent compatible flock
131 """
131 """
132 # set non-blocking, this will cause an exception if we cannot acquire a lock
132 # set non-blocking, this will cause an exception if we cannot acquire a lock
133 operation |= fcntl.LOCK_NB
133 operation |= fcntl.LOCK_NB
134 start_lock_time = time.time()
134 start_lock_time = time.time()
135 timeout = 60 * 15 # 15min
135 timeout = 60 * 15 # 15min
136 while True:
136 while True:
137 try:
137 try:
138 flock_org(fd, operation)
138 flock_org(fd, operation)
139 # lock has been acquired
139 # lock has been acquired
140 break
140 break
141 except (OSError, IOError) as e:
141 except (OSError, IOError) as e:
142 # raise on other errors than Resource temporarily unavailable
142 # raise on other errors than Resource temporarily unavailable
143 if e.errno != errno.EAGAIN:
143 if e.errno != errno.EAGAIN:
144 raise
144 raise
145 elif (time.time() - start_lock_time) > timeout:
145 elif (time.time() - start_lock_time) > timeout:
146 # waited to much time on a lock, better fail than loop for ever
146 # waited to much time on a lock, better fail than loop for ever
147 log.error('Failed to acquire lock on `%s` after waiting %ss',
147 log.error('Failed to acquire lock on `%s` after waiting %ss',
148 self.filename, timeout)
148 self.filename, timeout)
149 raise
149 raise
150 wait_timeout = 0.03
150 wait_timeout = 0.03
151 log.debug('Failed to acquire lock on `%s`, retry in %ss',
151 log.debug('Failed to acquire lock on `%s`, retry in %ss',
152 self.filename, wait_timeout)
152 self.filename, wait_timeout)
153 gevent.sleep(wait_timeout)
153 gevent.sleep(wait_timeout)
154
154
155 fcntl.flock = gevent_flock
155 fcntl.flock = gevent_flock
156 return fcntl
156 return fcntl
157
157
158
158
159 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
159 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
160 key_prefix = 'file_backend'
160 key_prefix = 'file_backend'
161
161
162 def __init__(self, arguments):
162 def __init__(self, arguments):
163 arguments['lock_factory'] = CustomLockFactory
163 arguments['lock_factory'] = CustomLockFactory
164 db_file = arguments.get('filename')
164 db_file = arguments.get('filename')
165
165
166 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
166 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
167 try:
167 try:
168 super(FileNamespaceBackend, self).__init__(arguments)
168 super(FileNamespaceBackend, self).__init__(arguments)
169 except Exception:
169 except Exception:
170 log.error('Failed to initialize db at: %s', db_file)
170 log.error('Failed to initialize db at: %s', db_file)
171 raise
171 raise
172
172
173 def __repr__(self):
173 def __repr__(self):
174 return '{} `{}`'.format(self.__class__, self.filename)
174 return '{} `{}`'.format(self.__class__, self.filename)
175
175
176 def list_keys(self, prefix=''):
176 def list_keys(self, prefix=''):
177 prefix = '{}:{}'.format(self.key_prefix, prefix)
177 prefix = '{}:{}'.format(self.key_prefix, prefix)
178
178
179 def cond(v):
179 def cond(v):
180 if not prefix:
180 if not prefix:
181 return True
181 return True
182
182
183 if v.startswith(prefix):
183 if v.startswith(prefix):
184 return True
184 return True
185 return False
185 return False
186
186
187 with self._dbm_file(True) as dbm:
187 with self._dbm_file(True) as dbm:
188 try:
188 try:
189 return filter(cond, dbm.keys())
189 return filter(cond, dbm.keys())
190 except Exception:
190 except Exception:
191 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
191 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
192 raise
192 raise
193
193
194 def get_store(self):
194 def get_store(self):
195 return self.filename
195 return self.filename
196
196
197 def _dbm_get(self, key):
197 def _dbm_get(self, key):
198 with self._dbm_file(False) as dbm:
198 with self._dbm_file(False) as dbm:
199 if hasattr(dbm, 'get'):
199 if hasattr(dbm, 'get'):
200 value = dbm.get(key, NO_VALUE)
200 value = dbm.get(key, NO_VALUE)
201 else:
201 else:
202 # gdbm objects lack a .get method
202 # gdbm objects lack a .get method
203 try:
203 try:
204 value = dbm[key]
204 value = dbm[key]
205 except KeyError:
205 except KeyError:
206 value = NO_VALUE
206 value = NO_VALUE
207 if value is not NO_VALUE:
207 if value is not NO_VALUE:
208 value = self._loads(value)
208 value = self._loads(value)
209 return value
209 return value
210
210
211 def get(self, key):
211 def get(self, key):
212 try:
212 try:
213 return self._dbm_get(key)
213 return self._dbm_get(key)
214 except Exception:
214 except Exception:
215 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
215 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
216 raise
216 raise
217
217
218 def set(self, key, value):
218 def set(self, key, value):
219 with self._dbm_file(True) as dbm:
219 with self._dbm_file(True) as dbm:
220 dbm[key] = self._dumps(value)
220 dbm[key] = self._dumps(value)
221
221
222 def set_multi(self, mapping):
222 def set_multi(self, mapping):
223 with self._dbm_file(True) as dbm:
223 with self._dbm_file(True) as dbm:
224 for key, value in mapping.items():
224 for key, value in mapping.items():
225 dbm[key] = self._dumps(value)
225 dbm[key] = self._dumps(value)
226
226
227
227
228 class BaseRedisBackend(redis_backend.RedisBackend):
228 class BaseRedisBackend(redis_backend.RedisBackend):
229 key_prefix = ''
229 key_prefix = ''
230
230
231 def __init__(self, arguments):
231 def __init__(self, arguments):
232 super(BaseRedisBackend, self).__init__(arguments)
232 super(BaseRedisBackend, self).__init__(arguments)
233 self._lock_timeout = self.lock_timeout
233 self._lock_timeout = self.lock_timeout
234 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
234 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
235
235
236 if self._lock_auto_renewal and not self._lock_timeout:
236 if self._lock_auto_renewal and not self._lock_timeout:
237 # set default timeout for auto_renewal
237 # set default timeout for auto_renewal
238 self._lock_timeout = 30
238 self._lock_timeout = 30
239
239
240 def _create_client(self):
240 def _create_client(self):
241 args = {}
241 args = {}
242
242
243 if self.url is not None:
243 if self.url is not None:
244 args.update(url=self.url)
244 args.update(url=self.url)
245
245
246 else:
246 else:
247 args.update(
247 args.update(
248 host=self.host, password=self.password,
248 host=self.host, password=self.password,
249 port=self.port, db=self.db
249 port=self.port, db=self.db
250 )
250 )
251
251
252 connection_pool = redis.ConnectionPool(**args)
252 connection_pool = redis.ConnectionPool(**args)
253
253
254 return redis.StrictRedis(connection_pool=connection_pool)
254 return redis.StrictRedis(connection_pool=connection_pool)
255
255
256 def list_keys(self, prefix=''):
256 def list_keys(self, prefix=''):
257 prefix = '{}:{}*'.format(self.key_prefix, prefix)
257 prefix = '{}:{}*'.format(self.key_prefix, prefix)
258 return self.client.keys(prefix)
258 return self.client.keys(prefix)
259
259
260 def get_store(self):
260 def get_store(self):
261 return self.client.connection_pool
261 return self.client.connection_pool
262
262
263 def get(self, key):
263 def get(self, key):
264 value = self.client.get(key)
264 value = self.client.get(key)
265 if value is None:
265 if value is None:
266 return NO_VALUE
266 return NO_VALUE
267 return self._loads(value)
267 return self._loads(value)
268
268
269 def get_multi(self, keys):
269 def get_multi(self, keys):
270 if not keys:
270 if not keys:
271 return []
271 return []
272 values = self.client.mget(keys)
272 values = self.client.mget(keys)
273 loads = self._loads
273 loads = self._loads
274 return [
274 return [
275 loads(v) if v is not None else NO_VALUE
275 loads(v) if v is not None else NO_VALUE
276 for v in values]
276 for v in values]
277
277
278 def set(self, key, value):
278 def set(self, key, value):
279 if self.redis_expiration_time:
279 if self.redis_expiration_time:
280 self.client.setex(key, self.redis_expiration_time,
280 self.client.setex(key, self.redis_expiration_time,
281 self._dumps(value))
281 self._dumps(value))
282 else:
282 else:
283 self.client.set(key, self._dumps(value))
283 self.client.set(key, self._dumps(value))
284
284
285 def set_multi(self, mapping):
285 def set_multi(self, mapping):
286 dumps = self._dumps
286 dumps = self._dumps
287 mapping = dict(
287 mapping = dict(
288 (k, dumps(v))
288 (k, dumps(v))
289 for k, v in mapping.items()
289 for k, v in mapping.items()
290 )
290 )
291
291
292 if not self.redis_expiration_time:
292 if not self.redis_expiration_time:
293 self.client.mset(mapping)
293 self.client.mset(mapping)
294 else:
294 else:
295 pipe = self.client.pipeline()
295 pipe = self.client.pipeline()
296 for key, value in mapping.items():
296 for key, value in mapping.items():
297 pipe.setex(key, self.redis_expiration_time, value)
297 pipe.setex(key, self.redis_expiration_time, value)
298 pipe.execute()
298 pipe.execute()
299
299
300 def get_mutex(self, key):
300 def get_mutex(self, key):
301 if self.distributed_lock:
301 if self.distributed_lock:
302 lock_key = redis_backend.u('_lock_{0}').format(key)
302 lock_key = redis_backend.u('_lock_{0}').format(key)
303 log.debug('Trying to acquire Redis lock for key %s', lock_key)
304 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
303 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
305 auto_renewal=self._lock_auto_renewal)
304 auto_renewal=self._lock_auto_renewal)
306 else:
305 else:
307 return None
306 return None
308
307
309
308
310 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
309 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
311 key_prefix = 'redis_pickle_backend'
310 key_prefix = 'redis_pickle_backend'
312 pass
311 pass
313
312
314
313
315 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
314 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
316 key_prefix = 'redis_msgpack_backend'
315 key_prefix = 'redis_msgpack_backend'
317 pass
316 pass
318
317
319
318
320 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
319 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
321 import redis_lock
320 import redis_lock
322
321
323 class _RedisLockWrapper(object):
322 class _RedisLockWrapper(object):
324 """LockWrapper for redis_lock"""
323 """LockWrapper for redis_lock"""
325
324
326 @classmethod
325 @classmethod
327 def get_lock(cls):
326 def get_lock(cls):
328 return redis_lock.Lock(
327 return redis_lock.Lock(
329 redis_client=client,
328 redis_client=client,
330 name=lock_key,
329 name=lock_key,
331 expire=lock_timeout,
330 expire=lock_timeout,
332 auto_renewal=auto_renewal,
331 auto_renewal=auto_renewal,
333 strict=True,
332 strict=True,
334 )
333 )
335
334
335 def __repr__(self):
336 return "{}:{}".format(self.__class__.__name__, lock_key)
337
338 def __str__(self):
339 return "{}:{}".format(self.__class__.__name__, lock_key)
340
336 def __init__(self):
341 def __init__(self):
337 self.lock = self.get_lock()
342 self.lock = self.get_lock()
343 self.lock_key = lock_key
338
344
339 def acquire(self, wait=True):
345 def acquire(self, wait=True):
346 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
340 try:
347 try:
341 return self.lock.acquire(wait)
348 acquired = self.lock.acquire(wait)
349 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
350 return acquired
342 except redis_lock.AlreadyAcquired:
351 except redis_lock.AlreadyAcquired:
343 return False
352 return False
344 except redis_lock.AlreadyStarted:
353 except redis_lock.AlreadyStarted:
345 # refresh thread exists, but it also means we acquired the lock
354 # refresh thread exists, but it also means we acquired the lock
346 return True
355 return True
347
356
348 def release(self):
357 def release(self):
349 try:
358 try:
350 self.lock.release()
359 self.lock.release()
351 except redis_lock.NotAcquired:
360 except redis_lock.NotAcquired:
352 pass
361 pass
353
362
354 return _RedisLockWrapper()
363 return _RedisLockWrapper()
@@ -1,418 +1,422 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2020 RhodeCode GmbH
3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 import time
21 import time
22 import logging
22 import logging
23 import functools
23 import functools
24 import threading
24 import threading
25
25
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27 from dogpile.cache.util import compat
27 from dogpile.cache.util import compat
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.utils import safe_str, sha1
30 from rhodecode.lib.utils import safe_str, sha1
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
33
33
34 from rhodecode.lib.rc_cache import cache_key_meta
34 from rhodecode.lib.rc_cache import cache_key_meta
35 from rhodecode.lib.rc_cache import region_meta
35 from rhodecode.lib.rc_cache import region_meta
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 def isCython(func):
40 def isCython(func):
41 """
41 """
42 Private helper that checks if a function is a cython function.
42 Private helper that checks if a function is a cython function.
43 """
43 """
44 return func.__class__.__name__ == 'cython_function_or_method'
44 return func.__class__.__name__ == 'cython_function_or_method'
45
45
46
46
47 class RhodeCodeCacheRegion(CacheRegion):
47 class RhodeCodeCacheRegion(CacheRegion):
48
48
49 def conditional_cache_on_arguments(
49 def conditional_cache_on_arguments(
50 self, namespace=None,
50 self, namespace=None,
51 expiration_time=None,
51 expiration_time=None,
52 should_cache_fn=None,
52 should_cache_fn=None,
53 to_str=compat.string_type,
53 to_str=compat.string_type,
54 function_key_generator=None,
54 function_key_generator=None,
55 condition=True):
55 condition=True):
56 """
56 """
57 Custom conditional decorator, that will not touch any dogpile internals if
57 Custom conditional decorator, that will not touch any dogpile internals if
58 condition isn't meet. This works a bit different than should_cache_fn
58 condition isn't meet. This works a bit different than should_cache_fn
59 And it's faster in cases we don't ever want to compute cached values
59 And it's faster in cases we don't ever want to compute cached values
60 """
60 """
61 expiration_time_is_callable = compat.callable(expiration_time)
61 expiration_time_is_callable = compat.callable(expiration_time)
62
62
63 if function_key_generator is None:
63 if function_key_generator is None:
64 function_key_generator = self.function_key_generator
64 function_key_generator = self.function_key_generator
65
65
66 # workaround for py2 and cython problems, this block should be removed
66 # workaround for py2 and cython problems, this block should be removed
67 # once we've migrated to py3
67 # once we've migrated to py3
68 if 'cython' == 'cython':
68 if 'cython' == 'cython':
69 def decorator(fn):
69 def decorator(fn):
70 if to_str is compat.string_type:
70 if to_str is compat.string_type:
71 # backwards compatible
71 # backwards compatible
72 key_generator = function_key_generator(namespace, fn)
72 key_generator = function_key_generator(namespace, fn)
73 else:
73 else:
74 key_generator = function_key_generator(namespace, fn, to_str=to_str)
74 key_generator = function_key_generator(namespace, fn, to_str=to_str)
75
75
76 @functools.wraps(fn)
76 @functools.wraps(fn)
77 def decorate(*arg, **kw):
77 def decorate(*arg, **kw):
78 key = key_generator(*arg, **kw)
78 key = key_generator(*arg, **kw)
79
79
80 @functools.wraps(fn)
80 @functools.wraps(fn)
81 def creator():
81 def creator():
82 return fn(*arg, **kw)
82 return fn(*arg, **kw)
83
83
84 if not condition:
84 if not condition:
85 return creator()
85 return creator()
86
86
87 timeout = expiration_time() if expiration_time_is_callable \
87 timeout = expiration_time() if expiration_time_is_callable \
88 else expiration_time
88 else expiration_time
89
89
90 return self.get_or_create(key, creator, timeout, should_cache_fn)
90 return self.get_or_create(key, creator, timeout, should_cache_fn)
91
91
92 def invalidate(*arg, **kw):
92 def invalidate(*arg, **kw):
93 key = key_generator(*arg, **kw)
93 key = key_generator(*arg, **kw)
94 self.delete(key)
94 self.delete(key)
95
95
96 def set_(value, *arg, **kw):
96 def set_(value, *arg, **kw):
97 key = key_generator(*arg, **kw)
97 key = key_generator(*arg, **kw)
98 self.set(key, value)
98 self.set(key, value)
99
99
100 def get(*arg, **kw):
100 def get(*arg, **kw):
101 key = key_generator(*arg, **kw)
101 key = key_generator(*arg, **kw)
102 return self.get(key)
102 return self.get(key)
103
103
104 def refresh(*arg, **kw):
104 def refresh(*arg, **kw):
105 key = key_generator(*arg, **kw)
105 key = key_generator(*arg, **kw)
106 value = fn(*arg, **kw)
106 value = fn(*arg, **kw)
107 self.set(key, value)
107 self.set(key, value)
108 return value
108 return value
109
109
110 decorate.set = set_
110 decorate.set = set_
111 decorate.invalidate = invalidate
111 decorate.invalidate = invalidate
112 decorate.refresh = refresh
112 decorate.refresh = refresh
113 decorate.get = get
113 decorate.get = get
114 decorate.original = fn
114 decorate.original = fn
115 decorate.key_generator = key_generator
115 decorate.key_generator = key_generator
116 decorate.__wrapped__ = fn
116 decorate.__wrapped__ = fn
117
117
118 return decorate
118 return decorate
119 return decorator
119 return decorator
120
120
121 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
121 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
122
122
123 if not condition:
123 if not condition:
124 log.debug('Calling un-cached func:%s', user_func.func_name)
124 log.debug('Calling un-cached func:%s', user_func.func_name)
125 return user_func(*arg, **kw)
125 start = time.time()
126 result = user_func(*arg, **kw)
127 total = time.time() - start
128 log.debug('un-cached func:%s took %.4fs', user_func.func_name, total)
129 return result
126
130
127 key = key_generator(*arg, **kw)
131 key = key_generator(*arg, **kw)
128
132
129 timeout = expiration_time() if expiration_time_is_callable \
133 timeout = expiration_time() if expiration_time_is_callable \
130 else expiration_time
134 else expiration_time
131
135
132 log.debug('Calling cached fn:%s', user_func.func_name)
136 log.debug('Calling cached fn:%s', user_func.func_name)
133 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
137 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
134
138
135 def cache_decorator(user_func):
139 def cache_decorator(user_func):
136 if to_str is compat.string_type:
140 if to_str is compat.string_type:
137 # backwards compatible
141 # backwards compatible
138 key_generator = function_key_generator(namespace, user_func)
142 key_generator = function_key_generator(namespace, user_func)
139 else:
143 else:
140 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
144 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
141
145
142 def refresh(*arg, **kw):
146 def refresh(*arg, **kw):
143 """
147 """
144 Like invalidate, but regenerates the value instead
148 Like invalidate, but regenerates the value instead
145 """
149 """
146 key = key_generator(*arg, **kw)
150 key = key_generator(*arg, **kw)
147 value = user_func(*arg, **kw)
151 value = user_func(*arg, **kw)
148 self.set(key, value)
152 self.set(key, value)
149 return value
153 return value
150
154
151 def invalidate(*arg, **kw):
155 def invalidate(*arg, **kw):
152 key = key_generator(*arg, **kw)
156 key = key_generator(*arg, **kw)
153 self.delete(key)
157 self.delete(key)
154
158
155 def set_(value, *arg, **kw):
159 def set_(value, *arg, **kw):
156 key = key_generator(*arg, **kw)
160 key = key_generator(*arg, **kw)
157 self.set(key, value)
161 self.set(key, value)
158
162
159 def get(*arg, **kw):
163 def get(*arg, **kw):
160 key = key_generator(*arg, **kw)
164 key = key_generator(*arg, **kw)
161 return self.get(key)
165 return self.get(key)
162
166
163 user_func.set = set_
167 user_func.set = set_
164 user_func.invalidate = invalidate
168 user_func.invalidate = invalidate
165 user_func.get = get
169 user_func.get = get
166 user_func.refresh = refresh
170 user_func.refresh = refresh
167 user_func.key_generator = key_generator
171 user_func.key_generator = key_generator
168 user_func.original = user_func
172 user_func.original = user_func
169
173
170 # Use `decorate` to preserve the signature of :param:`user_func`.
174 # Use `decorate` to preserve the signature of :param:`user_func`.
171 return decorator.decorate(user_func, functools.partial(
175 return decorator.decorate(user_func, functools.partial(
172 get_or_create_for_user_func, key_generator))
176 get_or_create_for_user_func, key_generator))
173
177
174 return cache_decorator
178 return cache_decorator
175
179
176
180
177 def make_region(*arg, **kw):
181 def make_region(*arg, **kw):
178 return RhodeCodeCacheRegion(*arg, **kw)
182 return RhodeCodeCacheRegion(*arg, **kw)
179
183
180
184
181 def get_default_cache_settings(settings, prefixes=None):
185 def get_default_cache_settings(settings, prefixes=None):
182 prefixes = prefixes or []
186 prefixes = prefixes or []
183 cache_settings = {}
187 cache_settings = {}
184 for key in settings.keys():
188 for key in settings.keys():
185 for prefix in prefixes:
189 for prefix in prefixes:
186 if key.startswith(prefix):
190 if key.startswith(prefix):
187 name = key.split(prefix)[1].strip()
191 name = key.split(prefix)[1].strip()
188 val = settings[key]
192 val = settings[key]
189 if isinstance(val, compat.string_types):
193 if isinstance(val, compat.string_types):
190 val = val.strip()
194 val = val.strip()
191 cache_settings[name] = val
195 cache_settings[name] = val
192 return cache_settings
196 return cache_settings
193
197
194
198
195 def compute_key_from_params(*args):
199 def compute_key_from_params(*args):
196 """
200 """
197 Helper to compute key from given params to be used in cache manager
201 Helper to compute key from given params to be used in cache manager
198 """
202 """
199 return sha1("_".join(map(safe_str, args)))
203 return sha1("_".join(map(safe_str, args)))
200
204
201
205
202 def backend_key_generator(backend):
206 def backend_key_generator(backend):
203 """
207 """
204 Special wrapper that also sends over the backend to the key generator
208 Special wrapper that also sends over the backend to the key generator
205 """
209 """
206 def wrapper(namespace, fn):
210 def wrapper(namespace, fn):
207 return key_generator(backend, namespace, fn)
211 return key_generator(backend, namespace, fn)
208 return wrapper
212 return wrapper
209
213
210
214
211 def key_generator(backend, namespace, fn):
215 def key_generator(backend, namespace, fn):
212 fname = fn.__name__
216 fname = fn.__name__
213
217
214 def generate_key(*args):
218 def generate_key(*args):
215 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
219 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
216 namespace_pref = namespace or 'default_namespace'
220 namespace_pref = namespace or 'default_namespace'
217 arg_key = compute_key_from_params(*args)
221 arg_key = compute_key_from_params(*args)
218 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
222 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
219
223
220 return final_key
224 return final_key
221
225
222 return generate_key
226 return generate_key
223
227
224
228
225 def get_or_create_region(region_name, region_namespace=None):
229 def get_or_create_region(region_name, region_namespace=None):
226 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
230 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
227 region_obj = region_meta.dogpile_cache_regions.get(region_name)
231 region_obj = region_meta.dogpile_cache_regions.get(region_name)
228 if not region_obj:
232 if not region_obj:
229 raise EnvironmentError(
233 raise EnvironmentError(
230 'Region `{}` not in configured: {}.'.format(
234 'Region `{}` not in configured: {}.'.format(
231 region_name, region_meta.dogpile_cache_regions.keys()))
235 region_name, region_meta.dogpile_cache_regions.keys()))
232
236
233 region_uid_name = '{}:{}'.format(region_name, region_namespace)
237 region_uid_name = '{}:{}'.format(region_name, region_namespace)
234 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
238 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
235 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
239 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
236 if region_exist:
240 if region_exist:
237 log.debug('Using already configured region: %s', region_namespace)
241 log.debug('Using already configured region: %s', region_namespace)
238 return region_exist
242 return region_exist
239 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
243 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
240 expiration_time = region_obj.expiration_time
244 expiration_time = region_obj.expiration_time
241
245
242 if not os.path.isdir(cache_dir):
246 if not os.path.isdir(cache_dir):
243 os.makedirs(cache_dir)
247 os.makedirs(cache_dir)
244 new_region = make_region(
248 new_region = make_region(
245 name=region_uid_name,
249 name=region_uid_name,
246 function_key_generator=backend_key_generator(region_obj.actual_backend)
250 function_key_generator=backend_key_generator(region_obj.actual_backend)
247 )
251 )
248 namespace_filename = os.path.join(
252 namespace_filename = os.path.join(
249 cache_dir, "{}.cache.dbm".format(region_namespace))
253 cache_dir, "{}.cache.dbm".format(region_namespace))
250 # special type that allows 1db per namespace
254 # special type that allows 1db per namespace
251 new_region.configure(
255 new_region.configure(
252 backend='dogpile.cache.rc.file_namespace',
256 backend='dogpile.cache.rc.file_namespace',
253 expiration_time=expiration_time,
257 expiration_time=expiration_time,
254 arguments={"filename": namespace_filename}
258 arguments={"filename": namespace_filename}
255 )
259 )
256
260
257 # create and save in region caches
261 # create and save in region caches
258 log.debug('configuring new region: %s', region_uid_name)
262 log.debug('configuring new region: %s', region_uid_name)
259 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
263 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
260
264
261 return region_obj
265 return region_obj
262
266
263
267
264 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
268 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
265 region = get_or_create_region(cache_region, cache_namespace_uid)
269 region = get_or_create_region(cache_region, cache_namespace_uid)
266 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
270 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
267 num_delete_keys = len(cache_keys)
271 num_delete_keys = len(cache_keys)
268 if invalidate:
272 if invalidate:
269 region.invalidate(hard=False)
273 region.invalidate(hard=False)
270 else:
274 else:
271 if num_delete_keys:
275 if num_delete_keys:
272 region.delete_multi(cache_keys)
276 region.delete_multi(cache_keys)
273 return num_delete_keys
277 return num_delete_keys
274
278
275
279
276 class ActiveRegionCache(object):
280 class ActiveRegionCache(object):
277 def __init__(self, context, cache_data):
281 def __init__(self, context, cache_data):
278 self.context = context
282 self.context = context
279 self.cache_data = cache_data
283 self.cache_data = cache_data
280
284
281 def should_invalidate(self):
285 def should_invalidate(self):
282 return False
286 return False
283
287
284
288
285 class FreshRegionCache(object):
289 class FreshRegionCache(object):
286 def __init__(self, context, cache_data):
290 def __init__(self, context, cache_data):
287 self.context = context
291 self.context = context
288 self.cache_data = cache_data
292 self.cache_data = cache_data
289
293
290 def should_invalidate(self):
294 def should_invalidate(self):
291 return True
295 return True
292
296
293
297
294 class InvalidationContext(object):
298 class InvalidationContext(object):
295 """
299 """
296 usage::
300 usage::
297
301
298 from rhodecode.lib import rc_cache
302 from rhodecode.lib import rc_cache
299
303
300 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
304 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
301 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
305 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
302
306
303 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
307 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
304 def heavy_compute(cache_name, param1, param2):
308 def heavy_compute(cache_name, param1, param2):
305 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
309 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
306
310
307 # invalidation namespace is shared namespace key for all process caches
311 # invalidation namespace is shared namespace key for all process caches
308 # we use it to send a global signal
312 # we use it to send a global signal
309 invalidation_namespace = 'repo_cache:1'
313 invalidation_namespace = 'repo_cache:1'
310
314
311 inv_context_manager = rc_cache.InvalidationContext(
315 inv_context_manager = rc_cache.InvalidationContext(
312 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
316 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
313 with inv_context_manager as invalidation_context:
317 with inv_context_manager as invalidation_context:
314 args = ('one', 'two')
318 args = ('one', 'two')
315 # re-compute and store cache if we get invalidate signal
319 # re-compute and store cache if we get invalidate signal
316 if invalidation_context.should_invalidate():
320 if invalidation_context.should_invalidate():
317 result = heavy_compute.refresh(*args)
321 result = heavy_compute.refresh(*args)
318 else:
322 else:
319 result = heavy_compute(*args)
323 result = heavy_compute(*args)
320
324
321 compute_time = inv_context_manager.compute_time
325 compute_time = inv_context_manager.compute_time
322 log.debug('result computed in %.4fs', compute_time)
326 log.debug('result computed in %.4fs', compute_time)
323
327
324 # To send global invalidation signal, simply run
328 # To send global invalidation signal, simply run
325 CacheKey.set_invalidate(invalidation_namespace)
329 CacheKey.set_invalidate(invalidation_namespace)
326
330
327 """
331 """
328
332
329 def __repr__(self):
333 def __repr__(self):
330 return '<InvalidationContext:{}[{}]>'.format(
334 return '<InvalidationContext:{}[{}]>'.format(
331 safe_str(self.cache_key), safe_str(self.uid))
335 safe_str(self.cache_key), safe_str(self.uid))
332
336
333 def __init__(self, uid, invalidation_namespace='',
337 def __init__(self, uid, invalidation_namespace='',
334 raise_exception=False, thread_scoped=None):
338 raise_exception=False, thread_scoped=None):
335 self.uid = uid
339 self.uid = uid
336 self.invalidation_namespace = invalidation_namespace
340 self.invalidation_namespace = invalidation_namespace
337 self.raise_exception = raise_exception
341 self.raise_exception = raise_exception
338 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
342 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
339 self.thread_id = 'global'
343 self.thread_id = 'global'
340
344
341 if thread_scoped is None:
345 if thread_scoped is None:
342 # if we set "default" we can override this via .ini settings
346 # if we set "default" we can override this via .ini settings
343 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
347 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
344
348
345 # Append the thread id to the cache key if this invalidation context
349 # Append the thread id to the cache key if this invalidation context
346 # should be scoped to the current thread.
350 # should be scoped to the current thread.
347 if thread_scoped is True:
351 if thread_scoped is True:
348 self.thread_id = threading.current_thread().ident
352 self.thread_id = threading.current_thread().ident
349
353
350 self.cache_key = compute_key_from_params(uid)
354 self.cache_key = compute_key_from_params(uid)
351 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
355 self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
352 self.proc_id, self.thread_id, self.cache_key)
356 self.proc_id, self.thread_id, self.cache_key)
353 self.compute_time = 0
357 self.compute_time = 0
354
358
355 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
359 def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
356 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
360 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
357 # fetch all cache keys for this namespace and convert them to a map to find if we
361 # fetch all cache keys for this namespace and convert them to a map to find if we
358 # have specific cache_key object registered. We do this because we want to have
362 # have specific cache_key object registered. We do this because we want to have
359 # all consistent cache_state_uid for newly registered objects
363 # all consistent cache_state_uid for newly registered objects
360 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
364 cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
361 cache_obj = cache_obj_map.get(self.cache_key)
365 cache_obj = cache_obj_map.get(self.cache_key)
362 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
366 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
363 if not cache_obj:
367 if not cache_obj:
364 new_cache_args = invalidation_namespace
368 new_cache_args = invalidation_namespace
365 first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None
369 first_cache_obj = next(cache_obj_map.itervalues()) if cache_obj_map else None
366 cache_state_uid = None
370 cache_state_uid = None
367 if first_cache_obj:
371 if first_cache_obj:
368 cache_state_uid = first_cache_obj.cache_state_uid
372 cache_state_uid = first_cache_obj.cache_state_uid
369 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
373 cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
370 cache_state_uid=cache_state_uid)
374 cache_state_uid=cache_state_uid)
371 cache_key_meta.cache_keys_by_pid.append(self.cache_key)
375 cache_key_meta.cache_keys_by_pid.append(self.cache_key)
372
376
373 return cache_obj
377 return cache_obj
374
378
375 def __enter__(self):
379 def __enter__(self):
376 """
380 """
377 Test if current object is valid, and return CacheRegion function
381 Test if current object is valid, and return CacheRegion function
378 that does invalidation and calculation
382 that does invalidation and calculation
379 """
383 """
380 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
384 log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
381 # register or get a new key based on uid
385 # register or get a new key based on uid
382 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
386 self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
383 cache_data = self.cache_obj.get_dict()
387 cache_data = self.cache_obj.get_dict()
384 self._start_time = time.time()
388 self._start_time = time.time()
385 if self.cache_obj.cache_active:
389 if self.cache_obj.cache_active:
386 # means our cache obj is existing and marked as it's
390 # means our cache obj is existing and marked as it's
387 # cache is not outdated, we return ActiveRegionCache
391 # cache is not outdated, we return ActiveRegionCache
388 self.skip_cache_active_change = True
392 self.skip_cache_active_change = True
389
393
390 return ActiveRegionCache(context=self, cache_data=cache_data)
394 return ActiveRegionCache(context=self, cache_data=cache_data)
391
395
392 # the key is either not existing or set to False, we return
396 # the key is either not existing or set to False, we return
393 # the real invalidator which re-computes value. We additionally set
397 # the real invalidator which re-computes value. We additionally set
394 # the flag to actually update the Database objects
398 # the flag to actually update the Database objects
395 self.skip_cache_active_change = False
399 self.skip_cache_active_change = False
396 return FreshRegionCache(context=self, cache_data=cache_data)
400 return FreshRegionCache(context=self, cache_data=cache_data)
397
401
398 def __exit__(self, exc_type, exc_val, exc_tb):
402 def __exit__(self, exc_type, exc_val, exc_tb):
399 # save compute time
403 # save compute time
400 self.compute_time = time.time() - self._start_time
404 self.compute_time = time.time() - self._start_time
401
405
402 if self.skip_cache_active_change:
406 if self.skip_cache_active_change:
403 return
407 return
404
408
405 try:
409 try:
406 self.cache_obj.cache_active = True
410 self.cache_obj.cache_active = True
407 Session().add(self.cache_obj)
411 Session().add(self.cache_obj)
408 Session().commit()
412 Session().commit()
409 except IntegrityError:
413 except IntegrityError:
410 # if we catch integrity error, it means we inserted this object
414 # if we catch integrity error, it means we inserted this object
411 # assumption is that's really an edge race-condition case and
415 # assumption is that's really an edge race-condition case and
412 # it's safe is to skip it
416 # it's safe is to skip it
413 Session().rollback()
417 Session().rollback()
414 except Exception:
418 except Exception:
415 log.exception('Failed to commit on cache key update')
419 log.exception('Failed to commit on cache key update')
416 Session().rollback()
420 Session().rollback()
417 if self.raise_exception:
421 if self.raise_exception:
418 raise
422 raise
@@ -1,797 +1,853 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2017-2020 RhodeCode GmbH
3 # Copyright (C) 2017-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import os
22 import os
23 import sys
23 import sys
24 import time
24 import time
25 import platform
25 import platform
26 import collections
26 import collections
27 from functools import wraps
28
27 import pkg_resources
29 import pkg_resources
28 import logging
30 import logging
29 import resource
31 import resource
30
32
31 from pyramid.compat import configparser
33 from pyramid.compat import configparser
32
34
33 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
34
36
35
37
36 psutil = None
38 psutil = None
37
39
38 try:
40 try:
39 # cygwin cannot have yet psutil support.
41 # cygwin cannot have yet psutil support.
40 import psutil as psutil
42 import psutil as psutil
41 except ImportError:
43 except ImportError:
42 pass
44 pass
43
45
44
46
45 _NA = 'NOT AVAILABLE'
47 _NA = 'NOT AVAILABLE'
46
48
47 STATE_OK = 'ok'
49 STATE_OK = 'ok'
48 STATE_ERR = 'error'
50 STATE_ERR = 'error'
49 STATE_WARN = 'warning'
51 STATE_WARN = 'warning'
50
52
51 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
53 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
52
54
53
55
56 registered_helpers = {}
57
58
59 def register_sysinfo(func):
60 """
61 @register_helper
62 def db_check():
63 pass
64
65 db_check == registered_helpers['db_check']
66 """
67 global registered_helpers
68 registered_helpers[func.__name__] = func
69
70 @wraps(func)
71 def _wrapper(*args, **kwargs):
72 return func(*args, **kwargs)
73 return _wrapper
74
75
54 # HELPERS
76 # HELPERS
55 def percentage(part, whole):
77 def percentage(part, whole):
56 whole = float(whole)
78 whole = float(whole)
57 if whole > 0:
79 if whole > 0:
58 return round(100 * float(part) / whole, 1)
80 return round(100 * float(part) / whole, 1)
59 return 0.0
81 return 0.0
60
82
61
83
62 def get_storage_size(storage_path):
84 def get_storage_size(storage_path):
63 sizes = []
85 sizes = []
64 for file_ in os.listdir(storage_path):
86 for file_ in os.listdir(storage_path):
65 storage_file = os.path.join(storage_path, file_)
87 storage_file = os.path.join(storage_path, file_)
66 if os.path.isfile(storage_file):
88 if os.path.isfile(storage_file):
67 try:
89 try:
68 sizes.append(os.path.getsize(storage_file))
90 sizes.append(os.path.getsize(storage_file))
69 except OSError:
91 except OSError:
70 log.exception('Failed to get size of storage file %s', storage_file)
92 log.exception('Failed to get size of storage file %s', storage_file)
71 pass
93 pass
72
94
73 return sum(sizes)
95 return sum(sizes)
74
96
75
97
76 def get_resource(resource_type):
98 def get_resource(resource_type):
77 try:
99 try:
78 return resource.getrlimit(resource_type)
100 return resource.getrlimit(resource_type)
79 except Exception:
101 except Exception:
80 return 'NOT_SUPPORTED'
102 return 'NOT_SUPPORTED'
81
103
82
104
83 def get_cert_path(ini_path):
105 def get_cert_path(ini_path):
84 default = '/etc/ssl/certs/ca-certificates.crt'
106 default = '/etc/ssl/certs/ca-certificates.crt'
85 control_ca_bundle = os.path.join(
107 control_ca_bundle = os.path.join(
86 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
108 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
87 '.rccontrol-profile/etc/ca-bundle.crt')
109 '.rccontrol-profile/etc/ca-bundle.crt')
88 if os.path.isfile(control_ca_bundle):
110 if os.path.isfile(control_ca_bundle):
89 default = control_ca_bundle
111 default = control_ca_bundle
90
112
91 return default
113 return default
92
114
93
115
94 class SysInfoRes(object):
116 class SysInfoRes(object):
95 def __init__(self, value, state=None, human_value=None):
117 def __init__(self, value, state=None, human_value=None):
96 self.value = value
118 self.value = value
97 self.state = state or STATE_OK_DEFAULT
119 self.state = state or STATE_OK_DEFAULT
98 self.human_value = human_value or value
120 self.human_value = human_value or value
99
121
100 def __json__(self):
122 def __json__(self):
101 return {
123 return {
102 'value': self.value,
124 'value': self.value,
103 'state': self.state,
125 'state': self.state,
104 'human_value': self.human_value,
126 'human_value': self.human_value,
105 }
127 }
106
128
107 def get_value(self):
129 def get_value(self):
108 return self.__json__()
130 return self.__json__()
109
131
110 def __str__(self):
132 def __str__(self):
111 return '<SysInfoRes({})>'.format(self.__json__())
133 return '<SysInfoRes({})>'.format(self.__json__())
112
134
113
135
114 class SysInfo(object):
136 class SysInfo(object):
115
137
116 def __init__(self, func_name, **kwargs):
138 def __init__(self, func_name, **kwargs):
117 self.func_name = func_name
139 self.func_name = func_name
118 self.value = _NA
140 self.value = _NA
119 self.state = None
141 self.state = None
120 self.kwargs = kwargs or {}
142 self.kwargs = kwargs or {}
121
143
122 def __call__(self):
144 def __call__(self):
123 computed = self.compute(**self.kwargs)
145 computed = self.compute(**self.kwargs)
124 if not isinstance(computed, SysInfoRes):
146 if not isinstance(computed, SysInfoRes):
125 raise ValueError(
147 raise ValueError(
126 'computed value for {} is not instance of '
148 'computed value for {} is not instance of '
127 '{}, got {} instead'.format(
149 '{}, got {} instead'.format(
128 self.func_name, SysInfoRes, type(computed)))
150 self.func_name, SysInfoRes, type(computed)))
129 return computed.__json__()
151 return computed.__json__()
130
152
131 def __str__(self):
153 def __str__(self):
132 return '<SysInfo({})>'.format(self.func_name)
154 return '<SysInfo({})>'.format(self.func_name)
133
155
134 def compute(self, **kwargs):
156 def compute(self, **kwargs):
135 return self.func_name(**kwargs)
157 return self.func_name(**kwargs)
136
158
137
159
138 # SysInfo functions
160 # SysInfo functions
161 @register_sysinfo
139 def python_info():
162 def python_info():
140 value = dict(version=' '.join(platform._sys_version()),
163 value = dict(version=' '.join(platform._sys_version()),
141 executable=sys.executable)
164 executable=sys.executable)
142 return SysInfoRes(value=value)
165 return SysInfoRes(value=value)
143
166
144
167
168 @register_sysinfo
145 def py_modules():
169 def py_modules():
146 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
170 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
147 for p in pkg_resources.working_set])
171 for p in pkg_resources.working_set])
148
172
149 value = sorted(mods.items(), key=lambda k: k[0].lower())
173 value = sorted(mods.items(), key=lambda k: k[0].lower())
150 return SysInfoRes(value=value)
174 return SysInfoRes(value=value)
151
175
152
176
177 @register_sysinfo
153 def platform_type():
178 def platform_type():
154 from rhodecode.lib.utils import safe_unicode, generate_platform_uuid
179 from rhodecode.lib.utils import safe_unicode, generate_platform_uuid
155
180
156 value = dict(
181 value = dict(
157 name=safe_unicode(platform.platform()),
182 name=safe_unicode(platform.platform()),
158 uuid=generate_platform_uuid()
183 uuid=generate_platform_uuid()
159 )
184 )
160 return SysInfoRes(value=value)
185 return SysInfoRes(value=value)
161
186
162
187
188 @register_sysinfo
163 def locale_info():
189 def locale_info():
164 import locale
190 import locale
165
191
166 value = dict(
192 value = dict(
167 locale_default=locale.getdefaultlocale(),
193 locale_default=locale.getdefaultlocale(),
168 locale_lc_all=locale.getlocale(locale.LC_ALL),
194 locale_lc_all=locale.getlocale(locale.LC_ALL),
169 lang_env=os.environ.get('LANG'),
195 lang_env=os.environ.get('LANG'),
170 lc_all_env=os.environ.get('LC_ALL'),
196 lc_all_env=os.environ.get('LC_ALL'),
171 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
197 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
172 )
198 )
173 human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format(
199 human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format(
174 value['lang_env'], value['locale_lc_all'], value['locale_default'])
200 value['lang_env'], value['locale_lc_all'], value['locale_default'])
175 return SysInfoRes(value=value, human_value=human_value)
201 return SysInfoRes(value=value, human_value=human_value)
176
202
177
203
204 @register_sysinfo
178 def ulimit_info():
205 def ulimit_info():
179 data = collections.OrderedDict([
206 data = collections.OrderedDict([
180 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
207 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
181 ('file size', get_resource(resource.RLIMIT_FSIZE)),
208 ('file size', get_resource(resource.RLIMIT_FSIZE)),
182 ('stack size', get_resource(resource.RLIMIT_STACK)),
209 ('stack size', get_resource(resource.RLIMIT_STACK)),
183 ('core file size', get_resource(resource.RLIMIT_CORE)),
210 ('core file size', get_resource(resource.RLIMIT_CORE)),
184 ('address space size', get_resource(resource.RLIMIT_AS)),
211 ('address space size', get_resource(resource.RLIMIT_AS)),
185 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
212 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
186 ('heap size', get_resource(resource.RLIMIT_DATA)),
213 ('heap size', get_resource(resource.RLIMIT_DATA)),
187 ('rss size', get_resource(resource.RLIMIT_RSS)),
214 ('rss size', get_resource(resource.RLIMIT_RSS)),
188 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
215 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
189 ('open files', get_resource(resource.RLIMIT_NOFILE)),
216 ('open files', get_resource(resource.RLIMIT_NOFILE)),
190 ])
217 ])
191
218
192 text = ', '.join('{}:{}'.format(k, v) for k, v in data.items())
219 text = ', '.join('{}:{}'.format(k, v) for k, v in data.items())
193
220
194 value = {
221 value = {
195 'limits': data,
222 'limits': data,
196 'text': text,
223 'text': text,
197 }
224 }
198 return SysInfoRes(value=value)
225 return SysInfoRes(value=value)
199
226
200
227
228 @register_sysinfo
201 def uptime():
229 def uptime():
202 from rhodecode.lib.helpers import age, time_to_datetime
230 from rhodecode.lib.helpers import age, time_to_datetime
203 from rhodecode.translation import TranslationString
231 from rhodecode.translation import TranslationString
204
232
205 value = dict(boot_time=0, uptime=0, text='')
233 value = dict(boot_time=0, uptime=0, text='')
206 state = STATE_OK_DEFAULT
234 state = STATE_OK_DEFAULT
207 if not psutil:
235 if not psutil:
208 return SysInfoRes(value=value, state=state)
236 return SysInfoRes(value=value, state=state)
209
237
210 boot_time = psutil.boot_time()
238 boot_time = psutil.boot_time()
211 value['boot_time'] = boot_time
239 value['boot_time'] = boot_time
212 value['uptime'] = time.time() - boot_time
240 value['uptime'] = time.time() - boot_time
213
241
214 date_or_age = age(time_to_datetime(boot_time))
242 date_or_age = age(time_to_datetime(boot_time))
215 if isinstance(date_or_age, TranslationString):
243 if isinstance(date_or_age, TranslationString):
216 date_or_age = date_or_age.interpolate()
244 date_or_age = date_or_age.interpolate()
217
245
218 human_value = value.copy()
246 human_value = value.copy()
219 human_value['boot_time'] = time_to_datetime(boot_time)
247 human_value['boot_time'] = time_to_datetime(boot_time)
220 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
248 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
221
249
222 human_value['text'] = u'Server started {}'.format(date_or_age)
250 human_value['text'] = u'Server started {}'.format(date_or_age)
223 return SysInfoRes(value=value, human_value=human_value)
251 return SysInfoRes(value=value, human_value=human_value)
224
252
225
253
254 @register_sysinfo
226 def memory():
255 def memory():
227 from rhodecode.lib.helpers import format_byte_size_binary
256 from rhodecode.lib.helpers import format_byte_size_binary
228 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
257 value = dict(available=0, used=0, used_real=0, cached=0, percent=0,
229 percent_used=0, free=0, inactive=0, active=0, shared=0,
258 percent_used=0, free=0, inactive=0, active=0, shared=0,
230 total=0, buffers=0, text='')
259 total=0, buffers=0, text='')
231
260
232 state = STATE_OK_DEFAULT
261 state = STATE_OK_DEFAULT
233 if not psutil:
262 if not psutil:
234 return SysInfoRes(value=value, state=state)
263 return SysInfoRes(value=value, state=state)
235
264
236 value.update(dict(psutil.virtual_memory()._asdict()))
265 value.update(dict(psutil.virtual_memory()._asdict()))
237 value['used_real'] = value['total'] - value['available']
266 value['used_real'] = value['total'] - value['available']
238 value['percent_used'] = psutil._common.usage_percent(
267 value['percent_used'] = psutil._common.usage_percent(
239 value['used_real'], value['total'], 1)
268 value['used_real'], value['total'], 1)
240
269
241 human_value = value.copy()
270 human_value = value.copy()
242 human_value['text'] = '%s/%s, %s%% used' % (
271 human_value['text'] = '%s/%s, %s%% used' % (
243 format_byte_size_binary(value['used_real']),
272 format_byte_size_binary(value['used_real']),
244 format_byte_size_binary(value['total']),
273 format_byte_size_binary(value['total']),
245 value['percent_used'],)
274 value['percent_used'],)
246
275
247 keys = value.keys()[::]
276 keys = value.keys()[::]
248 keys.pop(keys.index('percent'))
277 keys.pop(keys.index('percent'))
249 keys.pop(keys.index('percent_used'))
278 keys.pop(keys.index('percent_used'))
250 keys.pop(keys.index('text'))
279 keys.pop(keys.index('text'))
251 for k in keys:
280 for k in keys:
252 human_value[k] = format_byte_size_binary(value[k])
281 human_value[k] = format_byte_size_binary(value[k])
253
282
254 if state['type'] == STATE_OK and value['percent_used'] > 90:
283 if state['type'] == STATE_OK and value['percent_used'] > 90:
255 msg = 'Critical: your available RAM memory is very low.'
284 msg = 'Critical: your available RAM memory is very low.'
256 state = {'message': msg, 'type': STATE_ERR}
285 state = {'message': msg, 'type': STATE_ERR}
257
286
258 elif state['type'] == STATE_OK and value['percent_used'] > 70:
287 elif state['type'] == STATE_OK and value['percent_used'] > 70:
259 msg = 'Warning: your available RAM memory is running low.'
288 msg = 'Warning: your available RAM memory is running low.'
260 state = {'message': msg, 'type': STATE_WARN}
289 state = {'message': msg, 'type': STATE_WARN}
261
290
262 return SysInfoRes(value=value, state=state, human_value=human_value)
291 return SysInfoRes(value=value, state=state, human_value=human_value)
263
292
264
293
294 @register_sysinfo
265 def machine_load():
295 def machine_load():
266 value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''}
296 value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''}
267 state = STATE_OK_DEFAULT
297 state = STATE_OK_DEFAULT
268 if not psutil:
298 if not psutil:
269 return SysInfoRes(value=value, state=state)
299 return SysInfoRes(value=value, state=state)
270
300
271 # load averages
301 # load averages
272 if hasattr(psutil.os, 'getloadavg'):
302 if hasattr(psutil.os, 'getloadavg'):
273 value.update(dict(
303 value.update(dict(
274 zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg())))
304 zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg())))
275
305
276 human_value = value.copy()
306 human_value = value.copy()
277 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
307 human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
278 value['1_min'], value['5_min'], value['15_min'])
308 value['1_min'], value['5_min'], value['15_min'])
279
309
280 if state['type'] == STATE_OK and value['15_min'] > 5:
310 if state['type'] == STATE_OK and value['15_min'] > 5:
281 msg = 'Warning: your machine load is very high.'
311 msg = 'Warning: your machine load is very high.'
282 state = {'message': msg, 'type': STATE_WARN}
312 state = {'message': msg, 'type': STATE_WARN}
283
313
284 return SysInfoRes(value=value, state=state, human_value=human_value)
314 return SysInfoRes(value=value, state=state, human_value=human_value)
285
315
286
316
317 @register_sysinfo
287 def cpu():
318 def cpu():
288 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
319 value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}
289 state = STATE_OK_DEFAULT
320 state = STATE_OK_DEFAULT
290
321
291 if not psutil:
322 if not psutil:
292 return SysInfoRes(value=value, state=state)
323 return SysInfoRes(value=value, state=state)
293
324
294 value['cpu'] = psutil.cpu_percent(0.5)
325 value['cpu'] = psutil.cpu_percent(0.5)
295 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
326 value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
296 value['cpu_count'] = psutil.cpu_count()
327 value['cpu_count'] = psutil.cpu_count()
297
328
298 human_value = value.copy()
329 human_value = value.copy()
299 human_value['text'] = '{} cores at {} %'.format(
330 human_value['text'] = '{} cores at {} %'.format(
300 value['cpu_count'], value['cpu'])
331 value['cpu_count'], value['cpu'])
301
332
302 return SysInfoRes(value=value, state=state, human_value=human_value)
333 return SysInfoRes(value=value, state=state, human_value=human_value)
303
334
304
335
336 @register_sysinfo
305 def storage():
337 def storage():
306 from rhodecode.lib.helpers import format_byte_size_binary
338 from rhodecode.lib.helpers import format_byte_size_binary
307 from rhodecode.model.settings import VcsSettingsModel
339 from rhodecode.model.settings import VcsSettingsModel
308 path = VcsSettingsModel().get_repos_location()
340 path = VcsSettingsModel().get_repos_location()
309
341
310 value = dict(percent=0, used=0, total=0, path=path, text='')
342 value = dict(percent=0, used=0, total=0, path=path, text='')
311 state = STATE_OK_DEFAULT
343 state = STATE_OK_DEFAULT
312 if not psutil:
344 if not psutil:
313 return SysInfoRes(value=value, state=state)
345 return SysInfoRes(value=value, state=state)
314
346
315 try:
347 try:
316 value.update(dict(psutil.disk_usage(path)._asdict()))
348 value.update(dict(psutil.disk_usage(path)._asdict()))
317 except Exception as e:
349 except Exception as e:
318 log.exception('Failed to fetch disk info')
350 log.exception('Failed to fetch disk info')
319 state = {'message': str(e), 'type': STATE_ERR}
351 state = {'message': str(e), 'type': STATE_ERR}
320
352
321 human_value = value.copy()
353 human_value = value.copy()
322 human_value['used'] = format_byte_size_binary(value['used'])
354 human_value['used'] = format_byte_size_binary(value['used'])
323 human_value['total'] = format_byte_size_binary(value['total'])
355 human_value['total'] = format_byte_size_binary(value['total'])
324 human_value['text'] = "{}/{}, {}% used".format(
356 human_value['text'] = "{}/{}, {}% used".format(
325 format_byte_size_binary(value['used']),
357 format_byte_size_binary(value['used']),
326 format_byte_size_binary(value['total']),
358 format_byte_size_binary(value['total']),
327 value['percent'])
359 value['percent'])
328
360
329 if state['type'] == STATE_OK and value['percent'] > 90:
361 if state['type'] == STATE_OK and value['percent'] > 90:
330 msg = 'Critical: your disk space is very low.'
362 msg = 'Critical: your disk space is very low.'
331 state = {'message': msg, 'type': STATE_ERR}
363 state = {'message': msg, 'type': STATE_ERR}
332
364
333 elif state['type'] == STATE_OK and value['percent'] > 70:
365 elif state['type'] == STATE_OK and value['percent'] > 70:
334 msg = 'Warning: your disk space is running low.'
366 msg = 'Warning: your disk space is running low.'
335 state = {'message': msg, 'type': STATE_WARN}
367 state = {'message': msg, 'type': STATE_WARN}
336
368
337 return SysInfoRes(value=value, state=state, human_value=human_value)
369 return SysInfoRes(value=value, state=state, human_value=human_value)
338
370
339
371
372 @register_sysinfo
340 def storage_inodes():
373 def storage_inodes():
341 from rhodecode.model.settings import VcsSettingsModel
374 from rhodecode.model.settings import VcsSettingsModel
342 path = VcsSettingsModel().get_repos_location()
375 path = VcsSettingsModel().get_repos_location()
343
376
344 value = dict(percent=0, free=0, used=0, total=0, path=path, text='')
377 value = dict(percent=0, free=0, used=0, total=0, path=path, text='')
345 state = STATE_OK_DEFAULT
378 state = STATE_OK_DEFAULT
346 if not psutil:
379 if not psutil:
347 return SysInfoRes(value=value, state=state)
380 return SysInfoRes(value=value, state=state)
348
381
349 try:
382 try:
350 i_stat = os.statvfs(path)
383 i_stat = os.statvfs(path)
351 value['free'] = i_stat.f_ffree
384 value['free'] = i_stat.f_ffree
352 value['used'] = i_stat.f_files-i_stat.f_favail
385 value['used'] = i_stat.f_files-i_stat.f_favail
353 value['total'] = i_stat.f_files
386 value['total'] = i_stat.f_files
354 value['percent'] = percentage(value['used'], value['total'])
387 value['percent'] = percentage(value['used'], value['total'])
355 except Exception as e:
388 except Exception as e:
356 log.exception('Failed to fetch disk inodes info')
389 log.exception('Failed to fetch disk inodes info')
357 state = {'message': str(e), 'type': STATE_ERR}
390 state = {'message': str(e), 'type': STATE_ERR}
358
391
359 human_value = value.copy()
392 human_value = value.copy()
360 human_value['text'] = "{}/{}, {}% used".format(
393 human_value['text'] = "{}/{}, {}% used".format(
361 value['used'], value['total'], value['percent'])
394 value['used'], value['total'], value['percent'])
362
395
363 if state['type'] == STATE_OK and value['percent'] > 90:
396 if state['type'] == STATE_OK and value['percent'] > 90:
364 msg = 'Critical: your disk free inodes are very low.'
397 msg = 'Critical: your disk free inodes are very low.'
365 state = {'message': msg, 'type': STATE_ERR}
398 state = {'message': msg, 'type': STATE_ERR}
366
399
367 elif state['type'] == STATE_OK and value['percent'] > 70:
400 elif state['type'] == STATE_OK and value['percent'] > 70:
368 msg = 'Warning: your disk free inodes are running low.'
401 msg = 'Warning: your disk free inodes are running low.'
369 state = {'message': msg, 'type': STATE_WARN}
402 state = {'message': msg, 'type': STATE_WARN}
370
403
371 return SysInfoRes(value=value, state=state, human_value=human_value)
404 return SysInfoRes(value=value, state=state, human_value=human_value)
372
405
373
406
407 @register_sysinfo
374 def storage_archives():
408 def storage_archives():
375 import rhodecode
409 import rhodecode
376 from rhodecode.lib.utils import safe_str
410 from rhodecode.lib.utils import safe_str
377 from rhodecode.lib.helpers import format_byte_size_binary
411 from rhodecode.lib.helpers import format_byte_size_binary
378
412
379 msg = 'Enable this by setting ' \
413 msg = 'Enable this by setting ' \
380 'archive_cache_dir=/path/to/cache option in the .ini file'
414 'archive_cache_dir=/path/to/cache option in the .ini file'
381 path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg))
415 path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg))
382
416
383 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
417 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
384 state = STATE_OK_DEFAULT
418 state = STATE_OK_DEFAULT
385 try:
419 try:
386 items_count = 0
420 items_count = 0
387 used = 0
421 used = 0
388 for root, dirs, files in os.walk(path):
422 for root, dirs, files in os.walk(path):
389 if root == path:
423 if root == path:
390 items_count = len(files)
424 items_count = len(files)
391
425
392 for f in files:
426 for f in files:
393 try:
427 try:
394 used += os.path.getsize(os.path.join(root, f))
428 used += os.path.getsize(os.path.join(root, f))
395 except OSError:
429 except OSError:
396 pass
430 pass
397 value.update({
431 value.update({
398 'percent': 100,
432 'percent': 100,
399 'used': used,
433 'used': used,
400 'total': used,
434 'total': used,
401 'items': items_count
435 'items': items_count
402 })
436 })
403
437
404 except Exception as e:
438 except Exception as e:
405 log.exception('failed to fetch archive cache storage')
439 log.exception('failed to fetch archive cache storage')
406 state = {'message': str(e), 'type': STATE_ERR}
440 state = {'message': str(e), 'type': STATE_ERR}
407
441
408 human_value = value.copy()
442 human_value = value.copy()
409 human_value['used'] = format_byte_size_binary(value['used'])
443 human_value['used'] = format_byte_size_binary(value['used'])
410 human_value['total'] = format_byte_size_binary(value['total'])
444 human_value['total'] = format_byte_size_binary(value['total'])
411 human_value['text'] = "{} ({} items)".format(
445 human_value['text'] = "{} ({} items)".format(
412 human_value['used'], value['items'])
446 human_value['used'], value['items'])
413
447
414 return SysInfoRes(value=value, state=state, human_value=human_value)
448 return SysInfoRes(value=value, state=state, human_value=human_value)
415
449
416
450
451 @register_sysinfo
417 def storage_gist():
452 def storage_gist():
418 from rhodecode.model.gist import GIST_STORE_LOC
453 from rhodecode.model.gist import GIST_STORE_LOC
419 from rhodecode.model.settings import VcsSettingsModel
454 from rhodecode.model.settings import VcsSettingsModel
420 from rhodecode.lib.utils import safe_str
455 from rhodecode.lib.utils import safe_str
421 from rhodecode.lib.helpers import format_byte_size_binary
456 from rhodecode.lib.helpers import format_byte_size_binary
422 path = safe_str(os.path.join(
457 path = safe_str(os.path.join(
423 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
458 VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))
424
459
425 # gist storage
460 # gist storage
426 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
461 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
427 state = STATE_OK_DEFAULT
462 state = STATE_OK_DEFAULT
428
463
429 try:
464 try:
430 items_count = 0
465 items_count = 0
431 used = 0
466 used = 0
432 for root, dirs, files in os.walk(path):
467 for root, dirs, files in os.walk(path):
433 if root == path:
468 if root == path:
434 items_count = len(dirs)
469 items_count = len(dirs)
435
470
436 for f in files:
471 for f in files:
437 try:
472 try:
438 used += os.path.getsize(os.path.join(root, f))
473 used += os.path.getsize(os.path.join(root, f))
439 except OSError:
474 except OSError:
440 pass
475 pass
441 value.update({
476 value.update({
442 'percent': 100,
477 'percent': 100,
443 'used': used,
478 'used': used,
444 'total': used,
479 'total': used,
445 'items': items_count
480 'items': items_count
446 })
481 })
447 except Exception as e:
482 except Exception as e:
448 log.exception('failed to fetch gist storage items')
483 log.exception('failed to fetch gist storage items')
449 state = {'message': str(e), 'type': STATE_ERR}
484 state = {'message': str(e), 'type': STATE_ERR}
450
485
451 human_value = value.copy()
486 human_value = value.copy()
452 human_value['used'] = format_byte_size_binary(value['used'])
487 human_value['used'] = format_byte_size_binary(value['used'])
453 human_value['total'] = format_byte_size_binary(value['total'])
488 human_value['total'] = format_byte_size_binary(value['total'])
454 human_value['text'] = "{} ({} items)".format(
489 human_value['text'] = "{} ({} items)".format(
455 human_value['used'], value['items'])
490 human_value['used'], value['items'])
456
491
457 return SysInfoRes(value=value, state=state, human_value=human_value)
492 return SysInfoRes(value=value, state=state, human_value=human_value)
458
493
459
494
495 @register_sysinfo
460 def storage_temp():
496 def storage_temp():
461 import tempfile
497 import tempfile
462 from rhodecode.lib.helpers import format_byte_size_binary
498 from rhodecode.lib.helpers import format_byte_size_binary
463
499
464 path = tempfile.gettempdir()
500 path = tempfile.gettempdir()
465 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
501 value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
466 state = STATE_OK_DEFAULT
502 state = STATE_OK_DEFAULT
467
503
468 if not psutil:
504 if not psutil:
469 return SysInfoRes(value=value, state=state)
505 return SysInfoRes(value=value, state=state)
470
506
471 try:
507 try:
472 value.update(dict(psutil.disk_usage(path)._asdict()))
508 value.update(dict(psutil.disk_usage(path)._asdict()))
473 except Exception as e:
509 except Exception as e:
474 log.exception('Failed to fetch temp dir info')
510 log.exception('Failed to fetch temp dir info')
475 state = {'message': str(e), 'type': STATE_ERR}
511 state = {'message': str(e), 'type': STATE_ERR}
476
512
477 human_value = value.copy()
513 human_value = value.copy()
478 human_value['used'] = format_byte_size_binary(value['used'])
514 human_value['used'] = format_byte_size_binary(value['used'])
479 human_value['total'] = format_byte_size_binary(value['total'])
515 human_value['total'] = format_byte_size_binary(value['total'])
480 human_value['text'] = "{}/{}, {}% used".format(
516 human_value['text'] = "{}/{}, {}% used".format(
481 format_byte_size_binary(value['used']),
517 format_byte_size_binary(value['used']),
482 format_byte_size_binary(value['total']),
518 format_byte_size_binary(value['total']),
483 value['percent'])
519 value['percent'])
484
520
485 return SysInfoRes(value=value, state=state, human_value=human_value)
521 return SysInfoRes(value=value, state=state, human_value=human_value)
486
522
487
523
@register_sysinfo
def search_info():
    """Report the configured full-text search backend and index location."""
    import rhodecode
    from rhodecode.lib.index import searcher_from_config

    backend = rhodecode.CONFIG.get('search.module', '')
    location = rhodecode.CONFIG.get('search.location', '')

    # Resolve the concrete searcher class name; any failure means search
    # is unavailable and gets reported as None rather than raising here.
    try:
        searcher_name = searcher_from_config(rhodecode.CONFIG).__class__.__name__
    except Exception:
        searcher_name = None

    value = {
        'backend': backend,
        'searcher': searcher_name,
        'location': location,
        'text': '',
    }

    human_value = dict(value)
    human_value['text'] = "backend:`{}`".format(backend)

    return SysInfoRes(
        value=value, state=STATE_OK_DEFAULT, human_value=human_value)
509
546
510
547
@register_sysinfo
def git_info():
    """Report the git version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import git

    value = ''
    human_value = ''
    state = STATE_OK_DEFAULT
    try:
        value = git.discover_git_version(raise_on_exc=True)
        human_value = 'version reported from VCSServer: {}'.format(value)
    except Exception as e:
        # Probe failed (e.g. VCSServer unreachable) -> report error state.
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)
522
560
523
561
@register_sysinfo
def hg_info():
    """Report the Mercurial version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import hg

    value = ''
    human_value = ''
    state = STATE_OK_DEFAULT
    try:
        value = hg.discover_hg_version(raise_on_exc=True)
        human_value = 'version reported from VCSServer: {}'.format(value)
    except Exception as e:
        # Probe failed (e.g. VCSServer unreachable) -> report error state.
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)
534
573
535
574
@register_sysinfo
def svn_info():
    """Report the Subversion version as discovered via the VCSServer."""
    from rhodecode.lib.vcs.backends import svn

    value = ''
    human_value = ''
    state = STATE_OK_DEFAULT
    try:
        value = svn.discover_svn_version(raise_on_exc=True)
        human_value = 'version reported from VCSServer: {}'.format(value)
    except Exception as e:
        # Probe failed (e.g. VCSServer unreachable) -> report error state.
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)
546
586
547
587
@register_sysinfo
def vcs_backends():
    """Report which VCS backends are enabled, in their configured order."""
    import rhodecode
    # ``vcs.backends`` may be absent from CONFIG; fall back to an empty
    # list so the join below cannot raise TypeError on ``None``.
    value = rhodecode.CONFIG.get('vcs.backends') or []
    human_value = 'Enabled backends in order: {}'.format(','.join(value))
    return SysInfoRes(value=value, human_value=human_value)
553
594
554
595
@register_sysinfo
def vcs_server():
    """Report VCSServer connectivity, version and worker configuration."""
    import rhodecode
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    server_url = rhodecode.CONFIG.get('vcs.server')
    enabled = rhodecode.CONFIG.get('vcs.server.enable')
    protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'

    state = STATE_OK_DEFAULT
    version = None
    workers = 0

    try:
        data = get_vcsserver_service_data()
        if data and 'version' in data:
            version = data['version']
        if data and 'config' in data:
            workers = data['config'].get('workers', 'NOT AVAILABLE')
        connection = 'connected'
    except Exception as e:
        # Could not reach the VCSServer at all.
        connection = 'failed'
        state = {'message': str(e), 'type': STATE_ERR}

    value = {
        'url': server_url,
        'enabled': enabled,
        'protocol': protocol,
        'connection': connection,
        'version': version,
        'text': '',
    }

    human_value = dict(value)
    human_value['text'] = (
        '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
            url=server_url, ver=version, workers=workers, mode=protocol,
            conn=connection))

    return SysInfoRes(value=value, state=state, human_value=human_value)
596
638
597
639
@register_sysinfo
def vcs_server_config():
    """Expose the application config reported back by the VCSServer."""
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    state = STATE_OK_DEFAULT
    value = {}
    try:
        value = get_vcsserver_service_data()['app_config']
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['text'] = 'VCS Server config'

    return SysInfoRes(value=value, state=state, human_value=human_value)
613
656
614
657
@register_sysinfo
def rhodecode_app_info():
    """Report the running RhodeCode version, edition and install path."""
    import rhodecode

    edition = rhodecode.CONFIG.get('rhodecode.edition')
    value = {
        'rhodecode_version': rhodecode.__version__,
        'rhodecode_lib_path': os.path.abspath(rhodecode.__file__),
        'text': '',
    }
    human_value = dict(value)
    human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
        edition=edition, ver=value['rhodecode_version'])
    return SysInfoRes(value=value, human_value=human_value)
629
673
630
674
@register_sysinfo
def rhodecode_config():
    """Return the loaded .ini configuration with secret values obfuscated."""
    import rhodecode

    path = rhodecode.CONFIG.get('__file__')
    rhodecode_ini_safe = rhodecode.CONFIG.copy()
    cert_path = get_cert_path(path)

    # Re-read the ini file to also expose the [server:main] section,
    # which is not part of the runtime CONFIG mapping.
    try:
        config = configparser.ConfigParser()
        config.read(path)
        parsed_ini = config
        if parsed_ini.has_section('server:main'):
            parsed_ini = dict(parsed_ini.items('server:main'))
    except Exception:
        log.exception('Failed to read .ini file for display')
        parsed_ini = {}

    rhodecode_ini_safe['server:main'] = parsed_ini

    # Keys that must never be shown in the system-info output. Plain
    # strings are dropped entirely; (section, key) tuples obfuscate the
    # whole section value (NOTE(review): the key part is currently
    # unused -- the entire section is replaced; confirm that is intended).
    blacklist = [
        'rhodecode_license_key',
        'routes.map',
        'sqlalchemy.db1.url',
        'channelstream.secret',
        'beaker.session.secret',
        'rhodecode.encrypted_values.secret',
        'rhodecode_auth_github_consumer_key',
        'rhodecode_auth_github_consumer_secret',
        'rhodecode_auth_google_consumer_key',
        'rhodecode_auth_google_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_key',
        'rhodecode_auth_twitter_consumer_secret',
        'rhodecode_auth_twitter_consumer_key',

        'rhodecode_auth_twitter_secret',
        'rhodecode_auth_github_secret',
        'rhodecode_auth_google_secret',
        'rhodecode_auth_bitbucket_secret',

        'appenlight.api_key',
        ('app_conf', 'sqlalchemy.db1.url')
    ]
    for entry in blacklist:
        if isinstance(entry, tuple):
            section, _key = entry
            if section in rhodecode_ini_safe:
                rhodecode_ini_safe[section] = '**OBFUSCATED**'
        else:
            rhodecode_ini_safe.pop(entry, None)

    # TODO: maybe put some CONFIG checks here ?
    return SysInfoRes(value={'config': rhodecode_ini_safe,
                             'path': path, 'cert_path': cert_path})
684
729
685
730
@register_sysinfo
def database_info():
    """Report database type/version and check the migration schema version."""
    import rhodecode
    from sqlalchemy.engine import url as engine_url
    from rhodecode.model.meta import Base as sql_base, Session
    from rhodecode.model.db import DbMigrateVersion

    state = STATE_OK_DEFAULT

    db_migrate = DbMigrateVersion.query().filter(
        DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()

    db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])

    # Ask the live connection for the server version; best-effort only.
    try:
        engine = sql_base.metadata.bind
        db_server_info = engine.dialect._get_server_version_info(
            Session.connection(bind=engine))
        db_version = '.'.join(map(str, db_server_info))
    except Exception:
        log.exception('failed to fetch db version')
        db_version = 'UNKNOWN'

    db_info = {
        'migrate_version': db_migrate.version,
        'type': db_url_obj.get_backend_name(),
        'version': db_version,
        'url': repr(db_url_obj),
    }

    current_version = db_migrate.version
    expected_version = rhodecode.__dbversion__
    # A schema version mismatch is critical: migrations must be run.
    if state['type'] == STATE_OK and current_version != expected_version:
        msg = 'Critical: database schema mismatch, ' \
              'expected version {}, got {}. ' \
              'Please run migrations on your database.'.format(
                  expected_version, current_version)
        state = {'message': msg, 'type': STATE_ERR}

    human_value = db_info.copy()
    human_value['url'] = "{} @ migration version: {}".format(
        db_info['url'], db_info['migrate_version'])
    human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
    return SysInfoRes(value=db_info, state=state, human_value=human_value)
728
774
729
775
@register_sysinfo
def server_info(environ):
    """Report the serving IP:port and configured instance id.

    :param environ: WSGI environ used to resolve the bound address/port.
    """
    import rhodecode
    from rhodecode.lib.base import get_server_ip_addr, get_server_port

    ip_addr = get_server_ip_addr(environ, log_errors=False)
    port = get_server_port(environ)
    value = {
        'server_ip': '%s:%s' % (ip_addr, port),
        'server_id': rhodecode.CONFIG.get('instance_id'),
    }
    return SysInfoRes(value=value)
742
789
743
790
@register_sysinfo
def usage_info():
    """Report user and repository counts, broken down by repository type."""
    from rhodecode.model.db import User, Repository

    def _repo_count(repo_type):
        # One count query per backend type.
        return Repository.query().filter(
            Repository.repo_type == repo_type).count()

    value = {
        'users': User.query().count(),
        'users_active': User.query().filter(User.active == True).count(),
        'repositories': Repository.query().count(),
        'repository_types': {
            'hg': _repo_count('hg'),
            'git': _repo_count('git'),
            'svn': _repo_count('svn'),
        },
    }
    return SysInfoRes(value=value)
760
808
761
809
def get_system_info(environ=None):
    """Collect every system-info probe into a single dict keyed by topic.

    :param environ: optional WSGI environ; only the ``server`` probe uses
        it, and any falsy value is replaced by an empty dict (the body
        already handled that, so the ``None`` default is backward-compatible).
    :return: dict mapping topic name -> probe result.
    """
    environ = environ or {}
    return {
        'rhodecode_app': SysInfo(rhodecode_app_info)(),
        'rhodecode_config': SysInfo(rhodecode_config)(),
        'rhodecode_usage': SysInfo(usage_info)(),
        'python': SysInfo(python_info)(),
        'py_modules': SysInfo(py_modules)(),

        'platform': SysInfo(platform_type)(),
        'locale': SysInfo(locale_info)(),
        'server': SysInfo(server_info, environ=environ)(),
        'database': SysInfo(database_info)(),
        'ulimit': SysInfo(ulimit_info)(),
        'storage': SysInfo(storage)(),
        'storage_inodes': SysInfo(storage_inodes)(),
        'storage_archive': SysInfo(storage_archives)(),
        'storage_gist': SysInfo(storage_gist)(),
        'storage_temp': SysInfo(storage_temp)(),

        'search': SysInfo(search_info)(),

        'uptime': SysInfo(uptime)(),
        'load': SysInfo(machine_load)(),
        'cpu': SysInfo(cpu)(),
        'memory': SysInfo(memory)(),

        'vcs_backends': SysInfo(vcs_backends)(),
        'vcs_server': SysInfo(vcs_server)(),

        'vcs_server_config': SysInfo(vcs_server_config)(),

        'git': SysInfo(git_info)(),
        'hg': SysInfo(hg_info)(),
        'svn': SysInfo(svn_info)(),
    }
846
847
def load_system_info(key):
    """Run a single registered sysinfo helper by its key.

    Example::

        load_system_info('vcs_server')
        load_system_info('database')
    """
    helper = registered_helpers[key]
    return SysInfo(helper)()
General Comments 0
You need to be logged in to leave comments. Login now