fix(svn): svn events fixes
super-admin - r1261:0f8db01d default
@@ -0,0 +1,111 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 import redis
20
21 from ..lib import rc_cache
22 from ..lib.ext_json import json
23
24
25 log = logging.getLogger(__name__)
26
27 redis_client = None
28
29
30 class RedisTxnClient:
31
32 def __init__(self, url):
33 self.url = url
34 self._create_client(url)
35
36 def _create_client(self, url):
37 connection_pool = redis.ConnectionPool.from_url(url)
38 self.writer_client = redis.StrictRedis(
39 connection_pool=connection_pool
40 )
41 self.reader_client = self.writer_client
42
43 def set(self, key, value):
44 self.writer_client.set(key, value)
45
46 def get(self, key):
47 return self.reader_client.get(key)
48
49 def delete(self, key):
50 self.writer_client.delete(key)
51
52
53 def get_redis_client(url=''):
54
55 global redis_client
56 if redis_client is not None:
57 return redis_client
58 if not url:
59 from vcsserver import CONFIG
60 url = CONFIG['vcs.svn.redis_conn']
61 redis_client = RedisTxnClient(url)
62 return redis_client
63
64
65 def get_txn_id_data_key(repo_path, svn_txn_id):
66 log.debug('svn-txn-id: %s, obtaining data path', svn_txn_id)
67 repo_key = rc_cache.utils.compute_key_from_params(repo_path)
68 final_key = f'{repo_key}.{svn_txn_id}.svn_txn_id'
69 log.debug('computed final key: %s', final_key)
70
71 return final_key
72
73
74 def store_txn_id_data(repo_path, svn_txn_id, data_dict):
75 log.debug('svn-txn-id: %s, storing data', svn_txn_id)
76
77 if not svn_txn_id:
78 log.warning('Cannot store txn_id because it is empty')
79 return
80
81 redis_conn = get_redis_client()
82
83 store_key = get_txn_id_data_key(repo_path, svn_txn_id)
84 store_data = json.dumps(data_dict)
85 redis_conn.set(store_key, store_data)
86
87
88 def get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=False):
89 """
90 Reads txn_id from the store and, if present, returns the data for the callback manager
91 """
92 log.debug('svn-txn-id: %s, retrieving data', svn_txn_id)
93 redis_conn = get_redis_client()
94
95 store_key = get_txn_id_data_key(repo_path, svn_txn_id)
96 data = {}
98 raw_data = 'not-set'
99 try:
100 raw_data = redis_conn.get(store_key)
101 if not raw_data:
102 raise ValueError(f'Failed to get txn_id metadata, from store: {store_key}')
103 data = json.loads(raw_data)
104 except Exception:
105 log.exception('Failed to get txn_id metadata: %s', raw_data)
106
107 if rm_on_read:
108 log.debug('Cleaning up txn_id at %s', store_key)
109 redis_conn.delete(store_key)
110
111 return data
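A minimal usage sketch of the new transaction-id store, assuming vcs.svn.redis_conn is already present in the vcsserver CONFIG; the repository path, transaction id and payload below are made-up example values, not taken from the codebase:

# sketch only: example values, the real ones come from the svn hook environment
from vcsserver.lib.svn_txn_utils import store_txn_id_data, get_txn_id_from_store

repo_path = '/srv/repos/example-svn-repo'   # hypothetical repository path
svn_txn_id = '123-abc'                      # hypothetical transaction id

# pre-commit side: persist callback metadata for this transaction
store_txn_id_data(repo_path, svn_txn_id, {'username': 'admin', 'ip': '127.0.0.1'})

# post-commit side: read the metadata back and delete the key in the same call
data = get_txn_id_from_store(repo_path, svn_txn_id, rm_on_read=True)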
@@ -1,187 +1,191 b''
1 #
1 #
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 ; #################################
6
6
7 [server:main]
7 [server:main]
8 ; COMMON HOST/IP CONFIG
8 ; COMMON HOST/IP CONFIG
9 host = 0.0.0.0
9 host = 0.0.0.0
10 port = 10010
10 port = 10010
11
11
12
12
13 ; ###########################
13 ; ###########################
14 ; GUNICORN APPLICATION SERVER
14 ; GUNICORN APPLICATION SERVER
15 ; ###########################
15 ; ###########################
16
16
17 ; run with gunicorn --paste rhodecode.ini
17 ; run with gunicorn --paste rhodecode.ini
18
18
19 ; Module to use, this setting shouldn't be changed
19 ; Module to use, this setting shouldn't be changed
20 use = egg:gunicorn#main
20 use = egg:gunicorn#main
21
21
22 [app:main]
22 [app:main]
23 ; The %(here)s variable will be replaced with the absolute path of the parent directory
23 ; The %(here)s variable will be replaced with the absolute path of the parent directory
24 ; of this file
24 ; of this file
25 ; Each option in the [app:main] section can be overridden by an environment variable
25 ; Each option in the [app:main] section can be overridden by an environment variable
26 ;
26 ;
27 ;To override an option:
27 ;To override an option:
28 ;
28 ;
29 ;RC_<KeyName>
29 ;RC_<KeyName>
30 ;Everything should be uppercase, . and - should be replaced by _.
30 ;Everything should be uppercase, . and - should be replaced by _.
31 ;For example, if you have these configuration settings:
31 ;For example, if you have these configuration settings:
32 ;rc_cache.repo_object.backend = foo
32 ;rc_cache.repo_object.backend = foo
33 ;can be overridden by
33 ;can be overridden by
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
35
35
36 use = egg:rhodecode-vcsserver
36 use = egg:rhodecode-vcsserver
37
37
38
38
39 ; #############
39 ; #############
40 ; DEBUG OPTIONS
40 ; DEBUG OPTIONS
41 ; #############
41 ; #############
42
42
43 # During development we want to have the debug toolbar enabled
43 # During development we want to have the debug toolbar enabled
44 pyramid.includes =
44 pyramid.includes =
45 pyramid_debugtoolbar
45 pyramid_debugtoolbar
46
46
47 debugtoolbar.hosts = 0.0.0.0/0
47 debugtoolbar.hosts = 0.0.0.0/0
48 debugtoolbar.exclude_prefixes =
48 debugtoolbar.exclude_prefixes =
49 /css
49 /css
50 /fonts
50 /fonts
51 /images
51 /images
52 /js
52 /js
53
53
54 ; #################
54 ; #################
55 ; END DEBUG OPTIONS
55 ; END DEBUG OPTIONS
56 ; #################
56 ; #################
57
57
58 ; Pyramid default locales, we need this to be set
58 ; Pyramid default locales, we need this to be set
59 #pyramid.default_locale_name = en
59 #pyramid.default_locale_name = en
60
60
61 ; default locale used by VCS systems
61 ; default locale used by VCS systems
62 #locale = en_US.UTF-8
62 #locale = en_US.UTF-8
63
63
64 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
64 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
65 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
65 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
66 ; or /usr/local/bin/rhodecode_bin/vcs_bin
66 ; or /usr/local/bin/rhodecode_bin/vcs_bin
67 core.binary_dir =
67 core.binary_dir =
68
68
69 ; Redis connection settings for the svn integrations logic
70 ; This connection string needs to be the same on CE and VCSServer
71 vcs.svn.redis_conn = redis://redis:6379/0
72
69 ; Custom exception store path, defaults to TMPDIR
73 ; Custom exception store path, defaults to TMPDIR
70 ; This is used to store exceptions from RhodeCode in a shared directory
74 ; This is used to store exceptions from RhodeCode in a shared directory
71 #exception_tracker.store_path =
75 #exception_tracker.store_path =
72
76
73 ; #############
77 ; #############
74 ; DOGPILE CACHE
78 ; DOGPILE CACHE
75 ; #############
79 ; #############
76
80
77 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
81 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
78 ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
82 ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
79 #cache_dir = %(here)s/data
83 #cache_dir = %(here)s/data
80
84
81 ; ***************************************
85 ; ***************************************
82 ; `repo_object` cache, default file based
86 ; `repo_object` cache, default file based
83 ; ***************************************
87 ; ***************************************
84
88
85 ; `repo_object` cache settings for vcs methods for repositories
89 ; `repo_object` cache settings for vcs methods for repositories
86 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
90 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
87
91
88 ; cache auto-expires after N seconds
92 ; cache auto-expires after N seconds
89 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
93 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
90 #rc_cache.repo_object.expiration_time = 2592000
94 #rc_cache.repo_object.expiration_time = 2592000
91
95
92 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
96 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
93 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
97 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
94
98
95 ; ***********************************************************
99 ; ***********************************************************
96 ; `repo_object` cache with redis backend
100 ; `repo_object` cache with redis backend
97 ; recommended for larger instances and for better performance
101 ; recommended for larger instances and for better performance
98 ; ***********************************************************
102 ; ***********************************************************
99
103
100 ; `repo_object` cache settings for vcs methods for repositories
104 ; `repo_object` cache settings for vcs methods for repositories
101 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
105 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
102
106
103 ; cache auto-expires after N seconds
107 ; cache auto-expires after N seconds
104 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
108 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
105 #rc_cache.repo_object.expiration_time = 2592000
109 #rc_cache.repo_object.expiration_time = 2592000
106
110
107 ; redis_expiration_time needs to be greater than expiration_time
111 ; redis_expiration_time needs to be greater than expiration_time
108 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
112 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
109
113
110 #rc_cache.repo_object.arguments.host = localhost
114 #rc_cache.repo_object.arguments.host = localhost
111 #rc_cache.repo_object.arguments.port = 6379
115 #rc_cache.repo_object.arguments.port = 6379
112 #rc_cache.repo_object.arguments.db = 5
116 #rc_cache.repo_object.arguments.db = 5
113 #rc_cache.repo_object.arguments.socket_timeout = 30
117 #rc_cache.repo_object.arguments.socket_timeout = 30
114 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
118 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
115 #rc_cache.repo_object.arguments.distributed_lock = true
119 #rc_cache.repo_object.arguments.distributed_lock = true
116
120
117 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
121 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
118 #rc_cache.repo_object.arguments.lock_auto_renewal = true
122 #rc_cache.repo_object.arguments.lock_auto_renewal = true
119
123
120 ; Statsd client config, this is used to send metrics to statsd
124 ; Statsd client config, this is used to send metrics to statsd
121 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
125 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
122 #statsd.enabled = false
126 #statsd.enabled = false
123 #statsd.statsd_host = 0.0.0.0
127 #statsd.statsd_host = 0.0.0.0
124 #statsd.statsd_port = 8125
128 #statsd.statsd_port = 8125
125 #statsd.statsd_prefix =
129 #statsd.statsd_prefix =
126 #statsd.statsd_ipv6 = false
130 #statsd.statsd_ipv6 = false
127
131
128 ; configure logging automatically at server startup; set to false
132 ; configure logging automatically at server startup; set to false
129 ; to use the custom logging config below.
133 ; to use the custom logging config below.
130 ; RC_LOGGING_FORMATTER
134 ; RC_LOGGING_FORMATTER
131 ; RC_LOGGING_LEVEL
135 ; RC_LOGGING_LEVEL
132 ; env variables can control the settings for logging in case of autoconfigure
136 ; env variables can control the settings for logging in case of autoconfigure
133
137
134 #logging.autoconfigure = true
138 #logging.autoconfigure = true
135
139
136 ; specify your own custom logging config file to configure logging
140 ; specify your own custom logging config file to configure logging
137 #logging.logging_conf_file = /path/to/custom_logging.ini
141 #logging.logging_conf_file = /path/to/custom_logging.ini
138
142
139 ; #####################
143 ; #####################
140 ; LOGGING CONFIGURATION
144 ; LOGGING CONFIGURATION
141 ; #####################
145 ; #####################
142
146
143 [loggers]
147 [loggers]
144 keys = root, vcsserver
148 keys = root, vcsserver
145
149
146 [handlers]
150 [handlers]
147 keys = console
151 keys = console
148
152
149 [formatters]
153 [formatters]
150 keys = generic, json
154 keys = generic, json
151
155
152 ; #######
156 ; #######
153 ; LOGGERS
157 ; LOGGERS
154 ; #######
158 ; #######
155 [logger_root]
159 [logger_root]
156 level = NOTSET
160 level = NOTSET
157 handlers = console
161 handlers = console
158
162
159 [logger_vcsserver]
163 [logger_vcsserver]
160 level = DEBUG
164 level = DEBUG
161 handlers =
165 handlers =
162 qualname = vcsserver
166 qualname = vcsserver
163 propagate = 1
167 propagate = 1
164
168
165 ; ########
169 ; ########
166 ; HANDLERS
170 ; HANDLERS
167 ; ########
171 ; ########
168
172
169 [handler_console]
173 [handler_console]
170 class = StreamHandler
174 class = StreamHandler
171 args = (sys.stderr, )
175 args = (sys.stderr, )
172 level = DEBUG
176 level = DEBUG
173 ; To enable JSON formatted logs replace 'generic' with 'json'
177 ; To enable JSON formatted logs replace 'generic' with 'json'
174 ; This allows sending properly formatted logs to grafana loki or elasticsearch
178 ; This allows sending properly formatted logs to grafana loki or elasticsearch
175 formatter = generic
179 formatter = generic
176
180
177 ; ##########
181 ; ##########
178 ; FORMATTERS
182 ; FORMATTERS
179 ; ##########
183 ; ##########
180
184
181 [formatter_generic]
185 [formatter_generic]
182 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
186 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
183 datefmt = %Y-%m-%d %H:%M:%S
187 datefmt = %Y-%m-%d %H:%M:%S
184
188
185 [formatter_json]
189 [formatter_json]
186 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
190 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
187 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
191 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
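Following the override convention spelled out above (RC_ prefix, everything uppercase, . and - replaced by _), the new vcs.svn.redis_conn setting can be overridden at runtime without editing the ini file. A small illustrative helper, not part of vcsserver, that derives the variable name the convention implies:

import os

def rc_env_name(ini_key: str) -> str:
    # RC_<KeyName>: uppercase, with '.' and '-' replaced by '_'
    return 'RC_' + ini_key.upper().replace('.', '_').replace('-', '_')

# vcs.svn.redis_conn -> RC_VCS_SVN_REDIS_CONN
redis_conn = os.environ.get(rc_env_name('vcs.svn.redis_conn'), 'redis://redis:6379/0')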
@@ -1,167 +1,171 b''
1 #
1 #
2
2
3 ; #################################
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 ; #################################
6
6
7 [server:main]
7 [server:main]
8 ; COMMON HOST/IP CONFIG
8 ; COMMON HOST/IP CONFIG
9 host = 0.0.0.0
9 host = 0.0.0.0
10 port = 10010
10 port = 10010
11
11
12
12
13 ; ###########################
13 ; ###########################
14 ; GUNICORN APPLICATION SERVER
14 ; GUNICORN APPLICATION SERVER
15 ; ###########################
15 ; ###########################
16
16
17 ; run with gunicorn --paste rhodecode.ini
17 ; run with gunicorn --paste rhodecode.ini
18
18
19 ; Module to use, this setting shouldn't be changed
19 ; Module to use, this setting shouldn't be changed
20 use = egg:gunicorn#main
20 use = egg:gunicorn#main
21
21
22 [app:main]
22 [app:main]
23 ; The %(here)s variable will be replaced with the absolute path of the parent directory
23 ; The %(here)s variable will be replaced with the absolute path of the parent directory
24 ; of this file
24 ; of this file
25 ; Each option in the [app:main] section can be overridden by an environment variable
25 ; Each option in the [app:main] section can be overridden by an environment variable
26 ;
26 ;
27 ;To override an option:
27 ;To override an option:
28 ;
28 ;
29 ;RC_<KeyName>
29 ;RC_<KeyName>
30 ;Everything should be uppercase, . and - should be replaced by _.
30 ;Everything should be uppercase, . and - should be replaced by _.
31 ;For example, if you have these configuration settings:
31 ;For example, if you have these configuration settings:
32 ;rc_cache.repo_object.backend = foo
32 ;rc_cache.repo_object.backend = foo
33 ;can be overridden by
33 ;can be overridden by
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
34 ;export RC_CACHE_REPO_OBJECT_BACKEND=foo
35
35
36 use = egg:rhodecode-vcsserver
36 use = egg:rhodecode-vcsserver
37
37
38 ; Pyramid default locales, we need this to be set
38 ; Pyramid default locales, we need this to be set
39 #pyramid.default_locale_name = en
39 #pyramid.default_locale_name = en
40
40
41 ; default locale used by VCS systems
41 ; default locale used by VCS systems
42 #locale = en_US.UTF-8
42 #locale = en_US.UTF-8
43
43
44 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
44 ; path to binaries (hg,git,svn) for vcsserver, it should be set by the installer
45 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
45 ; at installation time, e.g /home/user/.rccontrol/vcsserver-1/profile/bin
46 ; or /usr/local/bin/rhodecode_bin/vcs_bin
46 ; or /usr/local/bin/rhodecode_bin/vcs_bin
47 core.binary_dir =
47 core.binary_dir =
48
48
49 ; Redis connection settings for the svn integrations logic
50 ; This connection string needs to be the same on CE and VCSServer
51 vcs.svn.redis_conn = redis://redis:6379/0
52
49 ; Custom exception store path, defaults to TMPDIR
53 ; Custom exception store path, defaults to TMPDIR
50 ; This is used to store exceptions from RhodeCode in a shared directory
54 ; This is used to store exceptions from RhodeCode in a shared directory
51 #exception_tracker.store_path =
55 #exception_tracker.store_path =
52
56
53 ; #############
57 ; #############
54 ; DOGPILE CACHE
58 ; DOGPILE CACHE
55 ; #############
59 ; #############
56
60
57 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
61 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
58 ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
62 ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space
59 #cache_dir = %(here)s/data
63 #cache_dir = %(here)s/data
60
64
61 ; ***************************************
65 ; ***************************************
62 ; `repo_object` cache, default file based
66 ; `repo_object` cache, default file based
63 ; ***************************************
67 ; ***************************************
64
68
65 ; `repo_object` cache settings for vcs methods for repositories
69 ; `repo_object` cache settings for vcs methods for repositories
66 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
70 #rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
67
71
68 ; cache auto-expires after N seconds
72 ; cache auto-expires after N seconds
69 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
73 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
70 #rc_cache.repo_object.expiration_time = 2592000
74 #rc_cache.repo_object.expiration_time = 2592000
71
75
72 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
76 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
73 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
77 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache_repo_object.db
74
78
75 ; ***********************************************************
79 ; ***********************************************************
76 ; `repo_object` cache with redis backend
80 ; `repo_object` cache with redis backend
77 ; recommended for larger instances and for better performance
81 ; recommended for larger instances and for better performance
78 ; ***********************************************************
82 ; ***********************************************************
79
83
80 ; `repo_object` cache settings for vcs methods for repositories
84 ; `repo_object` cache settings for vcs methods for repositories
81 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
85 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
82
86
83 ; cache auto-expires after N seconds
87 ; cache auto-expires after N seconds
84 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
88 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
85 #rc_cache.repo_object.expiration_time = 2592000
89 #rc_cache.repo_object.expiration_time = 2592000
86
90
87 ; redis_expiration_time needs to be greater than expiration_time
91 ; redis_expiration_time needs to be greater than expiration_time
88 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
92 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
89
93
90 #rc_cache.repo_object.arguments.host = localhost
94 #rc_cache.repo_object.arguments.host = localhost
91 #rc_cache.repo_object.arguments.port = 6379
95 #rc_cache.repo_object.arguments.port = 6379
92 #rc_cache.repo_object.arguments.db = 5
96 #rc_cache.repo_object.arguments.db = 5
93 #rc_cache.repo_object.arguments.socket_timeout = 30
97 #rc_cache.repo_object.arguments.socket_timeout = 30
94 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
98 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
95 #rc_cache.repo_object.arguments.distributed_lock = true
99 #rc_cache.repo_object.arguments.distributed_lock = true
96
100
97 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
101 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
98 #rc_cache.repo_object.arguments.lock_auto_renewal = true
102 #rc_cache.repo_object.arguments.lock_auto_renewal = true
99
103
100 ; Statsd client config, this is used to send metrics to statsd
104 ; Statsd client config, this is used to send metrics to statsd
101 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
105 ; We recommend setting up statsd_exporter and scraping the metrics using Prometheus
102 #statsd.enabled = false
106 #statsd.enabled = false
103 #statsd.statsd_host = 0.0.0.0
107 #statsd.statsd_host = 0.0.0.0
104 #statsd.statsd_port = 8125
108 #statsd.statsd_port = 8125
105 #statsd.statsd_prefix =
109 #statsd.statsd_prefix =
106 #statsd.statsd_ipv6 = false
110 #statsd.statsd_ipv6 = false
107
111
108 ; configure logging automatically at server startup; set to false
112 ; configure logging automatically at server startup; set to false
109 ; to use the custom logging config below.
113 ; to use the custom logging config below.
110 ; RC_LOGGING_FORMATTER
114 ; RC_LOGGING_FORMATTER
111 ; RC_LOGGING_LEVEL
115 ; RC_LOGGING_LEVEL
112 ; env variables can control the settings for logging in case of autoconfigure
116 ; env variables can control the settings for logging in case of autoconfigure
113
117
114 #logging.autoconfigure = true
118 #logging.autoconfigure = true
115
119
116 ; specify your own custom logging config file to configure logging
120 ; specify your own custom logging config file to configure logging
117 #logging.logging_conf_file = /path/to/custom_logging.ini
121 #logging.logging_conf_file = /path/to/custom_logging.ini
118
122
119 ; #####################
123 ; #####################
120 ; LOGGING CONFIGURATION
124 ; LOGGING CONFIGURATION
121 ; #####################
125 ; #####################
122
126
123 [loggers]
127 [loggers]
124 keys = root, vcsserver
128 keys = root, vcsserver
125
129
126 [handlers]
130 [handlers]
127 keys = console
131 keys = console
128
132
129 [formatters]
133 [formatters]
130 keys = generic, json
134 keys = generic, json
131
135
132 ; #######
136 ; #######
133 ; LOGGERS
137 ; LOGGERS
134 ; #######
138 ; #######
135 [logger_root]
139 [logger_root]
136 level = NOTSET
140 level = NOTSET
137 handlers = console
141 handlers = console
138
142
139 [logger_vcsserver]
143 [logger_vcsserver]
140 level = INFO
144 level = INFO
141 handlers =
145 handlers =
142 qualname = vcsserver
146 qualname = vcsserver
143 propagate = 1
147 propagate = 1
144
148
145 ; ########
149 ; ########
146 ; HANDLERS
150 ; HANDLERS
147 ; ########
151 ; ########
148
152
149 [handler_console]
153 [handler_console]
150 class = StreamHandler
154 class = StreamHandler
151 args = (sys.stderr, )
155 args = (sys.stderr, )
152 level = INFO
156 level = INFO
153 ; To enable JSON formatted logs replace 'generic' with 'json'
157 ; To enable JSON formatted logs replace 'generic' with 'json'
154 ; This allows sending properly formatted logs to grafana loki or elasticsearch
158 ; This allows sending properly formatted logs to grafana loki or elasticsearch
155 formatter = generic
159 formatter = generic
156
160
157 ; ##########
161 ; ##########
158 ; FORMATTERS
162 ; FORMATTERS
159 ; ##########
163 ; ##########
160
164
161 [formatter_generic]
165 [formatter_generic]
162 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
166 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
163 datefmt = %Y-%m-%d %H:%M:%S
167 datefmt = %Y-%m-%d %H:%M:%S
164
168
165 [formatter_json]
169 [formatter_json]
166 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
170 format = %(timestamp)s %(levelname)s %(name)s %(message)s %(req_id)s
167 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
171 class = vcsserver.lib._vendor.jsonlogger.JsonFormatter
@@ -1,230 +1,238 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import re
19 import re
20 import os
20 import os
21 import sys
21 import sys
22 import datetime
22 import datetime
23 import logging
23 import logging
24 import pkg_resources
24 import pkg_resources
25
25
26 import vcsserver
26 import vcsserver
27 import vcsserver.settings
27 import vcsserver.settings
28 from vcsserver.lib.str_utils import safe_bytes
28 from vcsserver.lib.str_utils import safe_bytes
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32 HOOKS_DIR_MODE = 0o755
32 HOOKS_DIR_MODE = 0o755
33 HOOKS_FILE_MODE = 0o755
33 HOOKS_FILE_MODE = 0o755
34
34
35
35
36 def set_permissions_if_needed(path_to_check, perms: oct):
36 def set_permissions_if_needed(path_to_check, perms: oct):
37 # Get current permissions
37 # Get current permissions
38 current_permissions = os.stat(path_to_check).st_mode & 0o777 # Extract permission bits
38 current_permissions = os.stat(path_to_check).st_mode & 0o777 # Extract permission bits
39
39
40 # Check if current permissions are lower than required
40 # Check if current permissions are lower than required
41 if current_permissions < int(perms):
41 if current_permissions < int(perms):
42 # Change the permissions if they are lower than required
42 # Change the permissions if they are lower than required
43 os.chmod(path_to_check, perms)
43 os.chmod(path_to_check, perms)
44
44
45
45
46 def get_git_hooks_path(repo_path, bare):
46 def get_git_hooks_path(repo_path, bare):
47 hooks_path = os.path.join(repo_path, 'hooks')
47 hooks_path = os.path.join(repo_path, 'hooks')
48 if not bare:
48 if not bare:
49 hooks_path = os.path.join(repo_path, '.git', 'hooks')
49 hooks_path = os.path.join(repo_path, '.git', 'hooks')
50
50
51 return hooks_path
51 return hooks_path
52
52
53
53
54 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
54 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
55 """
55 """
56 Creates a RhodeCode hook inside a git repository
56 Creates a RhodeCode hook inside a git repository
57
57
58 :param repo_path: path to repository
58 :param repo_path: path to repository
59 :param bare: defines if repository is considered a bare git repo
59 :param bare: defines if repository is considered a bare git repo
60 :param executable: binary executable to put in the hooks
60 :param executable: binary executable to put in the hooks
61 :param force_create: Creates even if the same name hook exists
61 :param force_create: Creates even if the same name hook exists
62 """
62 """
63 executable = executable or sys.executable
63 executable = executable or sys.executable
64 hooks_path = get_git_hooks_path(repo_path, bare)
64 hooks_path = get_git_hooks_path(repo_path, bare)
65
65
66 # we always call it to ensure dir exists and it has a proper mode
66 # we always call it to ensure dir exists and it has a proper mode
67 if not os.path.exists(hooks_path):
67 if not os.path.exists(hooks_path):
68 # If it doesn't exist, create a new directory with the specified mode
68 # If it doesn't exist, create a new directory with the specified mode
69 os.makedirs(hooks_path, mode=HOOKS_DIR_MODE, exist_ok=True)
69 os.makedirs(hooks_path, mode=HOOKS_DIR_MODE, exist_ok=True)
70 # If it exists, change the directory's mode to the specified mode
70 # If it exists, change the directory's mode to the specified mode
71 set_permissions_if_needed(hooks_path, perms=HOOKS_DIR_MODE)
71 set_permissions_if_needed(hooks_path, perms=HOOKS_DIR_MODE)
72
72
73 tmpl_post = pkg_resources.resource_string(
73 tmpl_post = pkg_resources.resource_string(
74 'vcsserver', '/'.join(
74 'vcsserver', '/'.join(
75 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
75 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
76 tmpl_pre = pkg_resources.resource_string(
76 tmpl_pre = pkg_resources.resource_string(
77 'vcsserver', '/'.join(
77 'vcsserver', '/'.join(
78 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
78 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
79
79
80 path = '' # not used for now
80 path = '' # not used for now
81 timestamp = datetime.datetime.utcnow().isoformat()
81 timestamp = datetime.datetime.utcnow().isoformat()
82
82
83 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
83 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
84 log.debug('Installing git hook in repo %s', repo_path)
84 log.debug('Installing git hook in repo %s', repo_path)
85 _hook_file = os.path.join(hooks_path, f'{h_type}-receive')
85 _hook_file = os.path.join(hooks_path, f'{h_type}-receive')
86 _rhodecode_hook = check_rhodecode_hook(_hook_file)
86 _rhodecode_hook = check_rhodecode_hook(_hook_file)
87
87
88 if _rhodecode_hook or force_create:
88 if _rhodecode_hook or force_create:
89 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
89 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
90 env_expand = str([
91 ('RC_INI_FILE', vcsserver.CONFIG['__file__']),
92 ('RC_CORE_BINARY_DIR', vcsserver.settings.BINARY_DIR),
93 ('RC_GIT_EXECUTABLE', vcsserver.settings.GIT_EXECUTABLE()),
94 ('RC_SVN_EXECUTABLE', vcsserver.settings.SVN_EXECUTABLE()),
95 ('RC_SVNLOOK_EXECUTABLE', vcsserver.settings.SVNLOOK_EXECUTABLE()),
96 ])
90 try:
97 try:
91 with open(_hook_file, 'wb') as f:
98 with open(_hook_file, 'wb') as f:
99 template = template.replace(b'_OS_EXPAND_', safe_bytes(env_expand))
92 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
100 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
93 template = template.replace(b'_DATE_', safe_bytes(timestamp))
101 template = template.replace(b'_DATE_', safe_bytes(timestamp))
94 template = template.replace(b'_ENV_', safe_bytes(executable))
102 template = template.replace(b'_ENV_', safe_bytes(executable))
95 template = template.replace(b'_PATH_', safe_bytes(path))
103 template = template.replace(b'_PATH_', safe_bytes(path))
96 f.write(template)
104 f.write(template)
97 set_permissions_if_needed(_hook_file, perms=HOOKS_FILE_MODE)
105 set_permissions_if_needed(_hook_file, perms=HOOKS_FILE_MODE)
98 except OSError:
106 except OSError:
99 log.exception('error writing hook file %s', _hook_file)
107 log.exception('error writing hook file %s', _hook_file)
100 else:
108 else:
101 log.debug('skipping writing hook file')
109 log.debug('skipping writing hook file')
102
110
103 return True
111 return True
104
112
105
113
106 def get_svn_hooks_path(repo_path):
114 def get_svn_hooks_path(repo_path):
107 hooks_path = os.path.join(repo_path, 'hooks')
115 hooks_path = os.path.join(repo_path, 'hooks')
108
116
109 return hooks_path
117 return hooks_path
110
118
111
119
112 def install_svn_hooks(repo_path, executable=None, force_create=False):
120 def install_svn_hooks(repo_path, executable=None, force_create=False):
113 """
121 """
114 Creates RhodeCode hooks inside a svn repository
122 Creates RhodeCode hooks inside a svn repository
115
123
116 :param repo_path: path to repository
124 :param repo_path: path to repository
117 :param executable: binary executable to put in the hooks
125 :param executable: binary executable to put in the hooks
118 :param force_create: Create even if same name hook exists
126 :param force_create: Create even if same name hook exists
119 """
127 """
120 executable = executable or sys.executable
128 executable = executable or sys.executable
121 hooks_path = get_svn_hooks_path(repo_path)
129 hooks_path = get_svn_hooks_path(repo_path)
122 if not os.path.isdir(hooks_path):
130 if not os.path.isdir(hooks_path):
123 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
131 os.makedirs(hooks_path, mode=0o777, exist_ok=True)
124
132
125 tmpl_post = pkg_resources.resource_string(
133 tmpl_post = pkg_resources.resource_string(
126 'vcsserver', '/'.join(
134 'vcsserver', '/'.join(
127 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
135 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
128 tmpl_pre = pkg_resources.resource_string(
136 tmpl_pre = pkg_resources.resource_string(
129 'vcsserver', '/'.join(
137 'vcsserver', '/'.join(
130 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
138 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
131
139
132 path = '' # not used for now
140 path = '' # not used for now
133 timestamp = datetime.datetime.utcnow().isoformat()
141 timestamp = datetime.datetime.utcnow().isoformat()
134
142
135 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
143 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
136 log.debug('Installing svn hook in repo %s', repo_path)
144 log.debug('Installing svn hook in repo %s', repo_path)
137 _hook_file = os.path.join(hooks_path, f'{h_type}-commit')
145 _hook_file = os.path.join(hooks_path, f'{h_type}-commit')
138 _rhodecode_hook = check_rhodecode_hook(_hook_file)
146 _rhodecode_hook = check_rhodecode_hook(_hook_file)
139
147
140 if _rhodecode_hook or force_create:
148 if _rhodecode_hook or force_create:
141 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
149 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
142
150
143 env_expand = str([
151 env_expand = str([
152 ('RC_INI_FILE', vcsserver.CONFIG['__file__']),
144 ('RC_CORE_BINARY_DIR', vcsserver.settings.BINARY_DIR),
153 ('RC_CORE_BINARY_DIR', vcsserver.settings.BINARY_DIR),
145 ('RC_GIT_EXECUTABLE', vcsserver.settings.GIT_EXECUTABLE()),
154 ('RC_GIT_EXECUTABLE', vcsserver.settings.GIT_EXECUTABLE()),
146 ('RC_SVN_EXECUTABLE', vcsserver.settings.SVN_EXECUTABLE()),
155 ('RC_SVN_EXECUTABLE', vcsserver.settings.SVN_EXECUTABLE()),
147 ('RC_SVNLOOK_EXECUTABLE', vcsserver.settings.SVNLOOK_EXECUTABLE()),
156 ('RC_SVNLOOK_EXECUTABLE', vcsserver.settings.SVNLOOK_EXECUTABLE()),
148
149 ])
157 ])
150 try:
158 try:
151 with open(_hook_file, 'wb') as f:
159 with open(_hook_file, 'wb') as f:
160 template = template.replace(b'_OS_EXPAND_', safe_bytes(env_expand))
152 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
161 template = template.replace(b'_TMPL_', safe_bytes(vcsserver.get_version()))
153 template = template.replace(b'_DATE_', safe_bytes(timestamp))
162 template = template.replace(b'_DATE_', safe_bytes(timestamp))
154 template = template.replace(b'_OS_EXPAND_', safe_bytes(env_expand))
155 template = template.replace(b'_ENV_', safe_bytes(executable))
163 template = template.replace(b'_ENV_', safe_bytes(executable))
156 template = template.replace(b'_PATH_', safe_bytes(path))
164 template = template.replace(b'_PATH_', safe_bytes(path))
157
165
158 f.write(template)
166 f.write(template)
159 os.chmod(_hook_file, 0o755)
167 os.chmod(_hook_file, 0o755)
160 except OSError:
168 except OSError:
161 log.exception('error writing hook file %s', _hook_file)
169 log.exception('error writing hook file %s', _hook_file)
162 else:
170 else:
163 log.debug('skipping writing hook file')
171 log.debug('skipping writing hook file')
164
172
165 return True
173 return True
166
174
167
175
168 def get_version_from_hook(hook_path):
176 def get_version_from_hook(hook_path):
169 version = b''
177 version = b''
170 hook_content = read_hook_content(hook_path)
178 hook_content = read_hook_content(hook_path)
171 matches = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', hook_content)
179 matches = re.search(rb'RC_HOOK_VER\s*=\s*(.*)', hook_content)
172 if matches:
180 if matches:
173 try:
181 try:
174 version = matches.groups()[0]
182 version = matches.groups()[0]
175 log.debug('got version %s from hooks.', version)
183 log.debug('got version %s from hooks.', version)
176 except Exception:
184 except Exception:
177 log.exception("Exception while reading the hook version.")
185 log.exception("Exception while reading the hook version.")
178 return version.replace(b"'", b"")
186 return version.replace(b"'", b"")
179
187
180
188
181 def check_rhodecode_hook(hook_path):
189 def check_rhodecode_hook(hook_path):
182 """
190 """
183 Check if the hook was created by RhodeCode
191 Check if the hook was created by RhodeCode
184 """
192 """
185 if not os.path.exists(hook_path):
193 if not os.path.exists(hook_path):
186 return True
194 return True
187
195
188 log.debug('hook exists, checking if it is from RhodeCode')
196 log.debug('hook exists, checking if it is from RhodeCode')
189
197
190 version = get_version_from_hook(hook_path)
198 version = get_version_from_hook(hook_path)
191 if version:
199 if version:
192 return True
200 return True
193
201
194 return False
202 return False
195
203
196
204
197 def read_hook_content(hook_path) -> bytes:
205 def read_hook_content(hook_path) -> bytes:
198 content = b''
206 content = b''
199 if os.path.isfile(hook_path):
207 if os.path.isfile(hook_path):
200 with open(hook_path, 'rb') as f:
208 with open(hook_path, 'rb') as f:
201 content = f.read()
209 content = f.read()
202 return content
210 return content
203
211
204
212
205 def get_git_pre_hook_version(repo_path, bare):
213 def get_git_pre_hook_version(repo_path, bare):
206 hooks_path = get_git_hooks_path(repo_path, bare)
214 hooks_path = get_git_hooks_path(repo_path, bare)
207 _hook_file = os.path.join(hooks_path, 'pre-receive')
215 _hook_file = os.path.join(hooks_path, 'pre-receive')
208 version = get_version_from_hook(_hook_file)
216 version = get_version_from_hook(_hook_file)
209 return version
217 return version
210
218
211
219
212 def get_git_post_hook_version(repo_path, bare):
220 def get_git_post_hook_version(repo_path, bare):
213 hooks_path = get_git_hooks_path(repo_path, bare)
221 hooks_path = get_git_hooks_path(repo_path, bare)
214 _hook_file = os.path.join(hooks_path, 'post-receive')
222 _hook_file = os.path.join(hooks_path, 'post-receive')
215 version = get_version_from_hook(_hook_file)
223 version = get_version_from_hook(_hook_file)
216 return version
224 return version
217
225
218
226
219 def get_svn_pre_hook_version(repo_path):
227 def get_svn_pre_hook_version(repo_path):
220 hooks_path = get_svn_hooks_path(repo_path)
228 hooks_path = get_svn_hooks_path(repo_path)
221 _hook_file = os.path.join(hooks_path, 'pre-commit')
229 _hook_file = os.path.join(hooks_path, 'pre-commit')
222 version = get_version_from_hook(_hook_file)
230 version = get_version_from_hook(_hook_file)
223 return version
231 return version
224
232
225
233
226 def get_svn_post_hook_version(repo_path):
234 def get_svn_post_hook_version(repo_path):
227 hooks_path = get_svn_hooks_path(repo_path)
235 hooks_path = get_svn_hooks_path(repo_path)
228 _hook_file = os.path.join(hooks_path, 'post-commit')
236 _hook_file = os.path.join(hooks_path, 'post-commit')
229 version = get_version_from_hook(_hook_file)
237 version = get_version_from_hook(_hook_file)
230 return version
238 return version
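A short sketch of how these installers might be driven when provisioning a repository; the wrapper function and the example paths are illustrative only, while install_git_hooks and install_svn_hooks (and their signatures) come from the module above, assumed importable as vcsserver.hook_utils:

from vcsserver.hook_utils import install_git_hooks, install_svn_hooks

def ensure_rhodecode_hooks(repo_path: str, vcs_type: str, force: bool = False) -> bool:
    # illustrative wrapper, not part of vcsserver
    if vcs_type == 'git':
        # bare=True: server-side repository, hooks live directly in <repo>/hooks
        return install_git_hooks(repo_path, bare=True, force_create=force)
    if vcs_type == 'svn':
        return install_svn_hooks(repo_path, force_create=force)
    raise ValueError(f'unsupported vcs type: {vcs_type}')

ensure_rhodecode_hooks('/srv/repos/example.git', 'git', force=True)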
@@ -1,51 +1,59 b''
1 #!_ENV_
1 #!_ENV_
2
2 import os
3 import os
3 import sys
4 import sys
4 path_adjust = [_PATH_]
5 path_adjust = [_PATH_]
5
6
6 if path_adjust:
7 if path_adjust:
7 sys.path = path_adjust
8 sys.path = path_adjust
8
9
10 # special trick to pass in some information from rc to hooks
11 # mod_dav strips ALL env vars and we can't even access things like PATH
12 for env_k, env_v in _OS_EXPAND_:
13 os.environ[env_k] = env_v
14
9 try:
15 try:
10 from vcsserver import hooks
16 from vcsserver import hooks
11 except ImportError:
17 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
18 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
19 import traceback
14 print(traceback.format_exc())
20 print(traceback.format_exc())
15 hooks = None
21 hooks = None
16
22
17
23
18 # TIMESTAMP: _DATE_
24 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
25 RC_HOOK_VER = '_TMPL_'
20
26
21
27
22 def main():
28 def main():
23 if hooks is None:
29 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
30 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows simply pushing to this repo even without rhodecode
31 # this allows simply pushing to this repo even without rhodecode
26 sys.exit(0)
32 sys.exit(0)
27
33
28 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_GIT_HOOKS'):
34 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_GIT_HOOKS'):
29 sys.exit(0)
35 sys.exit(0)
30
36
31 repo_path = os.getcwd()
37 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
38 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
39
34 # os.environ is modified here by a subprocess call that
40 # os.environ is modified here by a subprocess call that
35 # runs git and later git executes this hook.
41 # runs git and later git executes this hook.
36 # Environ gets some additional info from rhodecode system
42 # Environ gets some additional info from rhodecode system
37 # like IP or username from basic-auth
43 # like IP or username from basic-auth
44
45 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
38 try:
46 try:
39 result = hooks.git_post_receive(repo_path, push_data, os.environ)
47 result = hooks.git_post_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
48 sys.exit(result)
41 except Exception as error:
49 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
50 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
51 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
52 raise
45 print(f'ERROR: {error}')
53 print(f'ERROR: {error}')
46 sys.exit(1)
54 sys.exit(1)
47 sys.exit(0)
55 sys.exit(0)
48
56
49
57
50 if __name__ == '__main__':
58 if __name__ == '__main__':
51 main()
59 main()
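For context, _OS_EXPAND_ is replaced at install time with the str() of the env_expand list built in install_git_hooks/install_svn_hooks, so the rendered hook iterates a literal list of (name, value) pairs. Roughly what the generated code looks like, with illustrative values standing in for the real CONFIG/settings ones:

import os

# values below are placeholders; the installer fills in the real ones
# from vcsserver.CONFIG and vcsserver.settings
for env_k, env_v in [
    ('RC_INI_FILE', '/etc/rhodecode/vcsserver.ini'),
    ('RC_CORE_BINARY_DIR', '/usr/local/bin/rhodecode_bin/vcs_bin'),
    ('RC_GIT_EXECUTABLE', '/usr/bin/git'),
    ('RC_SVN_EXECUTABLE', '/usr/bin/svn'),
    ('RC_SVNLOOK_EXECUTABLE', '/usr/bin/svnlook'),
]:
    # restore values that mod_dav (and restricted hook environments) strip
    os.environ[env_k] = env_v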
@@ -1,51 +1,59 b''
1 #!_ENV_
1 #!_ENV_
2
2 import os
3 import os
3 import sys
4 import sys
4 path_adjust = [_PATH_]
5 path_adjust = [_PATH_]
5
6
6 if path_adjust:
7 if path_adjust:
7 sys.path = path_adjust
8 sys.path = path_adjust
8
9
10 # special trick to pass in some information from rc to hooks
11 # mod_dav strips ALL env vars and we can't even access things like PATH
12 for env_k, env_v in _OS_EXPAND_:
13 os.environ[env_k] = env_v
14
9 try:
15 try:
10 from vcsserver import hooks
16 from vcsserver import hooks
11 except ImportError:
17 except ImportError:
12 if os.environ.get('RC_DEBUG_GIT_HOOK'):
18 if os.environ.get('RC_DEBUG_GIT_HOOK'):
13 import traceback
19 import traceback
14 print(traceback.format_exc())
20 print(traceback.format_exc())
15 hooks = None
21 hooks = None
16
22
17
23
18 # TIMESTAMP: _DATE_
24 # TIMESTAMP: _DATE_
19 RC_HOOK_VER = '_TMPL_'
25 RC_HOOK_VER = '_TMPL_'
20
26
21
27
22 def main():
28 def main():
23 if hooks is None:
29 if hooks is None:
24 # exit with success if we cannot import vcsserver.hooks !!
30 # exit with success if we cannot import vcsserver.hooks !!
25 # this allows simply pushing to this repo even without rhodecode
31 # this allows simply pushing to this repo even without rhodecode
26 sys.exit(0)
32 sys.exit(0)
27
33
28 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_GIT_HOOKS'):
34 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_GIT_HOOKS'):
29 sys.exit(0)
35 sys.exit(0)
30
36
31 repo_path = os.getcwd()
37 repo_path = os.getcwd()
32 push_data = sys.stdin.readlines()
38 push_data = sys.stdin.readlines()
33 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
39
34 # os.environ is modified here by a subprocess call that
40 # os.environ is modified here by a subprocess call that
35 # runs git and later git executes this hook.
41 # runs git and later git executes this hook.
36 # Environ gets some additional info from rhodecode system
42 # Environ gets some additional info from rhodecode system
37 # like IP or username from basic-auth
43 # like IP or username from basic-auth
44
45 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
38 try:
46 try:
39 result = hooks.git_pre_receive(repo_path, push_data, os.environ)
47 result = hooks.git_pre_receive(repo_path, push_data, os.environ)
40 sys.exit(result)
48 sys.exit(result)
41 except Exception as error:
49 except Exception as error:
42 # TODO: johbo: Improve handling of this special case
50 # TODO: johbo: Improve handling of this special case
43 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
51 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
44 raise
52 raise
45 print(f'ERROR: {error}')
53 print(f'ERROR: {error}')
46 sys.exit(1)
54 sys.exit(1)
47 sys.exit(0)
55 sys.exit(0)
48
56
49
57
50 if __name__ == '__main__':
58 if __name__ == '__main__':
51 main()
59 main()
@@ -1,54 +1,54 b''
1 #!_ENV_
1 #!_ENV_
2
2
3 import os
3 import os
4 import sys
4 import sys
5 path_adjust = [_PATH_]
5 path_adjust = [_PATH_]
6
6
7 if path_adjust:
7 if path_adjust:
8 sys.path = path_adjust
8 sys.path = path_adjust
9
9
10 # special trick to pass in some information from rc to hooks
11 # mod_dav strips ALL env vars and we can't even access things like PATH
12 for env_k, env_v in _OS_EXPAND_:
13 os.environ[env_k] = env_v
14
10 try:
15 try:
11 from vcsserver import hooks
16 from vcsserver import hooks
12 except ImportError:
17 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
18 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
19 import traceback
15 print(traceback.format_exc())
20 print(traceback.format_exc())
16 hooks = None
21 hooks = None
17
22
18
23
19 # TIMESTAMP: _DATE_
24 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
25 RC_HOOK_VER = '_TMPL_'
21
26
22
27
23 # special trick to pass in some information from rc to hooks
24 # mod_dav strips ALL env vars and we can't even access things like PATH
25 for env_k, env_v in _OS_EXPAND_:
26 os.environ[env_k] = env_v
27
28 def main():
28 def main():
29 if hooks is None:
29 if hooks is None:
30 # exit with success if we cannot import vcsserver.hooks !!
30 # exit with success if we cannot import vcsserver.hooks !!
31 # this allows simply pushing to this repo even without rhodecode
31 # this allows simply pushing to this repo even without rhodecode
32 sys.exit(0)
32 sys.exit(0)
33
33
34 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_SVN_HOOKS'):
34 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_SVN_HOOKS'):
35 sys.exit(0)
35 sys.exit(0)
36 repo_path = os.getcwd()
36 cwd_repo_path = os.getcwd()
37 push_data = sys.argv[1:]
37 push_data = sys.argv[1:]
38
38
39 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
39 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
40
40
41 try:
41 try:
42 result = hooks.svn_post_commit(repo_path, push_data, os.environ)
42 result = hooks.svn_post_commit(cwd_repo_path, push_data, os.environ)
43 sys.exit(result)
43 sys.exit(result)
44 except Exception as error:
44 except Exception as error:
45 # TODO: johbo: Improve handling of this special case
45 # TODO: johbo: Improve handling of this special case
46 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
46 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
47 raise
47 raise
48 print(f'ERROR: {error}')
48 print(f'ERROR: {error}')
49 sys.exit(1)
49 sys.exit(1)
50 sys.exit(0)
50 sys.exit(0)
51
51
52
52
53 if __name__ == '__main__':
53 if __name__ == '__main__':
54 main()
54 main()
@@ -1,58 +1,57 b''
1 #!_ENV_
1 #!_ENV_
2
2
3 import os
3 import os
4 import sys
4 import sys
5 path_adjust = [_PATH_]
5 path_adjust = [_PATH_]
6
6
7 if path_adjust:
7 if path_adjust:
8 sys.path = path_adjust
8 sys.path = path_adjust
9
9
10 # special trick to pass in some information from rc to hooks
11 # mod_dav strips ALL env vars and we can't even access things like PATH
12 for env_k, env_v in _OS_EXPAND_:
13 os.environ[env_k] = env_v
14
10 try:
15 try:
11 from vcsserver import hooks
16 from vcsserver import hooks
12 except ImportError:
17 except ImportError:
13 if os.environ.get('RC_DEBUG_SVN_HOOK'):
18 if os.environ.get('RC_DEBUG_SVN_HOOK'):
14 import traceback
19 import traceback
15 print(traceback.format_exc())
20 print(traceback.format_exc())
16 hooks = None
21 hooks = None
17
22
18
23
19 # TIMESTAMP: _DATE_
24 # TIMESTAMP: _DATE_
20 RC_HOOK_VER = '_TMPL_'
25 RC_HOOK_VER = '_TMPL_'
21
26
22
27
23 # special trick to pass in some information from rc to hooks
24 # mod_dav strips ALL env vars and we can't even access things like PATH
25 for env_k, env_v in _OS_EXPAND_:
26 os.environ[env_k] = env_v
27
28 def main():
28 def main():
29 if os.environ.get('SSH_READ_ONLY') == '1':
29 if os.environ.get('SSH_READ_ONLY') == '1':
30 sys.stderr.write('Only read-only access is allowed')
30 sys.stderr.write('Only read-only access is allowed')
31 sys.exit(1)
31 sys.exit(1)
32
32
33 if hooks is None:
33 if hooks is None:
34 # exit with success if we cannot import vcsserver.hooks !!
34 # exit with success if we cannot import vcsserver.hooks !!
35 # this allows simply pushing to this repo even without rhodecode
35 # this allows simply pushing to this repo even without rhodecode
36 sys.exit(0)
36 sys.exit(0)
37
37
38 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_SVN_HOOKS'):
38 if os.environ.get('RC_SKIP_HOOKS') or os.environ.get('RC_SKIP_SVN_HOOKS'):
39 sys.exit(0)
39 sys.exit(0)
40 repo_path = os.getcwd()
40 cwd_repo_path = os.getcwd()
41 push_data = sys.argv[1:]
41 push_data = sys.argv[1:]
42
42
43 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
43 os.environ['RC_HOOK_VER'] = RC_HOOK_VER
44
45 try:
44 try:
46 result = hooks.svn_pre_commit(repo_path, push_data, os.environ)
45 result = hooks.svn_pre_commit(cwd_repo_path, push_data, os.environ)
47 sys.exit(result)
46 sys.exit(result)
48 except Exception as error:
47 except Exception as error:
49 # TODO: johbo: Improve handling of this special case
48 # TODO: johbo: Improve handling of this special case
50 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
49 if not getattr(error, '_vcs_kind', None) == 'repo_locked':
51 raise
50 raise
52 print(f'ERROR: {error}')
51 print(f'ERROR: {error}')
53 sys.exit(1)
52 sys.exit(1)
54 sys.exit(0)
53 sys.exit(0)
55
54
56
55
57 if __name__ == '__main__':
56 if __name__ == '__main__':
58 main()
57 main()
@@ -1,832 +1,822 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21 import logging
21 import logging
22 import collections
22 import collections
23 import base64
23 import base64
24 import msgpack
24 import msgpack
25 import dataclasses
25 import dataclasses
26 import pygit2
26 import pygit2
27
27
28 import http.client
28 import http.client
29 from celery import Celery
29 from celery import Celery
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33
33
34 from vcsserver import exceptions, subprocessio, settings
34 from vcsserver.lib.ext_json import json
35 from vcsserver.lib.ext_json import json
35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.lib.str_utils import ascii_str, safe_str
36 from vcsserver.lib.str_utils import ascii_str, safe_str
37 from vcsserver.lib.svn_txn_utils import get_txn_id_from_store
37 from vcsserver.remote.git_remote import Repository
38 from vcsserver.remote.git_remote import Repository
38
39
39 celery_app = Celery('__vcsserver__')
40 celery_app = Celery('__vcsserver__')
40 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
41
42
42
43
43 class HooksHttpClient:
44 class HooksHttpClient:
44 proto = 'msgpack.v1'
45 proto = 'msgpack.v1'
45 connection = None
46 connection = None
46
47
47 def __init__(self, hooks_uri):
48 def __init__(self, hooks_uri):
48 self.hooks_uri = hooks_uri
49 self.hooks_uri = hooks_uri
49
50
50 def __repr__(self):
51 def __repr__(self):
51 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52 return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'
52
53
53 def __call__(self, method, extras):
54 def __call__(self, method, extras):
54 connection = http.client.HTTPConnection(self.hooks_uri)
55 connection = http.client.HTTPConnection(self.hooks_uri)
55 # binary msgpack body
56 # binary msgpack body
56 headers, body = self._serialize(method, extras)
57 headers, body = self._serialize(method, extras)
57 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58 log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)
58
59
59 try:
60 try:
60 try:
61 try:
61 connection.request('POST', '/', body, headers)
62 connection.request('POST', '/', body, headers)
62 except Exception as error:
63 except Exception as error:
63 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
64 raise
65 raise
65
66
66 response = connection.getresponse()
67 response = connection.getresponse()
67 try:
68 try:
68 return msgpack.load(response)
69 return msgpack.load(response)
69 except Exception:
70 except Exception:
70 response_data = response.read()
71 response_data = response.read()
71 log.exception('Failed to decode hook response data. '
72 log.exception('Failed to decode hook response data. '
72 'response_code:%s, raw_data:%s',
73 'response_code:%s, raw_data:%s',
73 response.status, response_data)
74 response.status, response_data)
74 raise
75 raise
75 finally:
76 finally:
76 connection.close()
77 connection.close()
77
78
78 @classmethod
79 @classmethod
79 def _serialize(cls, hook_name, extras):
80 def _serialize(cls, hook_name, extras):
80 data = {
81 data = {
81 'method': hook_name,
82 'method': hook_name,
82 'extras': extras
83 'extras': extras
83 }
84 }
84 headers = {
85 headers = {
85 "rc-hooks-protocol": cls.proto,
86 "rc-hooks-protocol": cls.proto,
86 "Connection": "keep-alive"
87 "Connection": "keep-alive"
87 }
88 }
88 return headers, msgpack.packb(data)
89 return headers, msgpack.packb(data)
89
90
90
91
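A minimal sketch (not part of this diff) of the request that HooksHttpClient produces; it assumes the default msgpack-python packing/unpacking on both ends, and the 'repository' extras key is only an example value:

import msgpack

headers, body = HooksHttpClient._serialize('pre_push', {'repository': 'repo1'})
# the protocol marker the receiving hooks daemon is expected to check
assert headers['rc-hooks-protocol'] == HooksHttpClient.proto  # 'msgpack.v1'
# the body is a binary msgpack document of the form {'method': ..., 'extras': ...}
payload = msgpack.unpackb(body)
assert payload == {'method': 'pre_push', 'extras': {'repository': 'repo1'}}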
91 class HooksCeleryClient:
92 class HooksCeleryClient:
92 TASK_TIMEOUT = 60 # time in seconds
93 TASK_TIMEOUT = 60 # time in seconds
93
94
94 def __init__(self, queue, backend):
95 def __init__(self, queue, backend):
95 celery_app.config_from_object({
96 celery_app.config_from_object({
96 'broker_url': queue, 'result_backend': backend,
97 'broker_url': queue, 'result_backend': backend,
97 'broker_connection_retry_on_startup': True,
98 'broker_connection_retry_on_startup': True,
98 'task_serializer': 'json',
99 'task_serializer': 'json',
99 'accept_content': ['json', 'msgpack'],
100 'accept_content': ['json', 'msgpack'],
100 'result_serializer': 'json',
101 'result_serializer': 'json',
101 'result_accept_content': ['json', 'msgpack']
102 'result_accept_content': ['json', 'msgpack']
102 })
103 })
103 self.celery_app = celery_app
104 self.celery_app = celery_app
104
105
105 def __call__(self, method, extras):
106 def __call__(self, method, extras):
106 inquired_task = self.celery_app.signature(
107 inquired_task = self.celery_app.signature(
107 f'rhodecode.lib.celerylib.tasks.{method}'
108 f'rhodecode.lib.celerylib.tasks.{method}'
108 )
109 )
109 return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
110 return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)
110
111
111
112
112 class HooksShadowRepoClient:
113 class HooksShadowRepoClient:
113
114
114 def __call__(self, hook_name, extras):
115 def __call__(self, hook_name, extras):
115 return {'output': '', 'status': 0}
116 return {'output': '', 'status': 0}
116
117
117
118
118 class RemoteMessageWriter:
119 class RemoteMessageWriter:
119 """Writer base class."""
120 """Writer base class."""
120 def write(self, message):
121 def write(self, message):
121 raise NotImplementedError()
122 raise NotImplementedError()
122
123
123
124
124 class HgMessageWriter(RemoteMessageWriter):
125 class HgMessageWriter(RemoteMessageWriter):
125 """Writer that knows how to send messages to mercurial clients."""
126 """Writer that knows how to send messages to mercurial clients."""
126
127
127 def __init__(self, ui):
128 def __init__(self, ui):
128 self.ui = ui
129 self.ui = ui
129
130
130 def write(self, message: str):
131 def write(self, message: str):
131 # TODO: Check why the quiet flag is set by default.
132 # TODO: Check why the quiet flag is set by default.
132 old = self.ui.quiet
133 old = self.ui.quiet
133 self.ui.quiet = False
134 self.ui.quiet = False
134 self.ui.status(message.encode('utf-8'))
135 self.ui.status(message.encode('utf-8'))
135 self.ui.quiet = old
136 self.ui.quiet = old
136
137
137
138
138 class GitMessageWriter(RemoteMessageWriter):
139 class GitMessageWriter(RemoteMessageWriter):
139 """Writer that knows how to send messages to git clients."""
140 """Writer that knows how to send messages to git clients."""
140
141
141 def __init__(self, stdout=None):
142 def __init__(self, stdout=None):
142 self.stdout = stdout or sys.stdout
143 self.stdout = stdout or sys.stdout
143
144
144 def write(self, message: str):
145 def write(self, message: str):
145 self.stdout.write(message)
146 self.stdout.write(message)
146
147
147
148
148 class SvnMessageWriter(RemoteMessageWriter):
149 class SvnMessageWriter(RemoteMessageWriter):
149 """Writer that knows how to send messages to svn clients."""
150 """Writer that knows how to send messages to svn clients."""
150
151
151 def __init__(self, stderr=None):
152 def __init__(self, stderr=None):
152 # SVN needs data sent to stderr for back-to-client messaging
153 # SVN needs data sent to stderr for back-to-client messaging
153 self.stderr = stderr or sys.stderr
154 self.stderr = stderr or sys.stderr
154
155
155 def write(self, message):
156 def write(self, message):
156 self.stderr.write(message)
157 self.stderr.write(message)
157
158
158
159
159 def _handle_exception(result):
160 def _handle_exception(result):
160 exception_class = result.get('exception')
161 exception_class = result.get('exception')
161 exception_traceback = result.get('exception_traceback')
162 exception_traceback = result.get('exception_traceback')
162 log.debug('Handling hook-call exception: %s', exception_class)
163 log.debug('Handling hook-call exception: %s', exception_class)
163
164
164 if exception_traceback:
165 if exception_traceback:
165 log.error('Got traceback from remote call:%s', exception_traceback)
166 log.error('Got traceback from remote call:%s', exception_traceback)
166
167
167 if exception_class == 'HTTPLockedRC':
168 if exception_class == 'HTTPLockedRC':
168 raise exceptions.RepositoryLockedException()(*result['exception_args'])
169 raise exceptions.RepositoryLockedException()(*result['exception_args'])
169 elif exception_class == 'HTTPBranchProtected':
170 elif exception_class == 'HTTPBranchProtected':
170 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
171 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
171 elif exception_class == 'RepositoryError':
172 elif exception_class == 'RepositoryError':
172 raise exceptions.VcsException()(*result['exception_args'])
173 raise exceptions.VcsException()(*result['exception_args'])
173 elif exception_class:
174 elif exception_class:
174 raise Exception(
175 raise Exception(
175 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
176 f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
176 )
177 )
177
178
178
179
179 def _get_hooks_client(extras):
180 def _get_hooks_client(extras):
180 hooks_uri = extras.get('hooks_uri')
181 hooks_uri = extras.get('hooks_uri')
181 task_queue = extras.get('task_queue')
182 task_queue = extras.get('task_queue')
182 task_backend = extras.get('task_backend')
183 task_backend = extras.get('task_backend')
183 is_shadow_repo = extras.get('is_shadow_repo')
184 is_shadow_repo = extras.get('is_shadow_repo')
184
185
185 if hooks_uri:
186 if hooks_uri:
186 return HooksHttpClient(hooks_uri)
187 return HooksHttpClient(hooks_uri)
187 elif task_queue and task_backend:
188 elif task_queue and task_backend:
188 return HooksCeleryClient(task_queue, task_backend)
189 return HooksCeleryClient(task_queue, task_backend)
189 elif is_shadow_repo:
190 elif is_shadow_repo:
190 return HooksShadowRepoClient()
191 return HooksShadowRepoClient()
191 else:
192 else:
192 raise Exception("Hooks client not found!")
193 raise Exception("Hooks client not found!")
193
194
194
195
195 def _call_hook(hook_name, extras, writer):
196 def _call_hook(hook_name, extras, writer):
196 hooks_client = _get_hooks_client(extras)
197 hooks_client = _get_hooks_client(extras)
197 log.debug('Hooks, using client:%s', hooks_client)
198 log.debug('Hooks, using client:%s', hooks_client)
198 result = hooks_client(hook_name, extras)
199 result = hooks_client(hook_name, extras)
199 log.debug('Hooks got result: %s', result)
200 log.debug('Hooks got result: %s', result)
200 _handle_exception(result)
201 _handle_exception(result)
201 writer.write(result['output'])
202 writer.write(result['output'])
202
203
203 return result['status']
204 return result['status']
204
205
205
206
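Illustrative only: how the extras dict drives client selection in _get_hooks_client, with placeholder endpoint values, and the result shape that _call_hook and _handle_exception read back:

# extras coming from RC_SCM_DATA decide which transport is used
http_extras = {'hooks_uri': '127.0.0.1:10020'}                                   # -> HooksHttpClient
celery_extras = {'task_queue': 'redis://localhost', 'task_backend': 'redis://localhost'}  # -> HooksCeleryClient
shadow_extras = {'is_shadow_repo': True}                                         # -> HooksShadowRepoClient

client = _get_hooks_client(http_extras)
# the returned result dict is expected to contain at least 'status' and 'output';
# 'exception', 'exception_args' and 'exception_traceback' are the optional error
# fields inspected by _handle_exception before 'output' is written and 'status' returned.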
206 def _extras_from_ui(ui):
207 def _extras_from_ui(ui):
207 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
208 hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
208 if not hook_data:
209 if not hook_data:
209 # maybe it's inside environ?
210 # maybe it's inside environ?
210 env_hook_data = os.environ.get('RC_SCM_DATA')
211 env_hook_data = os.environ.get('RC_SCM_DATA')
211 if env_hook_data:
212 if env_hook_data:
212 hook_data = env_hook_data
213 hook_data = env_hook_data
213
214
214 extras = {}
215 extras = {}
215 if hook_data:
216 if hook_data:
216 extras = json.loads(hook_data)
217 extras = json.loads(hook_data)
217 return extras
218 return extras
218
219
219
220
220 def _rev_range_hash(repo, node, check_heads=False):
221 def _rev_range_hash(repo, node, check_heads=False):
221 from vcsserver.hgcompat import get_ctx
222 from vcsserver.hgcompat import get_ctx
222
223
223 commits = []
224 commits = []
224 revs = []
225 revs = []
225 start = get_ctx(repo, node).rev()
226 start = get_ctx(repo, node).rev()
226 end = len(repo)
227 end = len(repo)
227 for rev in range(start, end):
228 for rev in range(start, end):
228 revs.append(rev)
229 revs.append(rev)
229 ctx = get_ctx(repo, rev)
230 ctx = get_ctx(repo, rev)
230 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
231 commit_id = ascii_str(mercurial.node.hex(ctx.node()))
231 branch = safe_str(ctx.branch())
232 branch = safe_str(ctx.branch())
232 commits.append((commit_id, branch))
233 commits.append((commit_id, branch))
233
234
234 parent_heads = []
235 parent_heads = []
235 if check_heads:
236 if check_heads:
236 parent_heads = _check_heads(repo, start, end, revs)
237 parent_heads = _check_heads(repo, start, end, revs)
237 return commits, parent_heads
238 return commits, parent_heads
238
239
239
240
240 def _check_heads(repo, start, end, commits):
241 def _check_heads(repo, start, end, commits):
241 from vcsserver.hgcompat import get_ctx
242 from vcsserver.hgcompat import get_ctx
242 changelog = repo.changelog
243 changelog = repo.changelog
243 parents = set()
244 parents = set()
244
245
245 for new_rev in commits:
246 for new_rev in commits:
246 for p in changelog.parentrevs(new_rev):
247 for p in changelog.parentrevs(new_rev):
247 if p == mercurial.node.nullrev:
248 if p == mercurial.node.nullrev:
248 continue
249 continue
249 if p < start:
250 if p < start:
250 parents.add(p)
251 parents.add(p)
251
252
252 for p in parents:
253 for p in parents:
253 branch = get_ctx(repo, p).branch()
254 branch = get_ctx(repo, p).branch()
254 # The heads descending from that parent, on the same branch
255 # The heads descending from that parent, on the same branch
255 parent_heads = {p}
256 parent_heads = {p}
256 reachable = {p}
257 reachable = {p}
257 for x in range(p + 1, end):
258 for x in range(p + 1, end):
258 if get_ctx(repo, x).branch() != branch:
259 if get_ctx(repo, x).branch() != branch:
259 continue
260 continue
260 for pp in changelog.parentrevs(x):
261 for pp in changelog.parentrevs(x):
261 if pp in reachable:
262 if pp in reachable:
262 reachable.add(x)
263 reachable.add(x)
263 parent_heads.discard(pp)
264 parent_heads.discard(pp)
264 parent_heads.add(x)
265 parent_heads.add(x)
265 # More than one head? Suggest merging
266 # More than one head? Suggest merging
266 if len(parent_heads) > 1:
267 if len(parent_heads) > 1:
267 return list(parent_heads)
268 return list(parent_heads)
268
269
269 return []
270 return []
270
271
271
272
272 def _get_git_env():
273 def _get_git_env():
273 env = {}
274 env = {}
274 for k, v in os.environ.items():
275 for k, v in os.environ.items():
275 if k.startswith('GIT'):
276 if k.startswith('GIT'):
276 env[k] = v
277 env[k] = v
277
278
278 # serialized version
279 # serialized version
279 return [(k, v) for k, v in env.items()]
280 return [(k, v) for k, v in env.items()]
280
281
281
282
282 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
283 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
283 env = {}
284 env = {}
284 for k, v in os.environ.items():
285 for k, v in os.environ.items():
285 if k.startswith('HG'):
286 if k.startswith('HG'):
286 env[k] = v
287 env[k] = v
287
288
288 env['HG_NODE'] = old_rev
289 env['HG_NODE'] = old_rev
289 env['HG_NODE_LAST'] = new_rev
290 env['HG_NODE_LAST'] = new_rev
290 env['HG_TXNID'] = txnid
291 env['HG_TXNID'] = txnid
291 env['HG_PENDING'] = repo_path
292 env['HG_PENDING'] = repo_path
292
293
293 return [(k, v) for k, v in env.items()]
294 return [(k, v) for k, v in env.items()]
294
295
295
296
297 def _get_ini_settings(ini_file):
298 from vcsserver.http_main import sanitize_settings_and_apply_defaults
299 from vcsserver.lib.config_utils import get_app_config_lightweight, configure_and_store_settings
300
301 global_config = {'__file__': ini_file}
302 ini_settings = get_app_config_lightweight(ini_file)
303 sanitize_settings_and_apply_defaults(global_config, ini_settings)
304 configure_and_store_settings(global_config, ini_settings)
305
306 return ini_settings
307
308
296 def _fix_hooks_executables(ini_path=''):
309 def _fix_hooks_executables(ini_path=''):
297 """
310 """
298 This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns,
311 This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns,
299 especially for subversion, where hooks strip the entire env and calling just the 'svn' command will most likely fail
312 especially for subversion, where hooks strip the entire env and calling just the 'svn' command will most likely fail
300 because svn is not on PATH
313 because svn is not on PATH
301 """
314 """
302 from vcsserver.http_main import sanitize_settings_and_apply_defaults
315 # set defaults, in case we can't read from ini_file
303 from vcsserver.lib.config_utils import get_app_config_lightweight
304
305 core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
316 core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
306 if ini_path:
317 if ini_path:
307
318 ini_settings = _get_ini_settings(ini_path)
308 ini_settings = get_app_config_lightweight(ini_path)
309 ini_settings = sanitize_settings_and_apply_defaults({'__file__': ini_path}, ini_settings)
310 core_binary_dir = ini_settings['core.binary_dir']
319 core_binary_dir = ini_settings['core.binary_dir']
311
320
312 settings.BINARY_DIR = core_binary_dir
321 settings.BINARY_DIR = core_binary_dir
313
322
314
323
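A hedged usage sketch: the hook entry points below call this with the path taken from the RC_INI_FILE environment variable; the .ini path used here is hypothetical:

# hypothetical ini path; the real hooks pass env.get('RC_INI_FILE')
_fix_hooks_executables('/etc/rhodecode/conf/vcsserver.ini')
# after this, settings.BINARY_DIR comes from core.binary_dir in the ini (or the
# built-in default), so settings.GIT_EXECUTABLE()/SVNLOOK_EXECUTABLE() are assumed
# to resolve even when the svn/git hook runs with a stripped environment.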
315 def repo_size(ui, repo, **kwargs):
324 def repo_size(ui, repo, **kwargs):
316 extras = _extras_from_ui(ui)
325 extras = _extras_from_ui(ui)
317 return _call_hook('repo_size', extras, HgMessageWriter(ui))
326 return _call_hook('repo_size', extras, HgMessageWriter(ui))
318
327
319
328
320 def pre_pull(ui, repo, **kwargs):
329 def pre_pull(ui, repo, **kwargs):
321 extras = _extras_from_ui(ui)
330 extras = _extras_from_ui(ui)
322 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
331 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
323
332
324
333
325 def pre_pull_ssh(ui, repo, **kwargs):
334 def pre_pull_ssh(ui, repo, **kwargs):
326 extras = _extras_from_ui(ui)
335 extras = _extras_from_ui(ui)
327 if extras and extras.get('SSH'):
336 if extras and extras.get('SSH'):
328 return pre_pull(ui, repo, **kwargs)
337 return pre_pull(ui, repo, **kwargs)
329 return 0
338 return 0
330
339
331
340
332 def post_pull(ui, repo, **kwargs):
341 def post_pull(ui, repo, **kwargs):
333 extras = _extras_from_ui(ui)
342 extras = _extras_from_ui(ui)
334 return _call_hook('post_pull', extras, HgMessageWriter(ui))
343 return _call_hook('post_pull', extras, HgMessageWriter(ui))
335
344
336
345
337 def post_pull_ssh(ui, repo, **kwargs):
346 def post_pull_ssh(ui, repo, **kwargs):
338 extras = _extras_from_ui(ui)
347 extras = _extras_from_ui(ui)
339 if extras and extras.get('SSH'):
348 if extras and extras.get('SSH'):
340 return post_pull(ui, repo, **kwargs)
349 return post_pull(ui, repo, **kwargs)
341 return 0
350 return 0
342
351
343
352
344 def pre_push(ui, repo, node=None, **kwargs):
353 def pre_push(ui, repo, node=None, **kwargs):
345 """
354 """
346 Mercurial pre_push hook
355 Mercurial pre_push hook
347 """
356 """
348 extras = _extras_from_ui(ui)
357 extras = _extras_from_ui(ui)
349 detect_force_push = extras.get('detect_force_push')
358 detect_force_push = extras.get('detect_force_push')
350
359
351 rev_data = []
360 rev_data = []
352 hook_type: str = safe_str(kwargs.get('hooktype'))
361 hook_type: str = safe_str(kwargs.get('hooktype'))
353
362
354 if node and hook_type == 'pretxnchangegroup':
363 if node and hook_type == 'pretxnchangegroup':
355 branches = collections.defaultdict(list)
364 branches = collections.defaultdict(list)
356 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
365 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
357 for commit_id, branch in commits:
366 for commit_id, branch in commits:
358 branches[branch].append(commit_id)
367 branches[branch].append(commit_id)
359
368
360 for branch, commits in branches.items():
369 for branch, commits in branches.items():
361 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
370 old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
362 rev_data.append({
371 rev_data.append({
363 'total_commits': len(commits),
372 'total_commits': len(commits),
364 'old_rev': old_rev,
373 'old_rev': old_rev,
365 'new_rev': commits[-1],
374 'new_rev': commits[-1],
366 'ref': '',
375 'ref': '',
367 'type': 'branch',
376 'type': 'branch',
368 'name': branch,
377 'name': branch,
369 })
378 })
370
379
371 for push_ref in rev_data:
380 for push_ref in rev_data:
372 push_ref['multiple_heads'] = _heads
381 push_ref['multiple_heads'] = _heads
373
382
374 repo_path = os.path.join(
383 repo_path = os.path.join(
375 extras.get('repo_store', ''), extras.get('repository', ''))
384 extras.get('repo_store', ''), extras.get('repository', ''))
376 push_ref['hg_env'] = _get_hg_env(
385 push_ref['hg_env'] = _get_hg_env(
377 old_rev=push_ref['old_rev'],
386 old_rev=push_ref['old_rev'],
378 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
387 new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
379 repo_path=repo_path)
388 repo_path=repo_path)
380
389
381 extras['hook_type'] = hook_type or 'pre_push'
390 extras['hook_type'] = hook_type or 'pre_push'
382 extras['commit_ids'] = rev_data
391 extras['commit_ids'] = rev_data
383
392
384 return _call_hook('pre_push', extras, HgMessageWriter(ui))
393 return _call_hook('pre_push', extras, HgMessageWriter(ui))
385
394
386
395
387 def pre_push_ssh(ui, repo, node=None, **kwargs):
396 def pre_push_ssh(ui, repo, node=None, **kwargs):
388 extras = _extras_from_ui(ui)
397 extras = _extras_from_ui(ui)
389 if extras.get('SSH'):
398 if extras.get('SSH'):
390 return pre_push(ui, repo, node, **kwargs)
399 return pre_push(ui, repo, node, **kwargs)
391
400
392 return 0
401 return 0
393
402
394
403
395 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
404 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
396 """
405 """
397 Mercurial pre_push hook for SSH
406 Mercurial pre_push hook for SSH
398 """
407 """
399 extras = _extras_from_ui(ui)
408 extras = _extras_from_ui(ui)
400 if extras.get('SSH'):
409 if extras.get('SSH'):
401 permission = extras['SSH_PERMISSIONS']
410 permission = extras['SSH_PERMISSIONS']
402
411
403 if 'repository.write' == permission or 'repository.admin' == permission:
412 if 'repository.write' == permission or 'repository.admin' == permission:
404 return 0
413 return 0
405
414
406 # non-zero ret code
415 # non-zero ret code
407 return 1
416 return 1
408
417
409 return 0
418 return 0
410
419
411
420
412 def post_push(ui, repo, node, **kwargs):
421 def post_push(ui, repo, node, **kwargs):
413 """
422 """
414 Mercurial post_push hook
423 Mercurial post_push hook
415 """
424 """
416 extras = _extras_from_ui(ui)
425 extras = _extras_from_ui(ui)
417
426
418 commit_ids = []
427 commit_ids = []
419 branches = []
428 branches = []
420 bookmarks = []
429 bookmarks = []
421 tags = []
430 tags = []
422 hook_type: str = safe_str(kwargs.get('hooktype'))
431 hook_type: str = safe_str(kwargs.get('hooktype'))
423
432
424 commits, _heads = _rev_range_hash(repo, node)
433 commits, _heads = _rev_range_hash(repo, node)
425 for commit_id, branch in commits:
434 for commit_id, branch in commits:
426 commit_ids.append(commit_id)
435 commit_ids.append(commit_id)
427 if branch not in branches:
436 if branch not in branches:
428 branches.append(branch)
437 branches.append(branch)
429
438
430 if hasattr(ui, '_rc_pushkey_bookmarks'):
439 if hasattr(ui, '_rc_pushkey_bookmarks'):
431 bookmarks = ui._rc_pushkey_bookmarks
440 bookmarks = ui._rc_pushkey_bookmarks
432
441
433 extras['hook_type'] = hook_type or 'post_push'
442 extras['hook_type'] = hook_type or 'post_push'
434 extras['commit_ids'] = commit_ids
443 extras['commit_ids'] = commit_ids
435
444
436 extras['new_refs'] = {
445 extras['new_refs'] = {
437 'branches': branches,
446 'branches': branches,
438 'bookmarks': bookmarks,
447 'bookmarks': bookmarks,
439 'tags': tags
448 'tags': tags
440 }
449 }
441
450
442 return _call_hook('post_push', extras, HgMessageWriter(ui))
451 return _call_hook('post_push', extras, HgMessageWriter(ui))
443
452
444
453
445 def post_push_ssh(ui, repo, node, **kwargs):
454 def post_push_ssh(ui, repo, node, **kwargs):
446 """
455 """
447 Mercurial post_push hook for SSH
456 Mercurial post_push hook for SSH
448 """
457 """
449 if _extras_from_ui(ui).get('SSH'):
458 if _extras_from_ui(ui).get('SSH'):
450 return post_push(ui, repo, node, **kwargs)
459 return post_push(ui, repo, node, **kwargs)
451 return 0
460 return 0
452
461
453
462
454 def key_push(ui, repo, **kwargs):
463 def key_push(ui, repo, **kwargs):
455 from vcsserver.hgcompat import get_ctx
464 from vcsserver.hgcompat import get_ctx
456
465
457 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
466 if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
458 # store new bookmarks in our UI object propagated later to post_push
467 # store new bookmarks in our UI object propagated later to post_push
459 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
468 ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
460 return
469 return
461
470
462
471
463 # backward compat
472 # backward compat
464 log_pull_action = post_pull
473 log_pull_action = post_pull
465
474
466 # backward compat
475 # backward compat
467 log_push_action = post_push
476 log_push_action = post_push
468
477
469
478
470 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
479 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
471 """
480 """
472 Old hook name: keep here for backward compatibility.
481 Old hook name: keep here for backward compatibility.
473
482
474 This is only required when the installed git hooks are not upgraded.
483 This is only required when the installed git hooks are not upgraded.
475 """
484 """
476 pass
485 pass
477
486
478
487
479 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
488 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
480 """
489 """
481 Old hook name: keep here for backward compatibility.
490 Old hook name: keep here for backward compatibility.
482
491
483 This is only required when the installed git hooks are not upgraded.
492 This is only required when the installed git hooks are not upgraded.
484 """
493 """
485 pass
494 pass
486
495
487
496
488 @dataclasses.dataclass
497 @dataclasses.dataclass
489 class HookResponse:
498 class HookResponse:
490 status: int
499 status: int
491 output: str
500 output: str
492
501
493
502
494 def git_pre_pull(extras) -> HookResponse:
503 def git_pre_pull(extras) -> HookResponse:
495 """
504 """
496 Pre pull hook.
505 Pre pull hook.
497
506
498 :param extras: dictionary containing the keys defined in simplevcs
507 :param extras: dictionary containing the keys defined in simplevcs
499 :type extras: dict
508 :type extras: dict
500
509
501 :return: HookResponse carrying the hook status code (0 for success) and output.
510 :return: HookResponse carrying the hook status code (0 for success) and output.
502 :rtype: HookResponse
511 :rtype: HookResponse
503 """
512 """
504
513
505 if 'pull' not in extras['hooks']:
514 if 'pull' not in extras['hooks']:
506 return HookResponse(0, '')
515 return HookResponse(0, '')
507
516
508 stdout = io.StringIO()
517 stdout = io.StringIO()
509 try:
518 try:
510 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
519 status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
511
520
512 except Exception as error:
521 except Exception as error:
513 log.exception('Failed to call pre_pull hook')
522 log.exception('Failed to call pre_pull hook')
514 status_code = 128
523 status_code = 128
515 stdout.write(f'ERROR: {error}\n')
524 stdout.write(f'ERROR: {error}\n')
516
525
517 return HookResponse(status_code, stdout.getvalue())
526 return HookResponse(status_code, stdout.getvalue())
518
527
519
528
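Illustrative only: the short-circuit behaviour documented above, using a minimal, made-up extras dict; git_post_pull behaves the same way when the 'pull' hook is not enabled:

extras = {'hooks': ['push'], 'hooks_uri': '127.0.0.1:10020'}
resp = git_pre_pull(extras)
assert (resp.status, resp.output) == (0, '')  # 'pull' not in extras['hooks'] -> no remote call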
520 def git_post_pull(extras) -> HookResponse:
529 def git_post_pull(extras) -> HookResponse:
521 """
530 """
522 Post pull hook.
531 Post pull hook.
523
532
524 :param extras: dictionary containing the keys defined in simplevcs
533 :param extras: dictionary containing the keys defined in simplevcs
525 :type extras: dict
534 :type extras: dict
526
535
527 :return: HookResponse carrying the hook status code (0 for success) and output.
536 :return: HookResponse carrying the hook status code (0 for success) and output.
528 :rtype: HookResponse
537 :rtype: HookResponse
529 """
538 """
530 if 'pull' not in extras['hooks']:
539 if 'pull' not in extras['hooks']:
531 return HookResponse(0, '')
540 return HookResponse(0, '')
532
541
533 stdout = io.StringIO()
542 stdout = io.StringIO()
534 try:
543 try:
535 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
544 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
536 except Exception as error:
545 except Exception as error:
537 status = 128
546 status = 128
538 stdout.write(f'ERROR: {error}\n')
547 stdout.write(f'ERROR: {error}\n')
539
548
540 return HookResponse(status, stdout.getvalue())
549 return HookResponse(status, stdout.getvalue())
541
550
542
551
543 def _parse_git_ref_lines(revision_lines):
552 def _parse_git_ref_lines(revision_lines):
544 rev_data = []
553 rev_data = []
545 for revision_line in revision_lines or []:
554 for revision_line in revision_lines or []:
546 old_rev, new_rev, ref = revision_line.strip().split(' ')
555 old_rev, new_rev, ref = revision_line.strip().split(' ')
547 ref_data = ref.split('/', 2)
556 ref_data = ref.split('/', 2)
548 if ref_data[1] in ('tags', 'heads'):
557 if ref_data[1] in ('tags', 'heads'):
549 rev_data.append({
558 rev_data.append({
550 # NOTE(marcink):
559 # NOTE(marcink):
551 # we're unable to tell total_commits for git at this point
560 # we're unable to tell total_commits for git at this point
552 # but we set the variable for consistency with the other backends
561 # but we set the variable for consistency with the other backends
553 'total_commits': -1,
562 'total_commits': -1,
554 'old_rev': old_rev,
563 'old_rev': old_rev,
555 'new_rev': new_rev,
564 'new_rev': new_rev,
556 'ref': ref,
565 'ref': ref,
557 'type': ref_data[1],
566 'type': ref_data[1],
558 'name': ref_data[2],
567 'name': ref_data[2],
559 })
568 })
560 return rev_data
569 return rev_data
561
570
562
571
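Illustrative only: what the parser above produces for a typical pre-receive stdin line (the sha values are shortened placeholders):

lines = ['aaa111 bbb222 refs/heads/main']
assert _parse_git_ref_lines(lines) == [{
    'total_commits': -1,
    'old_rev': 'aaa111',
    'new_rev': 'bbb222',
    'ref': 'refs/heads/main',
    'type': 'heads',
    'name': 'main',
}]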
563 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
572 def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
564 """
573 """
565 Pre push hook.
574 Pre push hook.
566
575
567 :return: status code of the hook. 0 for success.
576 :return: status code of the hook. 0 for success.
568 """
577 """
569 extras = json.loads(env['RC_SCM_DATA'])
578 extras = json.loads(env['RC_SCM_DATA'])
570 rev_data = _parse_git_ref_lines(revision_lines)
579 rev_data = _parse_git_ref_lines(revision_lines)
571 if 'push' not in extras['hooks']:
580 if 'push' not in extras['hooks']:
572 return 0
581 return 0
573 _fix_hooks_executables()
582 _fix_hooks_executables(env.get('RC_INI_FILE'))
574
583
575 empty_commit_id = '0' * 40
584 empty_commit_id = '0' * 40
576
585
577 detect_force_push = extras.get('detect_force_push')
586 detect_force_push = extras.get('detect_force_push')
578
587
579 for push_ref in rev_data:
588 for push_ref in rev_data:
580 # store our git-env which holds the temp store
589 # store our git-env which holds the temp store
581 push_ref['git_env'] = _get_git_env()
590 push_ref['git_env'] = _get_git_env()
582 push_ref['pruned_sha'] = ''
591 push_ref['pruned_sha'] = ''
583 if not detect_force_push:
592 if not detect_force_push:
584 # don't check for forced-push when we don't need to
593 # don't check for forced-push when we don't need to
585 continue
594 continue
586
595
587 type_ = push_ref['type']
596 type_ = push_ref['type']
588 new_branch = push_ref['old_rev'] == empty_commit_id
597 new_branch = push_ref['old_rev'] == empty_commit_id
589 delete_branch = push_ref['new_rev'] == empty_commit_id
598 delete_branch = push_ref['new_rev'] == empty_commit_id
590 if type_ == 'heads' and not (new_branch or delete_branch):
599 if type_ == 'heads' and not (new_branch or delete_branch):
591 old_rev = push_ref['old_rev']
600 old_rev = push_ref['old_rev']
592 new_rev = push_ref['new_rev']
601 new_rev = push_ref['new_rev']
593 cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
602 cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
594 stdout, stderr = subprocessio.run_command(
603 stdout, stderr = subprocessio.run_command(
595 cmd, env=os.environ.copy())
604 cmd, env=os.environ.copy())
596 # non-empty output means there are non-reachable objects, i.e. a forced push was used
605 # non-empty output means there are non-reachable objects, i.e. a forced push was used
597 if stdout:
606 if stdout:
598 push_ref['pruned_sha'] = stdout.splitlines()
607 push_ref['pruned_sha'] = stdout.splitlines()
599
608
600 extras['hook_type'] = 'pre_receive'
609 extras['hook_type'] = 'pre_receive'
601 extras['commit_ids'] = rev_data
610 extras['commit_ids'] = rev_data
602
611
603 stdout = sys.stdout
612 stdout = sys.stdout
604 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
613 status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))
605
614
606 return status_code
615 return status_code
607
616
608
617
609 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
618 def git_post_receive(unused_repo_path, revision_lines, env) -> int:
610 """
619 """
611 Post push hook.
620 Post push hook.
612
621
613 :return: status code of the hook. 0 for success.
622 :return: status code of the hook. 0 for success.
614 """
623 """
615 extras = json.loads(env['RC_SCM_DATA'])
624 extras = json.loads(env['RC_SCM_DATA'])
616 if 'push' not in extras['hooks']:
625 if 'push' not in extras['hooks']:
617 return 0
626 return 0
618
627
619 _fix_hooks_executables()
628 _fix_hooks_executables(env.get('RC_INI_FILE'))
620
629
621 rev_data = _parse_git_ref_lines(revision_lines)
630 rev_data = _parse_git_ref_lines(revision_lines)
622
631
623 git_revs = []
632 git_revs = []
624
633
625 # N.B.(skreft): it is ok to just call git, as git before calling a
634 # N.B.(skreft): it is ok to just call git, as git before calling a
626 # subcommand sets the PATH environment variable so that it points to the
635 # subcommand sets the PATH environment variable so that it points to the
627 # correct version of the git executable.
636 # correct version of the git executable.
628 empty_commit_id = '0' * 40
637 empty_commit_id = '0' * 40
629 branches = []
638 branches = []
630 tags = []
639 tags = []
631 for push_ref in rev_data:
640 for push_ref in rev_data:
632 type_ = push_ref['type']
641 type_ = push_ref['type']
633
642
634 if type_ == 'heads':
643 if type_ == 'heads':
635 # starting new branch case
644 # starting new branch case
636 if push_ref['old_rev'] == empty_commit_id:
645 if push_ref['old_rev'] == empty_commit_id:
637 push_ref_name = push_ref['name']
646 push_ref_name = push_ref['name']
638
647
639 if push_ref_name not in branches:
648 if push_ref_name not in branches:
640 branches.append(push_ref_name)
649 branches.append(push_ref_name)
641
650
642 need_head_set = ''
651 need_head_set = ''
643 with Repository(os.getcwd()) as repo:
652 with Repository(os.getcwd()) as repo:
644 try:
653 try:
645 repo.head
654 repo.head
646 except pygit2.GitError:
655 except pygit2.GitError:
647 need_head_set = f'refs/heads/{push_ref_name}'
656 need_head_set = f'refs/heads/{push_ref_name}'
648
657
649 if need_head_set:
658 if need_head_set:
650 repo.set_head(need_head_set)
659 repo.set_head(need_head_set)
651 print(f"Setting default branch to {push_ref_name}")
660 print(f"Setting default branch to {push_ref_name}")
652
661
653 cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
662 cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
654 stdout, stderr = subprocessio.run_command(
663 stdout, stderr = subprocessio.run_command(
655 cmd, env=os.environ.copy())
664 cmd, env=os.environ.copy())
656 heads = safe_str(stdout)
665 heads = safe_str(stdout)
657 heads = heads.replace(push_ref['ref'], '')
666 heads = heads.replace(push_ref['ref'], '')
658 heads = ' '.join(head for head
667 heads = ' '.join(head for head
659 in heads.splitlines() if head) or '.'
668 in heads.splitlines() if head) or '.'
660 cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
669 cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
661 '--pretty=format:%H', '--', push_ref['new_rev'],
670 '--pretty=format:%H', '--', push_ref['new_rev'],
662 '--not', heads]
671 '--not', heads]
663 stdout, stderr = subprocessio.run_command(
672 stdout, stderr = subprocessio.run_command(
664 cmd, env=os.environ.copy())
673 cmd, env=os.environ.copy())
665 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
674 git_revs.extend(list(map(ascii_str, stdout.splitlines())))
666
675
667 # delete branch case
676 # delete branch case
668 elif push_ref['new_rev'] == empty_commit_id:
677 elif push_ref['new_rev'] == empty_commit_id:
669 git_revs.append(f'delete_branch=>{push_ref["name"]}')
678 git_revs.append(f'delete_branch=>{push_ref["name"]}')
670 else:
679 else:
671 if push_ref['name'] not in branches:
680 if push_ref['name'] not in branches:
672 branches.append(push_ref['name'])
681 branches.append(push_ref['name'])
673
682
674 cmd = [settings.GIT_EXECUTABLE(), 'log',
683 cmd = [settings.GIT_EXECUTABLE(), 'log',
675 f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
684 f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
676 '--reverse', '--pretty=format:%H']
685 '--reverse', '--pretty=format:%H']
677 stdout, stderr = subprocessio.run_command(
686 stdout, stderr = subprocessio.run_command(
678 cmd, env=os.environ.copy())
687 cmd, env=os.environ.copy())
679 # we get bytes from stdout, we need str to be consistent
688 # we get bytes from stdout, we need str to be consistent
680 log_revs = list(map(ascii_str, stdout.splitlines()))
689 log_revs = list(map(ascii_str, stdout.splitlines()))
681 git_revs.extend(log_revs)
690 git_revs.extend(log_revs)
682
691
683 # Pure pygit2 impl. but still 2-3x slower :/
692 # Pure pygit2 impl. but still 2-3x slower :/
684 # results = []
693 # results = []
685 #
694 #
686 # with Repository(os.getcwd()) as repo:
695 # with Repository(os.getcwd()) as repo:
687 # repo_new_rev = repo[push_ref['new_rev']]
696 # repo_new_rev = repo[push_ref['new_rev']]
688 # repo_old_rev = repo[push_ref['old_rev']]
697 # repo_old_rev = repo[push_ref['old_rev']]
689 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
698 # walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
690 #
699 #
691 # for commit in walker:
700 # for commit in walker:
692 # if commit.id == repo_old_rev.id:
701 # if commit.id == repo_old_rev.id:
693 # break
702 # break
694 # results.append(commit.id.hex)
703 # results.append(commit.id.hex)
695 # # reverse the order, can't use GIT_SORT_REVERSE
704 # # reverse the order, can't use GIT_SORT_REVERSE
696 # log_revs = results[::-1]
705 # log_revs = results[::-1]
697
706
698 elif type_ == 'tags':
707 elif type_ == 'tags':
699 if push_ref['name'] not in tags:
708 if push_ref['name'] not in tags:
700 tags.append(push_ref['name'])
709 tags.append(push_ref['name'])
701 git_revs.append(f'tag=>{push_ref["name"]}')
710 git_revs.append(f'tag=>{push_ref["name"]}')
702
711
703 extras['hook_type'] = 'post_receive'
712 extras['hook_type'] = 'post_receive'
704 extras['commit_ids'] = git_revs
713 extras['commit_ids'] = git_revs
705 extras['new_refs'] = {
714 extras['new_refs'] = {
706 'branches': branches,
715 'branches': branches,
707 'bookmarks': [],
716 'bookmarks': [],
708 'tags': tags,
717 'tags': tags,
709 }
718 }
710
719
711 stdout = sys.stdout
720 stdout = sys.stdout
712
721
713 if 'repo_size' in extras['hooks']:
722 if 'repo_size' in extras['hooks']:
714 try:
723 try:
715 _call_hook('repo_size', extras, GitMessageWriter(stdout))
724 _call_hook('repo_size', extras, GitMessageWriter(stdout))
716 except Exception:
725 except Exception:
717 pass
726 pass
718
727
719 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
728 status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
720 return status_code
729 return status_code
721
730
722
731
723 def _get_extras_from_txn_id(path, txn_id):
732 def get_extras_from_txn_id(repo_path, txn_id):
724 _fix_hooks_executables()
733 extras = get_txn_id_from_store(repo_path, txn_id)
725
726 extras = {}
727 try:
728 cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
729 '-t', txn_id,
730 '--revprop', path, 'rc-scm-extras']
731 stdout, stderr = subprocessio.run_command(
732 cmd, env=os.environ.copy())
733 extras = json.loads(base64.urlsafe_b64decode(stdout))
734 except Exception:
735 log.exception('Failed to extract extras info from txn_id')
736
737 return extras
738
739
740 def _get_extras_from_commit_id(commit_id, path):
741 _fix_hooks_executables()
742
743 extras = {}
744 try:
745 cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
746 '-r', commit_id,
747 '--revprop', path, 'rc-scm-extras']
748 stdout, stderr = subprocessio.run_command(
749 cmd, env=os.environ.copy())
750 extras = json.loads(base64.urlsafe_b64decode(stdout))
751 except Exception:
752 log.exception('Failed to extract extras info from commit_id')
753
754 return extras
734 return extras
755
735
756
736
757 def svn_pre_commit(repo_path, commit_data, env):
737 def svn_pre_commit(repo_path, commit_data, env):
758
738
759 path, txn_id = commit_data
739 path, txn_id = commit_data
760 branches = []
740 branches = []
761 tags = []
741 tags = []
762
742
763 if env.get('RC_SCM_DATA'):
743 if env.get('RC_SCM_DATA'):
764 extras = json.loads(env['RC_SCM_DATA'])
744 extras = json.loads(env['RC_SCM_DATA'])
765 else:
745 else:
746 ini_path = env.get('RC_INI_FILE')
747 if ini_path:
748 _get_ini_settings(ini_path)
766 # fallback method to read from TXN-ID stored data
749 # fallback method to read from TXN-ID stored data
767 extras = _get_extras_from_txn_id(path, txn_id)
750 extras = get_extras_from_txn_id(path, txn_id)
768
751
769 if not extras:
752 if not extras:
770 #TODO: temporary fix until svn txn-id changes are merged
753 raise ValueError('SVN-PRE-COMMIT: Failed to extract context data (extras) needed for hook execution')
754
755 if extras.get('rc_internal_commit'):
756 # special marker for internal commit, we don't call hooks client
771 return 0
757 return 0
772 raise ValueError('Failed to extract context data called extras for hook execution')
773
758
774 extras['hook_type'] = 'pre_commit'
759 extras['hook_type'] = 'pre_commit'
775 extras['commit_ids'] = [txn_id]
760 extras['commit_ids'] = [txn_id]
776 extras['txn_id'] = txn_id
761 extras['txn_id'] = txn_id
777 extras['new_refs'] = {
762 extras['new_refs'] = {
778 'total_commits': 1,
763 'total_commits': 1,
779 'branches': branches,
764 'branches': branches,
780 'bookmarks': [],
765 'bookmarks': [],
781 'tags': tags,
766 'tags': tags,
782 }
767 }
783
768
784 return _call_hook('pre_push', extras, SvnMessageWriter())
769 return _call_hook('pre_push', extras, SvnMessageWriter())
785
770
786
771
787 def svn_post_commit(repo_path, commit_data, env):
772 def svn_post_commit(repo_path, commit_data, env):
788 """
773 """
789 commit_data is path, rev, txn_id
774 commit_data is path, rev, txn_id
790 """
775 """
791
776
792 if len(commit_data) == 3:
777 if len(commit_data) == 3:
793 path, commit_id, txn_id = commit_data
778 path, commit_id, txn_id = commit_data
794 elif len(commit_data) == 2:
779 elif len(commit_data) == 2:
795 log.error('Failed to extract txn_id from commit_data using legacy method. '
780 log.error('Failed to extract txn_id from commit_data using legacy method. '
796 'Some functionality might be limited')
781 'Some functionality might be limited')
797 path, commit_id = commit_data
782 path, commit_id = commit_data
798 txn_id = None
783 txn_id = None
799 else:
784 else:
800 return 0
785 return 0
801
786
802 branches = []
787 branches = []
803 tags = []
788 tags = []
804
789
805 if env.get('RC_SCM_DATA'):
790 if env.get('RC_SCM_DATA'):
806 extras = json.loads(env['RC_SCM_DATA'])
791 extras = json.loads(env['RC_SCM_DATA'])
807 else:
792 else:
793 ini_path = env.get('RC_INI_FILE')
794 if ini_path:
795 _get_ini_settings(ini_path)
808 # fallback method to read from TXN-ID stored data
796 # fallback method to read from TXN-ID stored data
809 extras = _get_extras_from_commit_id(commit_id, path)
797 extras = get_extras_from_txn_id(path, txn_id)
810
798
811 if not extras:
799 if not extras and txn_id:
812 #TODO: temporary fix until svn txn-id changes are merged
800 raise ValueError('SVN-POST-COMMIT: Failed to extract context data (extras) needed for hook execution')
801
802 if extras.get('rc_internal_commit'):
803 # special marker for internal commit, we don't call hooks client
813 return 0
804 return 0
814 raise ValueError('Failed to extract context data called extras for hook execution')
815
805
816 extras['hook_type'] = 'post_commit'
806 extras['hook_type'] = 'post_commit'
817 extras['commit_ids'] = [commit_id]
807 extras['commit_ids'] = [commit_id]
818 extras['txn_id'] = txn_id
808 extras['txn_id'] = txn_id
819 extras['new_refs'] = {
809 extras['new_refs'] = {
820 'branches': branches,
810 'branches': branches,
821 'bookmarks': [],
811 'bookmarks': [],
822 'tags': tags,
812 'tags': tags,
823 'total_commits': 1,
813 'total_commits': 1,
824 }
814 }
825
815
826 if 'repo_size' in extras['hooks']:
816 if 'repo_size' in extras['hooks']:
827 try:
817 try:
828 _call_hook('repo_size', extras, SvnMessageWriter())
818 _call_hook('repo_size', extras, SvnMessageWriter())
829 except Exception:
819 except Exception:
830 pass
820 pass
831
821
832 return _call_hook('post_push', extras, SvnMessageWriter())
822 return _call_hook('post_push', extras, SvnMessageWriter())
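Illustrative only: the two commit_data shapes svn_post_commit accepts, per its docstring; the repo path, revision and txn id values are placeholders:

# 3-element form: path, commit/revision id, txn_id
svn_post_commit('/srv/repos/myrepo', ('/srv/repos/myrepo', '42', '42-abc'), os.environ)
# legacy 2-element form: txn_id is set to None and an error is logged
svn_post_commit('/srv/repos/myrepo', ('/srv/repos/myrepo', '42'), os.environ)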
@@ -1,774 +1,763 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import platform
20 import platform
21 import sys
21 import sys
22 import locale
22 import locale
23 import logging
23 import logging
24 import uuid
24 import uuid
25 import time
25 import time
26 import wsgiref.util
26 import wsgiref.util
27 import tempfile
27 import tempfile
28 import psutil
28 import psutil
29
29
30 from itertools import chain
30 from itertools import chain
31
31
32 import msgpack
32 import msgpack
33 import configparser
33 import configparser
34
34
35 from pyramid.config import Configurator
35 from pyramid.config import Configurator
36 from pyramid.wsgi import wsgiapp
36 from pyramid.wsgi import wsgiapp
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
39 from vcsserver.base import BytesEnvelope, BinaryEnvelope
40 from vcsserver.lib.ext_json import json
40
41 from vcsserver.config.settings_maker import SettingsMaker
41 from vcsserver.config.settings_maker import SettingsMaker
42 from vcsserver.lib.str_utils import safe_int
42
43 from vcsserver.lib.statsd_client import StatsdClient
44 from vcsserver.tweens.request_wrapper import get_headers_call_context
43 from vcsserver.tweens.request_wrapper import get_headers_call_context
45
44
46 import vcsserver
45 from vcsserver import remote_wsgi, scm_app, hgpatches
47 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
46 from vcsserver.server import VcsServer
48 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
47 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
49 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
48 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
50 from vcsserver.echo_stub.echo_app import EchoApp
49 from vcsserver.echo_stub.echo_app import EchoApp
51 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
50 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
52 from vcsserver.lib.exc_tracking import store_exception, format_exc
51 from vcsserver.lib.exc_tracking import store_exception, format_exc
53 from vcsserver.server import VcsServer
52 from vcsserver.lib.str_utils import safe_int
53 from vcsserver.lib.statsd_client import StatsdClient
54 from vcsserver.lib.ext_json import json
55 from vcsserver.lib.config_utils import configure_and_store_settings
56
54
57
55 strict_vcs = True
58 strict_vcs = True
56
59
57 git_import_err = None
60 git_import_err = None
58 try:
61 try:
59 from vcsserver.remote.git_remote import GitFactory, GitRemote
62 from vcsserver.remote.git_remote import GitFactory, GitRemote
60 except ImportError as e:
63 except ImportError as e:
61 GitFactory = None
64 GitFactory = None
62 GitRemote = None
65 GitRemote = None
63 git_import_err = e
66 git_import_err = e
64 if strict_vcs:
67 if strict_vcs:
65 raise
68 raise
66
69
67
70
68 hg_import_err = None
71 hg_import_err = None
69 try:
72 try:
70 from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
73 from vcsserver.remote.hg_remote import MercurialFactory, HgRemote
71 except ImportError as e:
74 except ImportError as e:
72 MercurialFactory = None
75 MercurialFactory = None
73 HgRemote = None
76 HgRemote = None
74 hg_import_err = e
77 hg_import_err = e
75 if strict_vcs:
78 if strict_vcs:
76 raise
79 raise
77
80
78
81
79 svn_import_err = None
82 svn_import_err = None
80 try:
83 try:
81 from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
84 from vcsserver.remote.svn_remote import SubversionFactory, SvnRemote
82 except ImportError as e:
85 except ImportError as e:
83 SubversionFactory = None
86 SubversionFactory = None
84 SvnRemote = None
87 SvnRemote = None
85 svn_import_err = e
88 svn_import_err = e
86 if strict_vcs:
89 if strict_vcs:
87 raise
90 raise
88
91
89 log = logging.getLogger(__name__)
92 log = logging.getLogger(__name__)
90
93
91 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
94 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
92 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
95 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
93
96
94 try:
97 try:
95 locale.setlocale(locale.LC_ALL, '')
98 locale.setlocale(locale.LC_ALL, '')
96 except locale.Error as e:
99 except locale.Error as e:
97 log.error(
100 log.error('LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
98 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
99 os.environ['LC_ALL'] = 'C'
101 os.environ['LC_ALL'] = 'C'
100
102
101
103
102 def _is_request_chunked(environ):
104 def _is_request_chunked(environ):
103 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
105 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
104 return stream
106 return stream
105
107
106
108
107 def log_max_fd():
109 def log_max_fd():
108 try:
110 try:
109 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
111 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
110 log.info('Max file descriptors value: %s', maxfd)
112 log.info('Max file descriptors value: %s', maxfd)
111 except Exception:
113 except Exception:
112 pass
114 pass
113
115
114
116
115 class VCS:
117 class VCS:
116 def __init__(self, locale_conf=None, cache_config=None):
118 def __init__(self, locale_conf=None, cache_config=None):
117 self.locale = locale_conf
119 self.locale = locale_conf
118 self.cache_config = cache_config
120 self.cache_config = cache_config
119 self._configure_locale()
121 self._configure_locale()
120
122
121 log_max_fd()
123 log_max_fd()
122
124
123 if GitFactory and GitRemote:
125 if GitFactory and GitRemote:
124 git_factory = GitFactory()
126 git_factory = GitFactory()
125 self._git_remote = GitRemote(git_factory)
127 self._git_remote = GitRemote(git_factory)
126 else:
128 else:
127 log.error("Git client import failed: %s", git_import_err)
129 log.error("Git client import failed: %s", git_import_err)
128
130
129 if MercurialFactory and HgRemote:
131 if MercurialFactory and HgRemote:
130 hg_factory = MercurialFactory()
132 hg_factory = MercurialFactory()
131 self._hg_remote = HgRemote(hg_factory)
133 self._hg_remote = HgRemote(hg_factory)
132 else:
134 else:
133 log.error("Mercurial client import failed: %s", hg_import_err)
135 log.error("Mercurial client import failed: %s", hg_import_err)
134
136
135 if SubversionFactory and SvnRemote:
137 if SubversionFactory and SvnRemote:
136 svn_factory = SubversionFactory()
138 svn_factory = SubversionFactory()
137
139
138 # hg factory is used for svn url validation
140 # hg factory is used for svn url validation
139 hg_factory = MercurialFactory()
141 hg_factory = MercurialFactory()
140 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
141 else:
143 else:
142 log.error("Subversion client import failed: %s", svn_import_err)
144 log.error("Subversion client import failed: %s", svn_import_err)
143
145
144 self._vcsserver = VcsServer()
146 self._vcsserver = VcsServer()
145
147
146 def _configure_locale(self):
148 def _configure_locale(self):
147 if self.locale:
149 if self.locale:
148 log.info('Setting locale: `LC_ALL` to %s', self.locale)
150 log.info('Setting locale: `LC_ALL` to %s', self.locale)
149 else:
151 else:
150 log.info('Configuring locale subsystem based on environment variables')
152 log.info('Configuring locale subsystem based on environment variables')
151 try:
153 try:
152 # If self.locale is the empty string, then the locale
154 # If self.locale is the empty string, then the locale
153 # module will use the environment variables. See the
155 # module will use the environment variables. See the
154 # documentation of the package `locale`.
156 # documentation of the package `locale`.
155 locale.setlocale(locale.LC_ALL, self.locale)
157 locale.setlocale(locale.LC_ALL, self.locale)
156
158
157 language_code, encoding = locale.getlocale()
159 language_code, encoding = locale.getlocale()
158 log.info(
160 log.info(
159 'Locale set to language code "%s" with encoding "%s".',
161 'Locale set to language code "%s" with encoding "%s".',
160 language_code, encoding)
162 language_code, encoding)
161 except locale.Error:
163 except locale.Error:
162 log.exception('Cannot set locale, not configuring the locale system')
164 log.exception('Cannot set locale, not configuring the locale system')
163
165
164
166
165 class WsgiProxy:
167 class WsgiProxy:
166 def __init__(self, wsgi):
168 def __init__(self, wsgi):
167 self.wsgi = wsgi
169 self.wsgi = wsgi
168
170
169 def __call__(self, environ, start_response):
171 def __call__(self, environ, start_response):
170 input_data = environ['wsgi.input'].read()
172 input_data = environ['wsgi.input'].read()
171 input_data = msgpack.unpackb(input_data)
173 input_data = msgpack.unpackb(input_data)
172
174
173 error = None
175 error = None
174 try:
176 try:
175 data, status, headers = self.wsgi.handle(
177 data, status, headers = self.wsgi.handle(
176 input_data['environment'], input_data['input_data'],
178 input_data['environment'], input_data['input_data'],
177 *input_data['args'], **input_data['kwargs'])
179 *input_data['args'], **input_data['kwargs'])
178 except Exception as e:
180 except Exception as e:
179 data, status, headers = [], None, None
181 data, status, headers = [], None, None
180 error = {
182 error = {
181 'message': str(e),
183 'message': str(e),
182 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 '_vcs_kind': getattr(e, '_vcs_kind', None)
183 }
185 }
184
186
185 start_response(200, {})
187 start_response(200, {})
186 return self._iterator(error, status, headers, data)
188 return self._iterator(error, status, headers, data)
187
189
188 def _iterator(self, error, status, headers, data):
190 def _iterator(self, error, status, headers, data):
189 initial_data = [
191 initial_data = [
190 error,
192 error,
191 status,
193 status,
192 headers,
194 headers,
193 ]
195 ]
194
196
195 for d in chain(initial_data, data):
197 for d in chain(initial_data, data):
196 yield msgpack.packb(d)
198 yield msgpack.packb(d)
197
199
198
200
199 def not_found(request):
201 def not_found(request):
200 return {'status': '404 NOT FOUND'}
202 return {'status': '404 NOT FOUND'}
201
203
202
204
203 class VCSViewPredicate:
205 class VCSViewPredicate:
204 def __init__(self, val, config):
206 def __init__(self, val, config):
205 self.remotes = val
207 self.remotes = val
206
208
207 def text(self):
209 def text(self):
208 return f'vcs view method = {list(self.remotes.keys())}'
210 return f'vcs view method = {list(self.remotes.keys())}'
209
211
210 phash = text
212 phash = text
211
213
212 def __call__(self, context, request):
214 def __call__(self, context, request):
213 """
215 """
214 View predicate that returns True if the given backend is supported by
216 View predicate that returns True if the given backend is supported by
215 the defined remotes.
217 the defined remotes.
216 """
218 """
217 backend = request.matchdict.get('backend')
219 backend = request.matchdict.get('backend')
218 return backend in self.remotes
220 return backend in self.remotes
219
221
220
222
221 class HTTPApplication:
223 class HTTPApplication:
222 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
223
225
224 remote_wsgi = remote_wsgi
226 remote_wsgi = remote_wsgi
225 _use_echo_app = False
227 _use_echo_app = False
226
228
227 def __init__(self, settings=None, global_config=None):
229 def __init__(self, settings=None, global_config=None):
228
230
229 self.config = Configurator(settings=settings)
231 self.config = Configurator(settings=settings)
230 # Init our statsd at very start
232 # Init our statsd at very start
231 self.config.registry.statsd = StatsdClient.statsd
233 self.config.registry.statsd = StatsdClient.statsd
232 self.config.registry.vcs_call_context = {}
234 self.config.registry.vcs_call_context = {}
233
235
234 self.global_config = global_config
236 self.global_config = global_config
235 self.config.include('vcsserver.lib.rc_cache')
237 self.config.include('vcsserver.lib.rc_cache')
236 self.config.include('vcsserver.lib.archive_cache')
238 self.config.include('vcsserver.lib.archive_cache')
237
239
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
240 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
241 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 self._remotes = {
242 self._remotes = {
241 'hg': vcs._hg_remote,
243 'hg': vcs._hg_remote,
242 'git': vcs._git_remote,
244 'git': vcs._git_remote,
243 'svn': vcs._svn_remote,
245 'svn': vcs._svn_remote,
244 'server': vcs._vcsserver,
246 'server': vcs._vcsserver,
245 }
247 }
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
248 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247 self._use_echo_app = True
249 self._use_echo_app = True
248 log.warning("Using EchoApp for VCS operations.")
250 log.warning("Using EchoApp for VCS operations.")
249 self.remote_wsgi = remote_wsgi_stub
251 self.remote_wsgi = remote_wsgi_stub
250
252
251 self._configure_settings(global_config, settings)
253 configure_and_store_settings(global_config, settings)
252
254
253 self._configure()
255 self._configure()
254
256
255 def _configure_settings(self, global_config, app_settings):
256 """
257 Configure the settings module.
258 """
259 settings_merged = global_config.copy()
260 settings_merged.update(app_settings)
261
262 binary_dir = app_settings['core.binary_dir']
263
264 settings.BINARY_DIR = binary_dir
265
266 # Store the settings to make them available to other modules.
267 vcsserver.PYRAMID_SETTINGS = settings_merged
268 vcsserver.CONFIG = settings_merged
269
270 def _configure(self):
257 def _configure(self):
271 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
258 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
272
259
273 self.config.add_route('service', '/_service')
260 self.config.add_route('service', '/_service')
274 self.config.add_route('status', '/status')
261 self.config.add_route('status', '/status')
275 self.config.add_route('hg_proxy', '/proxy/hg')
262 self.config.add_route('hg_proxy', '/proxy/hg')
276 self.config.add_route('git_proxy', '/proxy/git')
263 self.config.add_route('git_proxy', '/proxy/git')
277
264
278 # rpc methods
265 # rpc methods
279 self.config.add_route('vcs', '/{backend}')
266 self.config.add_route('vcs', '/{backend}')
280
267
281 # streaming rpc remote methods
268 # streaming rpc remote methods
282 self.config.add_route('vcs_stream', '/{backend}/stream')
269 self.config.add_route('vcs_stream', '/{backend}/stream')
283
270
284 # vcs operations clone/push as streaming
271 # vcs operations clone/push as streaming
285 self.config.add_route('stream_git', '/stream/git/*repo_name')
272 self.config.add_route('stream_git', '/stream/git/*repo_name')
286 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
273 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
287
274
288 self.config.add_view(self.status_view, route_name='status', renderer='json')
275 self.config.add_view(self.status_view, route_name='status', renderer='json')
289 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
276 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
290
277
291 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
278 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
292 self.config.add_view(self.git_proxy(), route_name='git_proxy')
279 self.config.add_view(self.git_proxy(), route_name='git_proxy')
293 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
280 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
294 vcs_view=self._remotes)
281 vcs_view=self._remotes)
295 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
282 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
296 vcs_view=self._remotes)
283 vcs_view=self._remotes)
297
284
298 self.config.add_view(self.hg_stream(), route_name='stream_hg')
285 self.config.add_view(self.hg_stream(), route_name='stream_hg')
299 self.config.add_view(self.git_stream(), route_name='stream_git')
286 self.config.add_view(self.git_stream(), route_name='stream_git')
300
287
301 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
288 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
302
289
303 self.config.add_notfound_view(not_found, renderer='json')
290 self.config.add_notfound_view(not_found, renderer='json')
304
291
305 self.config.add_view(self.handle_vcs_exception, context=Exception)
292 self.config.add_view(self.handle_vcs_exception, context=Exception)
306
293
307 self.config.add_tween(
294 self.config.add_tween(
308 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
295 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
309 )
296 )
310 self.config.add_request_method(
297 self.config.add_request_method(
311 'vcsserver.lib.request_counter.get_request_counter',
298 'vcsserver.lib.request_counter.get_request_counter',
312 'request_count')
299 'request_count')
313
300
314 def wsgi_app(self):
301 def wsgi_app(self):
315 return self.config.make_wsgi_app()
302 return self.config.make_wsgi_app()
316
303
317 def _vcs_view_params(self, request):
304 def _vcs_view_params(self, request):
318 remote = self._remotes[request.matchdict['backend']]
305 remote = self._remotes[request.matchdict['backend']]
319 payload = msgpack.unpackb(request.body, use_list=True)
306 payload = msgpack.unpackb(request.body, use_list=True)
320
307
321 method = payload.get('method')
308 method = payload.get('method')
322 params = payload['params']
309 params = payload['params']
323 wire = params.get('wire')
310 wire = params.get('wire')
324 args = params.get('args')
311 args = params.get('args')
325 kwargs = params.get('kwargs')
312 kwargs = params.get('kwargs')
326 context_uid = None
313 context_uid = None
327
314
328 request.registry.vcs_call_context = {
315 request.registry.vcs_call_context = {
329 'method': method,
316 'method': method,
330 'repo_name': payload.get('_repo_name'),
317 'repo_name': payload.get('_repo_name'),
331 }
318 }
332
319
333 if wire:
320 if wire:
334 try:
321 try:
335 wire['context'] = context_uid = uuid.UUID(wire['context'])
322 wire['context'] = context_uid = uuid.UUID(wire['context'])
336 except KeyError:
323 except KeyError:
337 pass
324 pass
338 args.insert(0, wire)
325 args.insert(0, wire)
339 repo_state_uid = wire.get('repo_state_uid') if wire else None
326 repo_state_uid = wire.get('repo_state_uid') if wire else None
340
327
341 # NOTE(marcink): trading complexity for slight performance
328 # NOTE(marcink): trading complexity for slight performance
342 if log.isEnabledFor(logging.DEBUG):
329 if log.isEnabledFor(logging.DEBUG):
343 # also we SKIP printing out any of those methods' args since they may be excessive
330 # also we SKIP printing out any of those methods' args since they may be excessive
344 just_args_methods = {
331 just_args_methods = {
345 'commitctx': ('content', 'removed', 'updated'),
332 'commitctx': ('content', 'removed', 'updated'),
346 'commit': ('content', 'removed', 'updated')
333 'commit': ('content', 'removed', 'updated')
347 }
334 }
348 if method in just_args_methods:
335 if method in just_args_methods:
349 skip_args = just_args_methods[method]
336 skip_args = just_args_methods[method]
350 call_args = ''
337 call_args = ''
351 call_kwargs = {}
338 call_kwargs = {}
352 for k in kwargs:
339 for k in kwargs:
353 if k in skip_args:
340 if k in skip_args:
354 # replace our skip key with dummy
341 # replace our skip key with dummy
355 call_kwargs[k] = f'RemovedParam({k})'
342 call_kwargs[k] = f'RemovedParam({k})'
356 else:
343 else:
357 call_kwargs[k] = kwargs[k]
344 call_kwargs[k] = kwargs[k]
358 else:
345 else:
359 call_args = args[1:]
346 call_args = args[1:]
360 call_kwargs = kwargs
347 call_kwargs = kwargs
361
348
362 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
349 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
363 method, call_args, call_kwargs, context_uid, repo_state_uid)
350 method, call_args, call_kwargs, context_uid, repo_state_uid)
364
351
365 statsd = request.registry.statsd
352 statsd = request.registry.statsd
366 if statsd:
353 if statsd:
367 statsd.incr(
354 statsd.incr(
368 'vcsserver_method_total', tags=[
355 'vcsserver_method_total', tags=[
369 f"method:{method}",
356 f"method:{method}",
370 ])
357 ])
371 return payload, remote, method, args, kwargs
358 return payload, remote, method, args, kwargs
372
359
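The payload unpacked above implies a small msgpack RPC envelope. A client-side sketch follows; the field names (`id`, `method`, `params`, `wire`, `args`, `kwargs`, `_repo_name`) are taken from the code above, while the host, port, repo path and chosen method are illustrative assumptions and a real call also depends on what the selected remote expects inside `wire`.

    # Sketch of the msgpack envelope that _vcs_view_params() unpacks.
    # Concrete values (URL, repo path, method name) are assumptions.
    import uuid
    import msgpack
    import urllib.request

    payload = {
        'id': str(uuid.uuid4()),
        'method': 'is_empty',                                  # any method exposed by the chosen remote
        '_repo_name': 'my-repo',                               # placeholder repo name
        'params': {
            'wire': {'path': '/srv/repos/my-repo', 'context': str(uuid.uuid4())},
            'args': [],
            'kwargs': {},
        },
    }

    req = urllib.request.Request(
        'http://127.0.0.1:9900/svn',                           # assumed host/port; route is /{backend}
        data=msgpack.packb(payload),
        headers={'Content-Type': 'application/x-msgpack'},
    )
    with urllib.request.urlopen(req) as resp:
        print(msgpack.unpackb(resp.read()))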
373 def vcs_view(self, request):
360 def vcs_view(self, request):
374
361
375 payload, remote, method, args, kwargs = self._vcs_view_params(request)
362 payload, remote, method, args, kwargs = self._vcs_view_params(request)
376 payload_id = payload.get('id')
363 payload_id = payload.get('id')
377
364
378 try:
365 try:
379 resp = getattr(remote, method)(*args, **kwargs)
366 resp = getattr(remote, method)(*args, **kwargs)
380 except Exception as e:
367 except Exception as e:
381 exc_info = list(sys.exc_info())
368 exc_info = list(sys.exc_info())
382 exc_type, exc_value, exc_traceback = exc_info
369 exc_type, exc_value, exc_traceback = exc_info
383
370
384 org_exc = getattr(e, '_org_exc', None)
371 org_exc = getattr(e, '_org_exc', None)
385 org_exc_name = None
372 org_exc_name = None
386 org_exc_tb = ''
373 org_exc_tb = ''
387 if org_exc:
374 if org_exc:
388 org_exc_name = org_exc.__class__.__name__
375 org_exc_name = org_exc.__class__.__name__
389 org_exc_tb = getattr(e, '_org_exc_tb', '')
376 org_exc_tb = getattr(e, '_org_exc_tb', '')
390 # replace our "faked" exception with our org
377 # replace our "faked" exception with our org
391 exc_info[0] = org_exc.__class__
378 exc_info[0] = org_exc.__class__
392 exc_info[1] = org_exc
379 exc_info[1] = org_exc
393
380
394 should_store_exc = True
381 should_store_exc = True
395 if org_exc:
382 if org_exc:
396 def get_exc_fqn(_exc_obj):
383 def get_exc_fqn(_exc_obj):
397 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
384 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
398 return module_name + '.' + org_exc_name
385 return module_name + '.' + org_exc_name
399
386
400 exc_fqn = get_exc_fqn(org_exc)
387 exc_fqn = get_exc_fqn(org_exc)
401
388
402 if exc_fqn in ['mercurial.error.RepoLookupError',
389 if exc_fqn in ['mercurial.error.RepoLookupError',
403 'vcsserver.exceptions.RefNotFoundException']:
390 'vcsserver.exceptions.RefNotFoundException']:
404 should_store_exc = False
391 should_store_exc = False
405
392
406 if should_store_exc:
393 if should_store_exc:
407 store_exception(id(exc_info), exc_info, request_path=request.path)
394 store_exception(id(exc_info), exc_info, request_path=request.path)
408
395
409 tb_info = format_exc(exc_info)
396 tb_info = format_exc(exc_info)
410
397
411 type_ = e.__class__.__name__
398 type_ = e.__class__.__name__
412 if type_ not in self.ALLOWED_EXCEPTIONS:
399 if type_ not in self.ALLOWED_EXCEPTIONS:
413 type_ = None
400 type_ = None
414
401
415 resp = {
402 resp = {
416 'id': payload_id,
403 'id': payload_id,
417 'error': {
404 'error': {
418 'message': str(e),
405 'message': str(e),
419 'traceback': tb_info,
406 'traceback': tb_info,
420 'org_exc': org_exc_name,
407 'org_exc': org_exc_name,
421 'org_exc_tb': org_exc_tb,
408 'org_exc_tb': org_exc_tb,
422 'type': type_
409 'type': type_
423 }
410 }
424 }
411 }
425
412
426 try:
413 try:
427 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
414 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
428 except AttributeError:
415 except AttributeError:
429 pass
416 pass
430 else:
417 else:
431 resp = {
418 resp = {
432 'id': payload_id,
419 'id': payload_id,
433 'result': resp
420 'result': resp
434 }
421 }
435 log.debug('Serving data for method %s', method)
422 log.debug('Serving data for method %s', method)
436 return resp
423 return resp
437
424
438 def vcs_stream_view(self, request):
425 def vcs_stream_view(self, request):
439 payload, remote, method, args, kwargs = self._vcs_view_params(request)
426 payload, remote, method, args, kwargs = self._vcs_view_params(request)
440 # this method has a `stream:` marker; we remove it here
427 # this method has a `stream:` marker; we remove it here
441 method = method.split('stream:')[-1]
428 method = method.split('stream:')[-1]
442 chunk_size = safe_int(payload.get('chunk_size')) or 4096
429 chunk_size = safe_int(payload.get('chunk_size')) or 4096
443
430
444 resp = getattr(remote, method)(*args, **kwargs)
431 resp = getattr(remote, method)(*args, **kwargs)
445
432
446 def get_chunked_data(method_resp):
433 def get_chunked_data(method_resp):
447 stream = io.BytesIO(method_resp)
434 stream = io.BytesIO(method_resp)
448 while 1:
435 while 1:
449 chunk = stream.read(chunk_size)
436 chunk = stream.read(chunk_size)
450 if not chunk:
437 if not chunk:
451 break
438 break
452 yield chunk
439 yield chunk
453
440
454 response = Response(app_iter=get_chunked_data(resp))
441 response = Response(app_iter=get_chunked_data(resp))
455 response.content_type = 'application/octet-stream'
442 response.content_type = 'application/octet-stream'
456
443
457 return response
444 return response
458
445
459 def status_view(self, request):
446 def status_view(self, request):
460 import vcsserver
447 import vcsserver
461 _platform_id = platform.uname()[1] or 'instance'
448 _platform_id = platform.uname()[1] or 'instance'
462
449
463 return {
450 return {
464 "status": "OK",
451 "status": "OK",
465 "vcsserver_version": vcsserver.get_version(),
452 "vcsserver_version": vcsserver.get_version(),
466 "platform": _platform_id,
453 "platform": _platform_id,
467 "pid": os.getpid(),
454 "pid": os.getpid(),
468 }
455 }
469
456
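A minimal sketch of querying the `/status` route registered above; the keys match the dict returned by `status_view`, while the host and port are assumptions.

    # Sketch: hit the JSON /status endpoint of a running vcsserver (assumed on localhost:9900).
    import json
    import urllib.request

    with urllib.request.urlopen('http://127.0.0.1:9900/status') as resp:
        info = json.loads(resp.read())

    print(info['status'], info['vcsserver_version'], info['platform'], info['pid'])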
470 def service_view(self, request):
457 def service_view(self, request):
471 import vcsserver
458 import vcsserver
472
459
473 payload = msgpack.unpackb(request.body, use_list=True)
460 payload = msgpack.unpackb(request.body, use_list=True)
474 server_config, app_config = {}, {}
461 server_config, app_config = {}, {}
475
462
476 try:
463 try:
477 path = self.global_config['__file__']
464 path = self.global_config['__file__']
478 config = configparser.RawConfigParser()
465 config = configparser.RawConfigParser()
479
466
480 config.read(path)
467 config.read(path)
481
468
482 if config.has_section('server:main'):
469 if config.has_section('server:main'):
483 server_config = dict(config.items('server:main'))
470 server_config = dict(config.items('server:main'))
484 if config.has_section('app:main'):
471 if config.has_section('app:main'):
485 app_config = dict(config.items('app:main'))
472 app_config = dict(config.items('app:main'))
486
473
487 except Exception:
474 except Exception:
488 log.exception('Failed to read .ini file for display')
475 log.exception('Failed to read .ini file for display')
489
476
490 environ = list(os.environ.items())
477 environ = list(os.environ.items())
491
478
492 resp = {
479 resp = {
493 'id': payload.get('id'),
480 'id': payload.get('id'),
494 'result': dict(
481 'result': dict(
495 version=vcsserver.get_version(),
482 version=vcsserver.get_version(),
496 config=server_config,
483 config=server_config,
497 app_config=app_config,
484 app_config=app_config,
498 environ=environ,
485 environ=environ,
499 payload=payload,
486 payload=payload,
500 )
487 )
501 }
488 }
502 return resp
489 return resp
503
490
504 def _msgpack_renderer_factory(self, info):
491 def _msgpack_renderer_factory(self, info):
505
492
506 def _render(value, system):
493 def _render(value, system):
507 bin_type = False
494 bin_type = False
508 res = value.get('result')
495 res = value.get('result')
509 if isinstance(res, BytesEnvelope):
496 if isinstance(res, BytesEnvelope):
510 log.debug('Result is wrapped in BytesEnvelope type')
497 log.debug('Result is wrapped in BytesEnvelope type')
511 bin_type = True
498 bin_type = True
512 elif isinstance(res, BinaryEnvelope):
499 elif isinstance(res, BinaryEnvelope):
513 log.debug('Result is wrapped in BinaryEnvelope type')
500 log.debug('Result is wrapped in BinaryEnvelope type')
514 value['result'] = res.val
501 value['result'] = res.val
515 bin_type = True
502 bin_type = True
516
503
517 request = system.get('request')
504 request = system.get('request')
518 if request is not None:
505 if request is not None:
519 response = request.response
506 response = request.response
520 ct = response.content_type
507 ct = response.content_type
521 if ct == response.default_content_type:
508 if ct == response.default_content_type:
522 response.content_type = 'application/x-msgpack'
509 response.content_type = 'application/x-msgpack'
523 if bin_type:
510 if bin_type:
524 response.content_type = 'application/x-msgpack-bin'
511 response.content_type = 'application/x-msgpack-bin'
525
512
526 return msgpack.packb(value, use_bin_type=bin_type)
513 return msgpack.packb(value, use_bin_type=bin_type)
527 return _render
514 return _render
528
515
529 def set_env_from_config(self, environ, config):
516 def set_env_from_config(self, environ, config):
530 dict_conf = {}
517 dict_conf = {}
531 try:
518 try:
532 for elem in config:
519 for elem in config:
533 if elem[0] == 'rhodecode':
520 if elem[0] == 'rhodecode':
534 dict_conf = json.loads(elem[2])
521 dict_conf = json.loads(elem[2])
535 break
522 break
536 except Exception:
523 except Exception:
537 log.exception('Failed to fetch SCM CONFIG')
524 log.exception('Failed to fetch SCM CONFIG')
538 return
525 return
539
526
540 username = dict_conf.get('username')
527 username = dict_conf.get('username')
541 if username:
528 if username:
542 environ['REMOTE_USER'] = username
529 environ['REMOTE_USER'] = username
543 # mercurial specific, some extension APIs rely on this
530 # mercurial specific, some extension APIs rely on this
544 environ['HGUSER'] = username
531 environ['HGUSER'] = username
545
532
546 ip = dict_conf.get('ip')
533 ip = dict_conf.get('ip')
547 if ip:
534 if ip:
548 environ['REMOTE_HOST'] = ip
535 environ['REMOTE_HOST'] = ip
549
536
550 if _is_request_chunked(environ):
537 if _is_request_chunked(environ):
551 # set the compatibility flag for webob
538 # set the compatibility flag for webob
552 environ['wsgi.input_terminated'] = True
539 environ['wsgi.input_terminated'] = True
553
540
554 def hg_proxy(self):
541 def hg_proxy(self):
555 @wsgiapp
542 @wsgiapp
556 def _hg_proxy(environ, start_response):
543 def _hg_proxy(environ, start_response):
557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
544 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
558 return app(environ, start_response)
545 return app(environ, start_response)
559 return _hg_proxy
546 return _hg_proxy
560
547
561 def git_proxy(self):
548 def git_proxy(self):
562 @wsgiapp
549 @wsgiapp
563 def _git_proxy(environ, start_response):
550 def _git_proxy(environ, start_response):
564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
551 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
565 return app(environ, start_response)
552 return app(environ, start_response)
566 return _git_proxy
553 return _git_proxy
567
554
568 def hg_stream(self):
555 def hg_stream(self):
569 if self._use_echo_app:
556 if self._use_echo_app:
570 @wsgiapp
557 @wsgiapp
571 def _hg_stream(environ, start_response):
558 def _hg_stream(environ, start_response):
572 app = EchoApp('fake_path', 'fake_name', None)
559 app = EchoApp('fake_path', 'fake_name', None)
573 return app(environ, start_response)
560 return app(environ, start_response)
574 return _hg_stream
561 return _hg_stream
575 else:
562 else:
576 @wsgiapp
563 @wsgiapp
577 def _hg_stream(environ, start_response):
564 def _hg_stream(environ, start_response):
578 log.debug('http-app: handling hg stream')
565 log.debug('http-app: handling hg stream')
579 call_context = get_headers_call_context(environ)
566 call_context = get_headers_call_context(environ)
580
567
581 repo_path = call_context['repo_path']
568 repo_path = call_context['repo_path']
582 repo_name = call_context['repo_name']
569 repo_name = call_context['repo_name']
583 config = call_context['repo_config']
570 config = call_context['repo_config']
584
571
585 app = scm_app.create_hg_wsgi_app(
572 app = scm_app.create_hg_wsgi_app(
586 repo_path, repo_name, config)
573 repo_path, repo_name, config)
587
574
588 # Consistent path information for hgweb
575 # Consistent path information for hgweb
589 environ['PATH_INFO'] = call_context['path_info']
576 environ['PATH_INFO'] = call_context['path_info']
590 environ['REPO_NAME'] = repo_name
577 environ['REPO_NAME'] = repo_name
591 self.set_env_from_config(environ, config)
578 self.set_env_from_config(environ, config)
592
579
593 log.debug('http-app: starting app handler '
580 log.debug('http-app: starting app handler '
594 'with %s and process request', app)
581 'with %s and process request', app)
595 return app(environ, ResponseFilter(start_response))
582 return app(environ, ResponseFilter(start_response))
596 return _hg_stream
583 return _hg_stream
597
584
598 def git_stream(self):
585 def git_stream(self):
599 if self._use_echo_app:
586 if self._use_echo_app:
600 @wsgiapp
587 @wsgiapp
601 def _git_stream(environ, start_response):
588 def _git_stream(environ, start_response):
602 app = EchoApp('fake_path', 'fake_name', None)
589 app = EchoApp('fake_path', 'fake_name', None)
603 return app(environ, start_response)
590 return app(environ, start_response)
604 return _git_stream
591 return _git_stream
605 else:
592 else:
606 @wsgiapp
593 @wsgiapp
607 def _git_stream(environ, start_response):
594 def _git_stream(environ, start_response):
608 log.debug('http-app: handling git stream')
595 log.debug('http-app: handling git stream')
609
596
610 call_context = get_headers_call_context(environ)
597 call_context = get_headers_call_context(environ)
611
598
612 repo_path = call_context['repo_path']
599 repo_path = call_context['repo_path']
613 repo_name = call_context['repo_name']
600 repo_name = call_context['repo_name']
614 config = call_context['repo_config']
601 config = call_context['repo_config']
615
602
616 environ['PATH_INFO'] = call_context['path_info']
603 environ['PATH_INFO'] = call_context['path_info']
617 self.set_env_from_config(environ, config)
604 self.set_env_from_config(environ, config)
618
605
619 content_type = environ.get('CONTENT_TYPE', '')
606 content_type = environ.get('CONTENT_TYPE', '')
620
607
621 path = environ['PATH_INFO']
608 path = environ['PATH_INFO']
622 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
609 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
623 log.debug(
610 log.debug(
624 'LFS: Detecting if request `%s` is LFS server path based '
611 'LFS: Detecting if request `%s` is LFS server path based '
625 'on content type:`%s`, is_lfs:%s',
612 'on content type:`%s`, is_lfs:%s',
626 path, content_type, is_lfs_request)
613 path, content_type, is_lfs_request)
627
614
628 if not is_lfs_request:
615 if not is_lfs_request:
629 # fallback detection by path
616 # fallback detection by path
630 if GIT_LFS_PROTO_PAT.match(path):
617 if GIT_LFS_PROTO_PAT.match(path):
631 is_lfs_request = True
618 is_lfs_request = True
632 log.debug(
619 log.debug(
633 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
620 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
634 path, is_lfs_request)
621 path, is_lfs_request)
635
622
636 if is_lfs_request:
623 if is_lfs_request:
637 app = scm_app.create_git_lfs_wsgi_app(
624 app = scm_app.create_git_lfs_wsgi_app(
638 repo_path, repo_name, config)
625 repo_path, repo_name, config)
639 else:
626 else:
640 app = scm_app.create_git_wsgi_app(
627 app = scm_app.create_git_wsgi_app(
641 repo_path, repo_name, config)
628 repo_path, repo_name, config)
642
629
643 log.debug('http-app: starting app handler '
630 log.debug('http-app: starting app handler '
644 'with %s and process request', app)
631 'with %s and process request', app)
645
632
646 return app(environ, start_response)
633 return app(environ, start_response)
647
634
648 return _git_stream
635 return _git_stream
649
636
650 def handle_vcs_exception(self, exception, request):
637 def handle_vcs_exception(self, exception, request):
651 _vcs_kind = getattr(exception, '_vcs_kind', '')
638 _vcs_kind = getattr(exception, '_vcs_kind', '')
652
639
653 if _vcs_kind == 'repo_locked':
640 if _vcs_kind == 'repo_locked':
654 headers_call_context = get_headers_call_context(request.environ)
641 headers_call_context = get_headers_call_context(request.environ)
655 status_code = safe_int(headers_call_context['locked_status_code'])
642 status_code = safe_int(headers_call_context['locked_status_code'])
656
643
657 return HTTPRepoLocked(
644 return HTTPRepoLocked(
658 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
645 title=str(exception), status_code=status_code, headers=[('X-Rc-Locked', '1')])
659
646
660 elif _vcs_kind == 'repo_branch_protected':
647 elif _vcs_kind == 'repo_branch_protected':
661 # Get custom repo-branch-protected status code if present.
648 # Get custom repo-branch-protected status code if present.
662 return HTTPRepoBranchProtected(
649 return HTTPRepoBranchProtected(
663 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
650 title=str(exception), headers=[('X-Rc-Branch-Protection', '1')])
664
651
665 exc_info = request.exc_info
652 exc_info = request.exc_info
666 store_exception(id(exc_info), exc_info)
653 store_exception(id(exc_info), exc_info)
667
654
668 traceback_info = 'unavailable'
655 traceback_info = 'unavailable'
669 if request.exc_info:
656 if request.exc_info:
670 traceback_info = format_exc(request.exc_info)
657 traceback_info = format_exc(request.exc_info)
671
658
672 log.error(
659 log.error(
673 'error occurred handling this request for path: %s, \n%s',
660 'error occurred handling this request for path: %s, \n%s',
674 request.path, traceback_info)
661 request.path, traceback_info)
675
662
676 statsd = request.registry.statsd
663 statsd = request.registry.statsd
677 if statsd:
664 if statsd:
678 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
665 exc_type = f"{exception.__class__.__module__}.{exception.__class__.__name__}"
679 statsd.incr('vcsserver_exception_total',
666 statsd.incr('vcsserver_exception_total',
680 tags=[f"type:{exc_type}"])
667 tags=[f"type:{exc_type}"])
681 raise exception
668 raise exception
682
669
683
670
684 class ResponseFilter:
671 class ResponseFilter:
685
672
686 def __init__(self, start_response):
673 def __init__(self, start_response):
687 self._start_response = start_response
674 self._start_response = start_response
688
675
689 def __call__(self, status, response_headers, exc_info=None):
676 def __call__(self, status, response_headers, exc_info=None):
690 headers = tuple(
677 headers = tuple(
691 (h, v) for h, v in response_headers
678 (h, v) for h, v in response_headers
692 if not wsgiref.util.is_hop_by_hop(h))
679 if not wsgiref.util.is_hop_by_hop(h))
693 return self._start_response(status, headers, exc_info)
680 return self._start_response(status, headers, exc_info)
694
681
695
682
696 def sanitize_settings_and_apply_defaults(global_config, settings):
683 def sanitize_settings_and_apply_defaults(global_config, settings):
697 _global_settings_maker = SettingsMaker(global_config)
684 _global_settings_maker = SettingsMaker(global_config)
698 settings_maker = SettingsMaker(settings)
685 settings_maker = SettingsMaker(settings)
699
686
700 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
687 settings_maker.make_setting('logging.autoconfigure', False, parser='bool')
701
688
702 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
689 logging_conf = os.path.join(os.path.dirname(global_config.get('__file__')), 'logging.ini')
703 settings_maker.enable_logging(logging_conf)
690 settings_maker.enable_logging(logging_conf)
704
691
705 # Default includes, possible to change as a user
692 # Default includes, possible to change as a user
706 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
693 pyramid_includes = settings_maker.make_setting('pyramid.includes', [], parser='list:newline')
707 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
694 log.debug("Using the following pyramid.includes: %s", pyramid_includes)
708
695
709 settings_maker.make_setting('__file__', global_config.get('__file__'))
696 settings_maker.make_setting('__file__', global_config.get('__file__'))
710
697
711 settings_maker.make_setting('pyramid.default_locale_name', 'en')
698 settings_maker.make_setting('pyramid.default_locale_name', 'en')
712 settings_maker.make_setting('locale', 'en_US.UTF-8')
699 settings_maker.make_setting('locale', 'en_US.UTF-8')
713
700
714 settings_maker.make_setting(
701 settings_maker.make_setting(
715 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
702 'core.binary_dir', '/usr/local/bin/rhodecode_bin/vcs_bin',
716 default_when_empty=True, parser='string:noquote')
703 default_when_empty=True, parser='string:noquote')
717
704
705 settings_maker.make_setting('vcs.svn.redis_conn', 'redis://redis:6379/0')
706
718 temp_store = tempfile.gettempdir()
707 temp_store = tempfile.gettempdir()
719 default_cache_dir = os.path.join(temp_store, 'rc_cache')
708 default_cache_dir = os.path.join(temp_store, 'rc_cache')
720 # save the default cache dir, and use it for all backends later.
709 # save the default cache dir, and use it for all backends later.
721 default_cache_dir = settings_maker.make_setting(
710 default_cache_dir = settings_maker.make_setting(
722 'cache_dir',
711 'cache_dir',
723 default=default_cache_dir, default_when_empty=True,
712 default=default_cache_dir, default_when_empty=True,
724 parser='dir:ensured')
713 parser='dir:ensured')
725
714
726 # exception store cache
715 # exception store cache
727 settings_maker.make_setting(
716 settings_maker.make_setting(
728 'exception_tracker.store_path',
717 'exception_tracker.store_path',
729 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
718 default=os.path.join(default_cache_dir, 'exc_store'), default_when_empty=True,
730 parser='dir:ensured'
719 parser='dir:ensured'
731 )
720 )
732
721
733 # repo_object cache defaults
722 # repo_object cache defaults
734 settings_maker.make_setting(
723 settings_maker.make_setting(
735 'rc_cache.repo_object.backend',
724 'rc_cache.repo_object.backend',
736 default='dogpile.cache.rc.file_namespace',
725 default='dogpile.cache.rc.file_namespace',
737 parser='string')
726 parser='string')
738 settings_maker.make_setting(
727 settings_maker.make_setting(
739 'rc_cache.repo_object.expiration_time',
728 'rc_cache.repo_object.expiration_time',
740 default=30 * 24 * 60 * 60, # 30days
729 default=30 * 24 * 60 * 60, # 30days
741 parser='int')
730 parser='int')
742 settings_maker.make_setting(
731 settings_maker.make_setting(
743 'rc_cache.repo_object.arguments.filename',
732 'rc_cache.repo_object.arguments.filename',
744 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
733 default=os.path.join(default_cache_dir, 'vcsserver_cache_repo_object.db'),
745 parser='string')
734 parser='string')
746
735
747 # statsd
736 # statsd
748 settings_maker.make_setting('statsd.enabled', False, parser='bool')
737 settings_maker.make_setting('statsd.enabled', False, parser='bool')
749 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
738 settings_maker.make_setting('statsd.statsd_host', 'statsd-exporter', parser='string')
750 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
739 settings_maker.make_setting('statsd.statsd_port', 9125, parser='int')
751 settings_maker.make_setting('statsd.statsd_prefix', '')
740 settings_maker.make_setting('statsd.statsd_prefix', '')
752 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
741 settings_maker.make_setting('statsd.statsd_ipv6', False, parser='bool')
753
742
754 settings_maker.env_expand()
743 settings_maker.env_expand()
755
744
756
745
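A minimal sketch of the defaulting behaviour above: after sanitization an otherwise empty settings dict gains the new `vcs.svn.redis_conn` key alongside the other defaults. It assumes an ini file (and an adjacent logging.ini) exists at the given path, which is a placeholder.

    # Sketch only, run in the context of this module; the ini path is an assumption.
    global_config = {'__file__': '/etc/rhodecode/vcsserver.ini'}
    settings = {}

    sanitize_settings_and_apply_defaults(global_config, settings)

    print(settings['vcs.svn.redis_conn'])   # -> 'redis://redis:6379/0' unless overridden in the ini
    print(settings['core.binary_dir'])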
757 def main(global_config, **settings):
746 def main(global_config, **settings):
758 start_time = time.time()
747 start_time = time.time()
759 log.info('Pyramid app config starting')
748 log.info('Pyramid app config starting')
760
749
761 if MercurialFactory:
750 if MercurialFactory:
762 hgpatches.patch_largefiles_capabilities()
751 hgpatches.patch_largefiles_capabilities()
763 hgpatches.patch_subrepo_type_mapping()
752 hgpatches.patch_subrepo_type_mapping()
764
753
765 # Fill in and sanitize the defaults & do ENV expansion
754 # Fill in and sanitize the defaults & do ENV expansion
766 sanitize_settings_and_apply_defaults(global_config, settings)
755 sanitize_settings_and_apply_defaults(global_config, settings)
767
756
768 # init and bootstrap StatsdClient
757 # init and bootstrap StatsdClient
769 StatsdClient.setup(settings)
758 StatsdClient.setup(settings)
770
759
771 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
760 pyramid_app = HTTPApplication(settings=settings, global_config=global_config).wsgi_app()
772 total_time = time.time() - start_time
761 total_time = time.time() - start_time
773 log.info('Pyramid app created and configured in %.2fs', total_time)
762 log.info('Pyramid app created and configured in %.2fs', total_time)
774 return pyramid_app
763 return pyramid_app
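For illustration, a sketch of bootstrapping the `main()` factory above outside of a PasteDeploy pipeline; the waitress dependency, ini path and port are assumptions rather than part of this changeset.

    # Sketch: build and serve the Pyramid app returned by main().
    from waitress import serve

    global_config = {'__file__': '/etc/rhodecode/vcsserver.ini'}   # assumed ini path
    settings = {'locale': 'en_US.UTF-8'}

    app = main(global_config, **settings)
    serve(app, host='127.0.0.1', port=9900)                        # port is an assumption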
@@ -1,40 +1,58 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import os
18 import os
19 import vcsserver
20 import vcsserver.settings
19
21
20
22
21 def get_config(ini_path, **kwargs):
23 def get_config(ini_path, **kwargs):
22 import configparser
24 import configparser
23 parser = configparser.ConfigParser(**kwargs)
25 parser = configparser.ConfigParser(**kwargs)
24 parser.read(ini_path)
26 parser.read(ini_path)
25 return parser
27 return parser
26
28
27
29
28 def get_app_config_lightweight(ini_path):
30 def get_app_config_lightweight(ini_path):
29 parser = get_config(ini_path)
31 parser = get_config(ini_path)
30 parser.set('app:main', 'here', os.getcwd())
32 parser.set('app:main', 'here', os.getcwd())
31 parser.set('app:main', '__file__', ini_path)
33 parser.set('app:main', '__file__', ini_path)
32 return dict(parser.items('app:main'))
34 return dict(parser.items('app:main'))
33
35
34
36
35 def get_app_config(ini_path):
37 def get_app_config(ini_path):
36 """
38 """
37 This loads the app context and provides a heavy-weight initialization of the config
39 This loads the app context and provides a heavy-weight initialization of the config
38 """
40 """
39 from paste.deploy.loadwsgi import appconfig
41 from paste.deploy.loadwsgi import appconfig
40 return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
42 return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
43
44
45 def configure_and_store_settings(global_config, app_settings):
46 """
47 Configure the settings module.
48 """
49 settings_merged = global_config.copy()
50 settings_merged.update(app_settings)
51
52 binary_dir = app_settings['core.binary_dir']
53
54 vcsserver.settings.BINARY_DIR = binary_dir
55
56 # Store the settings to make them available to other modules.
57 vcsserver.PYRAMID_SETTINGS = settings_merged
58 vcsserver.CONFIG = settings_merged
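A minimal usage sketch of the new `configure_and_store_settings` helper: after the call the merged settings become reachable module-wide via `vcsserver.CONFIG`. The paths used are placeholders, not values from this changeset.

    # Sketch only; assumes this helper's module has been imported as shown in the diff.
    import vcsserver

    global_config = {'__file__': '/etc/rhodecode/vcsserver.ini'}               # assumed path
    app_settings = {'core.binary_dir': '/usr/local/bin/rhodecode_bin/vcs_bin'}

    configure_and_store_settings(global_config, app_settings)

    assert vcsserver.CONFIG['core.binary_dir'] == app_settings['core.binary_dir']
    assert vcsserver.settings.BINARY_DIR == app_settings['core.binary_dir']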
@@ -1,954 +1,959 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
32 import svn.client # noqa
31 import svn.client # noqa
33 import svn.core # noqa
32 import svn.core # noqa
34 import svn.delta # noqa
33 import svn.delta # noqa
35 import svn.diff # noqa
34 import svn.diff # noqa
36 import svn.fs # noqa
35 import svn.fs # noqa
37 import svn.repos # noqa
36 import svn.repos # noqa
38
37
39 import rhodecode
38 import rhodecode
40 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver import svn_diff, exceptions, subprocessio, settings
41 from vcsserver.base import (
40 from vcsserver.base import (
42 RepoFactory,
41 RepoFactory,
43 raise_from_original,
42 raise_from_original,
44 ArchiveNode,
43 ArchiveNode,
45 store_archive_in_cache,
44 store_archive_in_cache,
46 BytesEnvelope,
45 BytesEnvelope,
47 BinaryEnvelope,
46 BinaryEnvelope,
48 )
47 )
49 from vcsserver.exceptions import NoContentException
48 from vcsserver.exceptions import NoContentException
49 from vcsserver.vcs_base import RemoteBase
50 from vcsserver.lib.str_utils import safe_str, safe_bytes
50 from vcsserver.lib.str_utils import safe_str, safe_bytes
51 from vcsserver.lib.type_utils import assert_bytes
51 from vcsserver.lib.type_utils import assert_bytes
52 from vcsserver.vcs_base import RemoteBase
53 from vcsserver.lib.svnremoterepo import svnremoterepo
52 from vcsserver.lib.svnremoterepo import svnremoterepo
53 from vcsserver.lib.svn_txn_utils import store_txn_id_data
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 svn_compatible_versions_map = {
58 svn_compatible_versions_map = {
59 'pre-1.4-compatible': '1.3',
59 'pre-1.4-compatible': '1.3',
60 'pre-1.5-compatible': '1.4',
60 'pre-1.5-compatible': '1.4',
61 'pre-1.6-compatible': '1.5',
61 'pre-1.6-compatible': '1.5',
62 'pre-1.8-compatible': '1.7',
62 'pre-1.8-compatible': '1.7',
63 'pre-1.9-compatible': '1.8',
63 'pre-1.9-compatible': '1.8',
64 }
64 }
65
65
66 current_compatible_version = '1.14'
66 current_compatible_version = '1.14'
67
67
68
68
69 def reraise_safe_exceptions(func):
69 def reraise_safe_exceptions(func):
70 """Decorator for converting svn exceptions to something neutral."""
70 """Decorator for converting svn exceptions to something neutral."""
71 def wrapper(*args, **kwargs):
71 def wrapper(*args, **kwargs):
72 try:
72 try:
73 return func(*args, **kwargs)
73 return func(*args, **kwargs)
74 except Exception as e:
74 except Exception as e:
75 if not hasattr(e, '_vcs_kind'):
75 if not hasattr(e, '_vcs_kind'):
76 log.exception("Unhandled exception in svn remote call")
76 log.exception("Unhandled exception in svn remote call")
77 raise_from_original(exceptions.UnhandledException(e), e)
77 raise_from_original(exceptions.UnhandledException(e), e)
78 raise
78 raise
79 return wrapper
79 return wrapper
80
80
81
81
82 class SubversionFactory(RepoFactory):
82 class SubversionFactory(RepoFactory):
83 repo_type = 'svn'
83 repo_type = 'svn'
84
84
85 def _create_repo(self, wire, create, compatible_version):
85 def _create_repo(self, wire, create, compatible_version):
86 path = svn.core.svn_path_canonicalize(wire['path'])
86 path = svn.core.svn_path_canonicalize(wire['path'])
87 if create:
87 if create:
88 fs_config = {'compatible-version': current_compatible_version}
88 fs_config = {'compatible-version': current_compatible_version}
89 if compatible_version:
89 if compatible_version:
90
90
91 compatible_version_string = \
91 compatible_version_string = \
92 svn_compatible_versions_map.get(compatible_version) \
92 svn_compatible_versions_map.get(compatible_version) \
93 or compatible_version
93 or compatible_version
94 fs_config['compatible-version'] = compatible_version_string
94 fs_config['compatible-version'] = compatible_version_string
95
95
96 log.debug('Create SVN repo with config `%s`', fs_config)
96 log.debug('Create SVN repo with config `%s`', fs_config)
97 repo = svn.repos.create(path, "", "", None, fs_config)
97 repo = svn.repos.create(path, "", "", None, fs_config)
98 else:
98 else:
99 repo = svn.repos.open(path)
99 repo = svn.repos.open(path)
100
100
101 log.debug('repository created: got SVN object: %s', repo)
101 log.debug('repository created: got SVN object: %s', repo)
102 return repo
102 return repo
103
103
104 def repo(self, wire, create=False, compatible_version=None):
104 def repo(self, wire, create=False, compatible_version=None):
105 """
105 """
106 Get a repository instance for the given path.
106 Get a repository instance for the given path.
107 """
107 """
108 return self._create_repo(wire, create, compatible_version)
108 return self._create_repo(wire, create, compatible_version)
109
109
110
110
111 NODE_TYPE_MAPPING = {
111 NODE_TYPE_MAPPING = {
112 svn.core.svn_node_file: 'file',
112 svn.core.svn_node_file: 'file',
113 svn.core.svn_node_dir: 'dir',
113 svn.core.svn_node_dir: 'dir',
114 }
114 }
115
115
116
116
117 class SvnRemote(RemoteBase):
117 class SvnRemote(RemoteBase):
118
118
119 def __init__(self, factory, hg_factory=None):
119 def __init__(self, factory, hg_factory=None):
120 self._factory = factory
120 self._factory = factory
121
121
122 self._bulk_methods = {
122 self._bulk_methods = {
123 # NOT supported in SVN ATM...
123 # NOT supported in SVN ATM...
124 }
124 }
125 self._bulk_file_methods = {
125 self._bulk_file_methods = {
126 "size": self.get_file_size,
126 "size": self.get_file_size,
127 "data": self.get_file_content,
127 "data": self.get_file_content,
128 "flags": self.get_node_type,
128 "flags": self.get_node_type,
129 "is_binary": self.is_binary,
129 "is_binary": self.is_binary,
130 "md5": self.md5_hash
130 "md5": self.md5_hash
131 }
131 }
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def bulk_file_request(self, wire, commit_id, path, pre_load):
134 def bulk_file_request(self, wire, commit_id, path, pre_load):
135 cache_on, context_uid, repo_id = self._cache_on(wire)
135 cache_on, context_uid, repo_id = self._cache_on(wire)
136 region = self._region(wire)
136 region = self._region(wire)
137
137
138 # since we use a unified API, we need to cast from str to int for SVN
139 # since we use a unified API, we need to cast from str to int for SVN
139 commit_id = int(commit_id)
139 commit_id = int(commit_id)
140
140
141 @region.conditional_cache_on_arguments(condition=cache_on)
141 @region.conditional_cache_on_arguments(condition=cache_on)
142 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
142 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
143 result = {}
143 result = {}
144 for attr in pre_load:
144 for attr in pre_load:
145 try:
145 try:
146 method = self._bulk_file_methods[attr]
146 method = self._bulk_file_methods[attr]
147 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
147 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
148 result[attr] = method(wire, _commit_id, _path)
148 result[attr] = method(wire, _commit_id, _path)
149 except KeyError as e:
149 except KeyError as e:
150 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
150 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
151 return result
151 return result
152
152
153 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
153 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
154
154
155 @reraise_safe_exceptions
155 @reraise_safe_exceptions
156 def discover_svn_version(self):
156 def discover_svn_version(self):
157 try:
157 try:
158 import svn.core
158 import svn.core
159 svn_ver = svn.core.SVN_VERSION
159 svn_ver = svn.core.SVN_VERSION
160 except ImportError:
160 except ImportError:
161 svn_ver = None
161 svn_ver = None
162 return safe_str(svn_ver)
162 return safe_str(svn_ver)
163
163
164 @reraise_safe_exceptions
164 @reraise_safe_exceptions
165 def is_empty(self, wire):
165 def is_empty(self, wire):
166 try:
166 try:
167 return self.lookup(wire, -1) == 0
167 return self.lookup(wire, -1) == 0
168 except Exception:
168 except Exception:
169 log.exception("failed to read object_store")
169 log.exception("failed to read object_store")
170 return False
170 return False
171
171
172 def check_url(self, url, config):
172 def check_url(self, url, config):
173
173
174 # the uuid function only returns a valid UUID for a proper repo, otherwise
174 # the uuid function only returns a valid UUID for a proper repo, otherwise
175 # it throws an exception
175 # it throws an exception
176 username, password, src_url = self.get_url_and_credentials(url)
176 username, password, src_url = self.get_url_and_credentials(url)
177 try:
177 try:
178 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
178 svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
179 except Exception:
179 except Exception:
180 tb = traceback.format_exc()
180 tb = traceback.format_exc()
181 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
181 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
182 raise URLError(f'"{url}" is not a valid Subversion source url.')
182 raise URLError(f'"{url}" is not a valid Subversion source url.')
183 return True
183 return True
184
184
185 def is_path_valid_repository(self, wire, path):
185 def is_path_valid_repository(self, wire, path):
186 # NOTE(marcink): short circuit the check for SVN repo
186 # NOTE(marcink): short circuit the check for SVN repo
187 # the repos.open might be expensive to check, but we have one cheap
187 # the repos.open might be expensive to check, but we have one cheap
188 # pre-condition that we can use, to check for 'format' file
188 # pre-condition that we can use, to check for 'format' file
189 if not os.path.isfile(os.path.join(path, 'format')):
189 if not os.path.isfile(os.path.join(path, 'format')):
190 return False
190 return False
191
191
192 cache_on, context_uid, repo_id = self._cache_on(wire)
192 cache_on, context_uid, repo_id = self._cache_on(wire)
193 region = self._region(wire)
193 region = self._region(wire)
194
194
195 @region.conditional_cache_on_arguments(condition=cache_on)
195 @region.conditional_cache_on_arguments(condition=cache_on)
196 def _assert_correct_path(_context_uid, _repo_id, fast_check):
196 def _assert_correct_path(_context_uid, _repo_id, fast_check):
197
197
198 try:
198 try:
199 svn.repos.open(path)
199 svn.repos.open(path)
200 except svn.core.SubversionException:
200 except svn.core.SubversionException:
201 tb = traceback.format_exc()
201 tb = traceback.format_exc()
202 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
202 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
203 return False
203 return False
204 return True
204 return True
205
205
206 return _assert_correct_path(context_uid, repo_id, True)
206 return _assert_correct_path(context_uid, repo_id, True)
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def verify(self, wire,):
209 def verify(self, wire,):
210 repo_path = wire['path']
210 repo_path = wire['path']
211 if not self.is_path_valid_repository(wire, repo_path):
211 if not self.is_path_valid_repository(wire, repo_path):
212 raise Exception(
212 raise Exception(
213 f"Path {repo_path} is not a valid Subversion repository.")
213 f"Path {repo_path} is not a valid Subversion repository.")
214
214
215 cmd = ['svnadmin', 'info', repo_path]
215 cmd = ['svnadmin', 'info', repo_path]
216 stdout, stderr = subprocessio.run_command(cmd)
216 stdout, stderr = subprocessio.run_command(cmd)
217 return stdout
217 return stdout
218
218
219 @reraise_safe_exceptions
219 @reraise_safe_exceptions
220 def lookup(self, wire, revision):
220 def lookup(self, wire, revision):
221 if revision not in [-1, None, 'HEAD']:
221 if revision not in [-1, None, 'HEAD']:
222 raise NotImplementedError
222 raise NotImplementedError
223 repo = self._factory.repo(wire)
223 repo = self._factory.repo(wire)
224 fs_ptr = svn.repos.fs(repo)
224 fs_ptr = svn.repos.fs(repo)
225 head = svn.fs.youngest_rev(fs_ptr)
225 head = svn.fs.youngest_rev(fs_ptr)
226 return head
226 return head
227
227
228 @reraise_safe_exceptions
228 @reraise_safe_exceptions
229 def lookup_interval(self, wire, start_ts, end_ts):
229 def lookup_interval(self, wire, start_ts, end_ts):
230 repo = self._factory.repo(wire)
230 repo = self._factory.repo(wire)
231 fsobj = svn.repos.fs(repo)
231 fsobj = svn.repos.fs(repo)
232 start_rev = None
232 start_rev = None
233 end_rev = None
233 end_rev = None
234 if start_ts:
234 if start_ts:
235 start_ts_svn = apr_time_t(start_ts)
235 start_ts_svn = apr_time_t(start_ts)
236 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
236 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
237 else:
237 else:
238 start_rev = 1
238 start_rev = 1
239 if end_ts:
239 if end_ts:
240 end_ts_svn = apr_time_t(end_ts)
240 end_ts_svn = apr_time_t(end_ts)
241 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
241 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
242 else:
242 else:
243 end_rev = svn.fs.youngest_rev(fsobj)
243 end_rev = svn.fs.youngest_rev(fsobj)
244 return start_rev, end_rev
244 return start_rev, end_rev
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def revision_properties(self, wire, revision):
247 def revision_properties(self, wire, revision):
248
248
249 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 region = self._region(wire)
250 region = self._region(wire)
251
251
252 @region.conditional_cache_on_arguments(condition=cache_on)
252 @region.conditional_cache_on_arguments(condition=cache_on)
253 def _revision_properties(_repo_id, _revision):
253 def _revision_properties(_repo_id, _revision):
254 repo = self._factory.repo(wire)
254 repo = self._factory.repo(wire)
255 fs_ptr = svn.repos.fs(repo)
255 fs_ptr = svn.repos.fs(repo)
256 return svn.fs.revision_proplist(fs_ptr, revision)
256 return svn.fs.revision_proplist(fs_ptr, revision)
257 return _revision_properties(repo_id, revision)
257 return _revision_properties(repo_id, revision)
258
258
259 def revision_changes(self, wire, revision):
259 def revision_changes(self, wire, revision):
260
260
261 repo = self._factory.repo(wire)
261 repo = self._factory.repo(wire)
262 fsobj = svn.repos.fs(repo)
262 fsobj = svn.repos.fs(repo)
263 rev_root = svn.fs.revision_root(fsobj, revision)
263 rev_root = svn.fs.revision_root(fsobj, revision)
264
264
265 editor = svn.repos.ChangeCollector(fsobj, rev_root)
265 editor = svn.repos.ChangeCollector(fsobj, rev_root)
266 editor_ptr, editor_baton = svn.delta.make_editor(editor)
266 editor_ptr, editor_baton = svn.delta.make_editor(editor)
267 base_dir = ""
267 base_dir = ""
268 send_deltas = False
268 send_deltas = False
269 svn.repos.replay2(
269 svn.repos.replay2(
270 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
270 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
271 editor_ptr, editor_baton, None)
271 editor_ptr, editor_baton, None)
272
272
273 added = []
273 added = []
274 changed = []
274 changed = []
275 removed = []
275 removed = []
276
276
277 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
277 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
278 for path, change in editor.changes.items():
278 for path, change in editor.changes.items():
279 # TODO: Decide what to do with directory nodes. Subversion can add
279 # TODO: Decide what to do with directory nodes. Subversion can add
280 # empty directories.
280 # empty directories.
281
281
282 if change.item_kind == svn.core.svn_node_dir:
282 if change.item_kind == svn.core.svn_node_dir:
283 continue
283 continue
284 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
284 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
285 added.append(path)
285 added.append(path)
286 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
286 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
287 svn.repos.CHANGE_ACTION_REPLACE]:
287 svn.repos.CHANGE_ACTION_REPLACE]:
288 changed.append(path)
288 changed.append(path)
289 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
289 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
290 removed.append(path)
290 removed.append(path)
291 else:
291 else:
292 raise NotImplementedError(
292 raise NotImplementedError(
293 "Action {} not supported on path {}".format(
293 "Action {} not supported on path {}".format(
294 change.action, path))
294 change.action, path))
295
295
296 changes = {
296 changes = {
297 'added': added,
297 'added': added,
298 'changed': changed,
298 'changed': changed,
299 'removed': removed,
299 'removed': removed,
300 }
300 }
301 return changes
301 return changes
302
302
303 @reraise_safe_exceptions
303 @reraise_safe_exceptions
304 def node_history(self, wire, path, revision, limit):
304 def node_history(self, wire, path, revision, limit):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self._region(wire)
306 region = self._region(wire)
307
307
308 @region.conditional_cache_on_arguments(condition=cache_on)
308 @region.conditional_cache_on_arguments(condition=cache_on)
309 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
309 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
310 cross_copies = False
310 cross_copies = False
311 repo = self._factory.repo(wire)
311 repo = self._factory.repo(wire)
312 fsobj = svn.repos.fs(repo)
312 fsobj = svn.repos.fs(repo)
313 rev_root = svn.fs.revision_root(fsobj, revision)
313 rev_root = svn.fs.revision_root(fsobj, revision)
314
314
315 history_revisions = []
315 history_revisions = []
316 history = svn.fs.node_history(rev_root, path)
316 history = svn.fs.node_history(rev_root, path)
317 history = svn.fs.history_prev(history, cross_copies)
317 history = svn.fs.history_prev(history, cross_copies)
318 while history:
318 while history:
319 __, node_revision = svn.fs.history_location(history)
319 __, node_revision = svn.fs.history_location(history)
320 history_revisions.append(node_revision)
320 history_revisions.append(node_revision)
321 if limit and len(history_revisions) >= limit:
321 if limit and len(history_revisions) >= limit:
322 break
322 break
323 history = svn.fs.history_prev(history, cross_copies)
323 history = svn.fs.history_prev(history, cross_copies)
324 return history_revisions
324 return history_revisions
325 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
325 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
326
326
327 @reraise_safe_exceptions
327 @reraise_safe_exceptions
328 def node_properties(self, wire, path, revision):
328 def node_properties(self, wire, path, revision):
329 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 region = self._region(wire)
330 region = self._region(wire)
331
331
332 @region.conditional_cache_on_arguments(condition=cache_on)
332 @region.conditional_cache_on_arguments(condition=cache_on)
333 def _node_properties(_repo_id, _path, _revision):
333 def _node_properties(_repo_id, _path, _revision):
334 repo = self._factory.repo(wire)
334 repo = self._factory.repo(wire)
335 fsobj = svn.repos.fs(repo)
335 fsobj = svn.repos.fs(repo)
336 rev_root = svn.fs.revision_root(fsobj, revision)
336 rev_root = svn.fs.revision_root(fsobj, revision)
337 return svn.fs.node_proplist(rev_root, path)
337 return svn.fs.node_proplist(rev_root, path)
338 return _node_properties(repo_id, path, revision)
338 return _node_properties(repo_id, path, revision)
339
339
340 def file_annotate(self, wire, path, revision):
340 def file_annotate(self, wire, path, revision):
341 abs_path = 'file://' + urllib.request.pathname2url(
341 abs_path = 'file://' + urllib.request.pathname2url(
342 vcspath.join(wire['path'], path))
342 vcspath.join(wire['path'], path))
343 file_uri = svn.core.svn_path_canonicalize(abs_path)
343 file_uri = svn.core.svn_path_canonicalize(abs_path)
344
344
345 start_rev = svn_opt_revision_value_t(0)
345 start_rev = svn_opt_revision_value_t(0)
346 peg_rev = svn_opt_revision_value_t(revision)
346 peg_rev = svn_opt_revision_value_t(revision)
347 end_rev = peg_rev
347 end_rev = peg_rev
348
348
349 annotations = []
349 annotations = []
350
350
351 def receiver(line_no, revision, author, date, line, pool):
351 def receiver(line_no, revision, author, date, line, pool):
352 annotations.append((line_no, revision, line))
352 annotations.append((line_no, revision, line))
353
353
354 # TODO: Cannot use blame5, missing typemap function in the swig code
354 # TODO: Cannot use blame5, missing typemap function in the swig code
355 try:
355 try:
356 svn.client.blame2(
356 svn.client.blame2(
357 file_uri, peg_rev, start_rev, end_rev,
357 file_uri, peg_rev, start_rev, end_rev,
358 receiver, svn.client.create_context())
358 receiver, svn.client.create_context())
359 except svn.core.SubversionException as exc:
359 except svn.core.SubversionException as exc:
360 log.exception("Error during blame operation.")
360 log.exception("Error during blame operation.")
361 raise Exception(
361 raise Exception(
362 f"Blame not supported or file does not exist at path {path}. "
362 f"Blame not supported or file does not exist at path {path}. "
363 f"Error {exc}.")
363 f"Error {exc}.")
364
364
365 return BinaryEnvelope(annotations)
365 return BinaryEnvelope(annotations)
366
366
367 @reraise_safe_exceptions
367 @reraise_safe_exceptions
368 def get_node_type(self, wire, revision=None, path=''):
368 def get_node_type(self, wire, revision=None, path=''):
369
369
370 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
371 region = self._region(wire)
371 region = self._region(wire)
372
372
373 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
374 def _get_node_type(_repo_id, _revision, _path):
374 def _get_node_type(_repo_id, _revision, _path):
375 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
376 fs_ptr = svn.repos.fs(repo)
376 fs_ptr = svn.repos.fs(repo)
377 if _revision is None:
377 if _revision is None:
378 _revision = svn.fs.youngest_rev(fs_ptr)
378 _revision = svn.fs.youngest_rev(fs_ptr)
379 root = svn.fs.revision_root(fs_ptr, _revision)
379 root = svn.fs.revision_root(fs_ptr, _revision)
380 node = svn.fs.check_path(root, path)
380 node = svn.fs.check_path(root, path)
381 return NODE_TYPE_MAPPING.get(node, None)
381 return NODE_TYPE_MAPPING.get(node, None)
382 return _get_node_type(repo_id, revision, path)
382 return _get_node_type(repo_id, revision, path)
383
383
384 @reraise_safe_exceptions
384 @reraise_safe_exceptions
385 def get_nodes(self, wire, revision=None, path=''):
385 def get_nodes(self, wire, revision=None, path=''):
386
386
387 cache_on, context_uid, repo_id = self._cache_on(wire)
387 cache_on, context_uid, repo_id = self._cache_on(wire)
388 region = self._region(wire)
388 region = self._region(wire)
389
389
390 @region.conditional_cache_on_arguments(condition=cache_on)
390 @region.conditional_cache_on_arguments(condition=cache_on)
391 def _get_nodes(_repo_id, _path, _revision):
391 def _get_nodes(_repo_id, _path, _revision):
392 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
393 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
394 if _revision is None:
394 if _revision is None:
395 _revision = svn.fs.youngest_rev(fsobj)
395 _revision = svn.fs.youngest_rev(fsobj)
396 root = svn.fs.revision_root(fsobj, _revision)
396 root = svn.fs.revision_root(fsobj, _revision)
397 entries = svn.fs.dir_entries(root, path)
397 entries = svn.fs.dir_entries(root, path)
398 result = []
398 result = []
399 for entry_path, entry_info in entries.items():
399 for entry_path, entry_info in entries.items():
400 result.append(
400 result.append(
401 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
401 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
402 return result
402 return result
403 return _get_nodes(repo_id, path, revision)
403 return _get_nodes(repo_id, path, revision)
404
404
405 @reraise_safe_exceptions
405 @reraise_safe_exceptions
406 def get_file_content(self, wire, rev=None, path=''):
406 def get_file_content(self, wire, rev=None, path=''):
407 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
408 fsobj = svn.repos.fs(repo)
408 fsobj = svn.repos.fs(repo)
409
409
410 if rev is None:
410 if rev is None:
411 rev = svn.fs.youngest_rev(fsobj)
411 rev = svn.fs.youngest_rev(fsobj)
412
412
413 root = svn.fs.revision_root(fsobj, rev)
413 root = svn.fs.revision_root(fsobj, rev)
414 content = svn.core.Stream(svn.fs.file_contents(root, path))
414 content = svn.core.Stream(svn.fs.file_contents(root, path))
415 return BytesEnvelope(content.read())
415 return BytesEnvelope(content.read())
416
416
417 @reraise_safe_exceptions
417 @reraise_safe_exceptions
418 def get_file_size(self, wire, revision=None, path=''):
418 def get_file_size(self, wire, revision=None, path=''):
419
419
420 cache_on, context_uid, repo_id = self._cache_on(wire)
420 cache_on, context_uid, repo_id = self._cache_on(wire)
421 region = self._region(wire)
421 region = self._region(wire)
422
422
423 @region.conditional_cache_on_arguments(condition=cache_on)
423 @region.conditional_cache_on_arguments(condition=cache_on)
424 def _get_file_size(_repo_id, _revision, _path):
424 def _get_file_size(_repo_id, _revision, _path):
425 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
426 fsobj = svn.repos.fs(repo)
426 fsobj = svn.repos.fs(repo)
427 if _revision is None:
427 if _revision is None:
428 _revision = svn.fs.youngest_rev(fsobj)
428 _revision = svn.fs.youngest_rev(fsobj)
429 root = svn.fs.revision_root(fsobj, _revision)
429 root = svn.fs.revision_root(fsobj, _revision)
430 size = svn.fs.file_length(root, path)
430 size = svn.fs.file_length(root, path)
431 return size
431 return size
432 return _get_file_size(repo_id, revision, path)
432 return _get_file_size(repo_id, revision, path)
433
433
434 def create_repository(self, wire, compatible_version=None):
434 def create_repository(self, wire, compatible_version=None):
435 log.info('Creating Subversion repository in path "%s"', wire['path'])
435 log.info('Creating Subversion repository in path "%s"', wire['path'])
436 self._factory.repo(wire, create=True,
436 self._factory.repo(wire, create=True,
437 compatible_version=compatible_version)
437 compatible_version=compatible_version)
438
438
439 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
439 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
440 obj = urllib.parse.urlparse(src_url)
440 obj = urllib.parse.urlparse(src_url)
441 username = obj.username or ''
441 username = obj.username or ''
442 password = obj.password or ''
442 password = obj.password or ''
443 return username, password, src_url
443 return username, password, src_url
444
444
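For illustration, a minimal sketch of what get_url_and_credentials extracts; the URL below is hypothetical, and note that the source URL is returned unchanged, credentials included:

import urllib.parse

src_url = 'https://alice:secret@svn.example.com/repos/project'  # hypothetical URL
obj = urllib.parse.urlparse(src_url)
username = obj.username or ''  # -> 'alice'
password = obj.password or ''  # -> 'secret'
# import_remote_repository below then passes these to svnrdump via --username/--password.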
445 def import_remote_repository(self, wire, src_url):
445 def import_remote_repository(self, wire, src_url):
446 repo_path = wire['path']
446 repo_path = wire['path']
447 if not self.is_path_valid_repository(wire, repo_path):
447 if not self.is_path_valid_repository(wire, repo_path):
448 raise Exception(
448 raise Exception(
449 f"Path {repo_path} is not a valid Subversion repository.")
449 f"Path {repo_path} is not a valid Subversion repository.")
450
450
451 username, password, src_url = self.get_url_and_credentials(src_url)
451 username, password, src_url = self.get_url_and_credentials(src_url)
452 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
452 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
453 '--trust-server-cert-failures=unknown-ca']
453 '--trust-server-cert-failures=unknown-ca']
454 if username and password:
454 if username and password:
455 rdump_cmd += ['--username', username, '--password', password]
455 rdump_cmd += ['--username', username, '--password', password]
456 rdump_cmd += [src_url]
456 rdump_cmd += [src_url]
457
457
458 rdump = subprocess.Popen(
458 rdump = subprocess.Popen(
459 rdump_cmd,
459 rdump_cmd,
460 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
460 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
461 load = subprocess.Popen(
461 load = subprocess.Popen(
462 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
462 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
463
463
464 # TODO: johbo: This can be a very long operation, might be better
464 # TODO: johbo: This can be a very long operation, might be better
465 # to track some kind of status and provide an api to check if the
465 # to track some kind of status and provide an api to check if the
466 # import is done.
466 # import is done.
467 rdump.wait()
467 rdump.wait()
468 load.wait()
468 load.wait()
469
469
470 log.debug('Return process ended with code: %s', rdump.returncode)
470 log.debug('Return process ended with code: %s', rdump.returncode)
471 if rdump.returncode != 0:
471 if rdump.returncode != 0:
472 errors = rdump.stderr.read()
472 errors = rdump.stderr.read()
473 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
473 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
474
474
475 reason = 'UNKNOWN'
475 reason = 'UNKNOWN'
476 if b'svnrdump: E230001:' in errors:
476 if b'svnrdump: E230001:' in errors:
477 reason = 'INVALID_CERTIFICATE'
477 reason = 'INVALID_CERTIFICATE'
478
478
479 if reason == 'UNKNOWN':
479 if reason == 'UNKNOWN':
480 reason = f'UNKNOWN:{safe_str(errors)}'
480 reason = f'UNKNOWN:{safe_str(errors)}'
481
481
482 raise Exception(
482 raise Exception(
483 'Failed to dump the remote repository from {}. Reason:{}'.format(
483 'Failed to dump the remote repository from {}. Reason:{}'.format(
484 src_url, reason))
484 src_url, reason))
485 if load.returncode != 0:
485 if load.returncode != 0:
486 raise Exception(
486 raise Exception(
487 f'Failed to load the dump of remote repository from {src_url}.')
487 f'Failed to load the dump of remote repository from {src_url}.')
488
488
489 def commit(self, wire, message, author, timestamp, updated, removed):
489 def commit(self, wire, message, author, timestamp, updated, removed):
490
490
491 message = safe_bytes(message)
491 message = safe_bytes(message)
492 author = safe_bytes(author)
492 author = safe_bytes(author)
493
493
494 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
495 fsobj = svn.repos.fs(repo)
495 fsobj = svn.repos.fs(repo)
496
496
497 rev = svn.fs.youngest_rev(fsobj)
497 rev = svn.fs.youngest_rev(fsobj)
498 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
498 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
499 txn_root = svn.fs.txn_root(txn)
499 txn_root = svn.fs.txn_root(txn)
500
500
501 for node in updated:
501 for node in updated:
502 TxnNodeProcessor(node, txn_root).update()
502 TxnNodeProcessor(node, txn_root).update()
503 for node in removed:
503 for node in removed:
504 TxnNodeProcessor(node, txn_root).remove()
504 TxnNodeProcessor(node, txn_root).remove()
505
505
506 svn_txn_id = safe_str(svn.fs.svn_fs_txn_name(txn))
507 full_repo_path = wire['path']
508 txn_id_data = {'svn_txn_id': svn_txn_id, 'rc_internal_commit': True}
509
510 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
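# NOTE: the txn-id metadata is stored *before* fs_commit_txn runs, presumably so
# that the svn hook/callback side can look the transaction up by its id and
# recognise this commit as internally generated ('rc_internal_commit': True).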
506 commit_id = svn.repos.fs_commit_txn(repo, txn)
511 commit_id = svn.repos.fs_commit_txn(repo, txn)
507
512
508 if timestamp:
513 if timestamp:
509 apr_time = apr_time_t(timestamp)
514 apr_time = apr_time_t(timestamp)
510 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
515 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
511 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
516 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
512
517
513 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
518 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
514 return commit_id
519 return commit_id
515
520
516 @reraise_safe_exceptions
521 @reraise_safe_exceptions
517 def diff(self, wire, rev1, rev2, path1=None, path2=None,
522 def diff(self, wire, rev1, rev2, path1=None, path2=None,
518 ignore_whitespace=False, context=3):
523 ignore_whitespace=False, context=3):
519
524
520 wire.update(cache=False)
525 wire.update(cache=False)
521 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
522 diff_creator = SvnDiffer(
527 diff_creator = SvnDiffer(
523 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
528 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
524 try:
529 try:
525 return BytesEnvelope(diff_creator.generate_diff())
530 return BytesEnvelope(diff_creator.generate_diff())
526 except svn.core.SubversionException as e:
531 except svn.core.SubversionException as e:
527 log.exception(
532 log.exception(
528 "Error during diff operation operation. "
533 "Error during diff operation operation. "
529 "Path might not exist %s, %s", path1, path2)
534 "Path might not exist %s, %s", path1, path2)
530 return BytesEnvelope(b'')
535 return BytesEnvelope(b'')
531
536
532 @reraise_safe_exceptions
537 @reraise_safe_exceptions
533 def is_large_file(self, wire, path):
538 def is_large_file(self, wire, path):
534 return False
539 return False
535
540
536 @reraise_safe_exceptions
541 @reraise_safe_exceptions
537 def is_binary(self, wire, rev, path):
542 def is_binary(self, wire, rev, path):
538 cache_on, context_uid, repo_id = self._cache_on(wire)
543 cache_on, context_uid, repo_id = self._cache_on(wire)
539 region = self._region(wire)
544 region = self._region(wire)
540
545
541 @region.conditional_cache_on_arguments(condition=cache_on)
546 @region.conditional_cache_on_arguments(condition=cache_on)
542 def _is_binary(_repo_id, _rev, _path):
547 def _is_binary(_repo_id, _rev, _path):
543 raw_bytes = self.get_file_content(wire, rev, path)
548 raw_bytes = self.get_file_content(wire, rev, path)
544 if not raw_bytes:
549 if not raw_bytes:
545 return False
550 return False
546 return b'\0' in raw_bytes
551 return b'\0' in raw_bytes
547
552
548 return _is_binary(repo_id, rev, path)
553 return _is_binary(repo_id, rev, path)
549
554
550 @reraise_safe_exceptions
555 @reraise_safe_exceptions
551 def md5_hash(self, wire, rev, path):
556 def md5_hash(self, wire, rev, path):
552 cache_on, context_uid, repo_id = self._cache_on(wire)
557 cache_on, context_uid, repo_id = self._cache_on(wire)
553 region = self._region(wire)
558 region = self._region(wire)
554
559
555 @region.conditional_cache_on_arguments(condition=cache_on)
560 @region.conditional_cache_on_arguments(condition=cache_on)
556 def _md5_hash(_repo_id, _rev, _path):
561 def _md5_hash(_repo_id, _rev, _path):
557 return ''
562 return ''
558
563
559 return _md5_hash(repo_id, rev, path)
564 return _md5_hash(repo_id, rev, path)
560
565
561 @reraise_safe_exceptions
566 @reraise_safe_exceptions
562 def run_svn_command(self, wire, cmd, **opts):
567 def run_svn_command(self, wire, cmd, **opts):
563 path = wire.get('path', None)
568 path = wire.get('path', None)
564 debug_mode = rhodecode.ConfigGet().get_bool('debug')
569 debug_mode = rhodecode.ConfigGet().get_bool('debug')
565
570
566 if path and os.path.isdir(path):
571 if path and os.path.isdir(path):
567 opts['cwd'] = path
572 opts['cwd'] = path
568
573
569 safe_call = opts.pop('_safe', False)
574 safe_call = opts.pop('_safe', False)
570
575
571 svnenv = os.environ.copy()
576 svnenv = os.environ.copy()
572 svnenv.update(opts.pop('extra_env', {}))
577 svnenv.update(opts.pop('extra_env', {}))
573
578
574 _opts = {'env': svnenv, 'shell': False}
579 _opts = {'env': svnenv, 'shell': False}
575
580
576 try:
581 try:
577 _opts.update(opts)
582 _opts.update(opts)
578 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
583 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
579
584
580 return b''.join(proc), b''.join(proc.stderr)
585 return b''.join(proc), b''.join(proc.stderr)
581 except OSError as err:
586 except OSError as err:
582 if safe_call:
587 if safe_call:
583 return '', safe_str(err).strip()
588 return '', safe_str(err).strip()
584 else:
589 else:
585 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
590 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
586 call_opts = {}
591 call_opts = {}
587 if debug_mode:
592 if debug_mode:
588 call_opts = _opts
593 call_opts = _opts
589
594
590 tb_err = ("Couldn't run svn command ({}).\n"
595 tb_err = ("Couldn't run svn command ({}).\n"
591 "Original error was:{}\n"
596 "Original error was:{}\n"
592 "Call options:{}\n"
597 "Call options:{}\n"
593 .format(cmd, err, call_opts))
598 .format(cmd, err, call_opts))
594 log.exception(tb_err)
599 log.exception(tb_err)
595 raise exceptions.VcsException()(tb_err)
600 raise exceptions.VcsException()(tb_err)
596
601
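A hedged usage sketch for run_svn_command; `remote` and the command below are hypothetical. The method returns the joined stdout and stderr as bytes, and with _safe=True an OSError is returned as an error string instead of being raised:

# `remote` would be an instance of this remote class, `wire` the usual
# {'path': ...} dict used by the other methods.
stdout, stderr = remote.run_svn_command(
    wire, ['svn', 'info', '--xml', wire['path']], _safe=True)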
597 @reraise_safe_exceptions
602 @reraise_safe_exceptions
598 def install_hooks(self, wire, force=False):
603 def install_hooks(self, wire, force=False):
599 from vcsserver.hook_utils import install_svn_hooks
604 from vcsserver.hook_utils import install_svn_hooks
600 repo_path = wire['path']
605 repo_path = wire['path']
601 binary_dir = settings.BINARY_DIR
606 binary_dir = settings.BINARY_DIR
602 executable = None
607 executable = None
603 if binary_dir:
608 if binary_dir:
604 executable = os.path.join(binary_dir, 'python3')
609 executable = os.path.join(binary_dir, 'python3')
605 return install_svn_hooks(repo_path, force_create=force)
610 return install_svn_hooks(repo_path, force_create=force)
606
611
607 @reraise_safe_exceptions
612 @reraise_safe_exceptions
608 def get_hooks_info(self, wire):
613 def get_hooks_info(self, wire):
609 from vcsserver.hook_utils import (
614 from vcsserver.hook_utils import (
610 get_svn_pre_hook_version, get_svn_post_hook_version)
615 get_svn_pre_hook_version, get_svn_post_hook_version)
611 repo_path = wire['path']
616 repo_path = wire['path']
612 return {
617 return {
613 'pre_version': get_svn_pre_hook_version(repo_path),
618 'pre_version': get_svn_pre_hook_version(repo_path),
614 'post_version': get_svn_post_hook_version(repo_path),
619 'post_version': get_svn_post_hook_version(repo_path),
615 }
620 }
616
621
617 @reraise_safe_exceptions
622 @reraise_safe_exceptions
618 def set_head_ref(self, wire, head_name):
623 def set_head_ref(self, wire, head_name):
619 pass
624 pass
620
625
621 @reraise_safe_exceptions
626 @reraise_safe_exceptions
622 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
627 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
623 archive_dir_name, commit_id, cache_config):
628 archive_dir_name, commit_id, cache_config):
624
629
625 def walk_tree(root, root_dir, _commit_id):
630 def walk_tree(root, root_dir, _commit_id):
626 """
631 """
627 Special recursive svn repo walker
632 Special recursive svn repo walker
628 """
633 """
629 root_dir = safe_bytes(root_dir)
634 root_dir = safe_bytes(root_dir)
630
635
631 filemode_default = 0o100644
636 filemode_default = 0o100644
632 filemode_executable = 0o100755
637 filemode_executable = 0o100755
633
638
634 file_iter = svn.fs.dir_entries(root, root_dir)
639 file_iter = svn.fs.dir_entries(root, root_dir)
635 for f_name in file_iter:
640 for f_name in file_iter:
636 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
641 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
637
642
638 if f_type == 'dir':
643 if f_type == 'dir':
639 # return only DIR, and then all entries in that dir
644 # return only DIR, and then all entries in that dir
640 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
645 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
641 new_root = os.path.join(root_dir, f_name)
646 new_root = os.path.join(root_dir, f_name)
642 yield from walk_tree(root, new_root, _commit_id)
647 yield from walk_tree(root, new_root, _commit_id)
643 else:
648 else:
644
649
645 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
650 f_path = os.path.join(root_dir, f_name).rstrip(b'/')
646 prop_list = svn.fs.node_proplist(root, f_path)
651 prop_list = svn.fs.node_proplist(root, f_path)
647
652
648 f_mode = filemode_default
653 f_mode = filemode_default
649 if prop_list.get('svn:executable'):
654 if prop_list.get('svn:executable'):
650 f_mode = filemode_executable
655 f_mode = filemode_executable
651
656
652 f_is_link = False
657 f_is_link = False
653 if prop_list.get('svn:special'):
658 if prop_list.get('svn:special'):
654 f_is_link = True
659 f_is_link = True
655
660
656 data = {
661 data = {
657 'is_link': f_is_link,
662 'is_link': f_is_link,
658 'mode': f_mode,
663 'mode': f_mode,
659 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
664 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
660 }
665 }
661
666
662 yield f_path, data, f_type
667 yield f_path, data, f_type
663
668
664 def file_walker(_commit_id, path):
669 def file_walker(_commit_id, path):
665 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
666 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
671 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
667
672
668 def no_content():
673 def no_content():
669 raise NoContentException()
674 raise NoContentException()
670
675
671 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
676 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
672 file_path = f_name
677 file_path = f_name
673
678
674 if f_type == 'dir':
679 if f_type == 'dir':
675 mode = f_data['mode']
680 mode = f_data['mode']
676 yield ArchiveNode(file_path, mode, False, no_content)
681 yield ArchiveNode(file_path, mode, False, no_content)
677 else:
682 else:
678 mode = f_data['mode']
683 mode = f_data['mode']
679 is_link = f_data['is_link']
684 is_link = f_data['is_link']
680 data_stream = f_data['content_stream']
685 data_stream = f_data['content_stream']
681 yield ArchiveNode(file_path, mode, is_link, data_stream)
686 yield ArchiveNode(file_path, mode, is_link, data_stream)
682
687
683 return store_archive_in_cache(
688 return store_archive_in_cache(
684 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
689 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
685
690
686
691
687 class SvnDiffer:
692 class SvnDiffer:
688 """
693 """
689 Utility to create diffs based on difflib and the Subversion api
694 Utility to create diffs based on difflib and the Subversion api
690 """
695 """
691
696
692 binary_content = False
697 binary_content = False
693
698
694 def __init__(
699 def __init__(
695 self, repo, src_rev, src_path, tgt_rev, tgt_path,
700 self, repo, src_rev, src_path, tgt_rev, tgt_path,
696 ignore_whitespace, context):
701 ignore_whitespace, context):
697 self.repo = repo
702 self.repo = repo
698 self.ignore_whitespace = ignore_whitespace
703 self.ignore_whitespace = ignore_whitespace
699 self.context = context
704 self.context = context
700
705
701 fsobj = svn.repos.fs(repo)
706 fsobj = svn.repos.fs(repo)
702
707
703 self.tgt_rev = tgt_rev
708 self.tgt_rev = tgt_rev
704 self.tgt_path = tgt_path or ''
709 self.tgt_path = tgt_path or ''
705 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
710 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
706 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
711 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
707
712
708 self.src_rev = src_rev
713 self.src_rev = src_rev
709 self.src_path = src_path or self.tgt_path
714 self.src_path = src_path or self.tgt_path
710 self.src_root = svn.fs.revision_root(fsobj, src_rev)
715 self.src_root = svn.fs.revision_root(fsobj, src_rev)
711 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
716 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
712
717
713 self._validate()
718 self._validate()
714
719
715 def _validate(self):
720 def _validate(self):
716 if (self.tgt_kind != svn.core.svn_node_none and
721 if (self.tgt_kind != svn.core.svn_node_none and
717 self.src_kind != svn.core.svn_node_none and
722 self.src_kind != svn.core.svn_node_none and
718 self.src_kind != self.tgt_kind):
723 self.src_kind != self.tgt_kind):
719 # TODO: johbo: proper error handling
724 # TODO: johbo: proper error handling
720 raise Exception(
725 raise Exception(
721 "Source and target are not compatible for diff generation. "
726 "Source and target are not compatible for diff generation. "
722 "Source type: %s, target type: %s" %
727 "Source type: %s, target type: %s" %
723 (self.src_kind, self.tgt_kind))
728 (self.src_kind, self.tgt_kind))
724
729
725 def generate_diff(self) -> bytes:
730 def generate_diff(self) -> bytes:
726 buf = io.BytesIO()
731 buf = io.BytesIO()
727 if self.tgt_kind == svn.core.svn_node_dir:
732 if self.tgt_kind == svn.core.svn_node_dir:
728 self._generate_dir_diff(buf)
733 self._generate_dir_diff(buf)
729 else:
734 else:
730 self._generate_file_diff(buf)
735 self._generate_file_diff(buf)
731 return buf.getvalue()
736 return buf.getvalue()
732
737
733 def _generate_dir_diff(self, buf: io.BytesIO):
738 def _generate_dir_diff(self, buf: io.BytesIO):
734 editor = DiffChangeEditor()
739 editor = DiffChangeEditor()
735 editor_ptr, editor_baton = svn.delta.make_editor(editor)
740 editor_ptr, editor_baton = svn.delta.make_editor(editor)
736 svn.repos.dir_delta2(
741 svn.repos.dir_delta2(
737 self.src_root,
742 self.src_root,
738 self.src_path,
743 self.src_path,
739 '', # src_entry
744 '', # src_entry
740 self.tgt_root,
745 self.tgt_root,
741 self.tgt_path,
746 self.tgt_path,
742 editor_ptr, editor_baton,
747 editor_ptr, editor_baton,
743 authorization_callback_allow_all,
748 authorization_callback_allow_all,
744 False, # text_deltas
749 False, # text_deltas
745 svn.core.svn_depth_infinity, # depth
750 svn.core.svn_depth_infinity, # depth
746 False, # entry_props
751 False, # entry_props
747 False, # ignore_ancestry
752 False, # ignore_ancestry
748 )
753 )
749
754
750 for path, __, change in sorted(editor.changes):
755 for path, __, change in sorted(editor.changes):
751 self._generate_node_diff(
756 self._generate_node_diff(
752 buf, change, path, self.tgt_path, path, self.src_path)
757 buf, change, path, self.tgt_path, path, self.src_path)
753
758
754 def _generate_file_diff(self, buf: io.BytesIO):
759 def _generate_file_diff(self, buf: io.BytesIO):
755 change = None
760 change = None
756 if self.src_kind == svn.core.svn_node_none:
761 if self.src_kind == svn.core.svn_node_none:
757 change = "add"
762 change = "add"
758 elif self.tgt_kind == svn.core.svn_node_none:
763 elif self.tgt_kind == svn.core.svn_node_none:
759 change = "delete"
764 change = "delete"
760 tgt_base, tgt_path = vcspath.split(self.tgt_path)
765 tgt_base, tgt_path = vcspath.split(self.tgt_path)
761 src_base, src_path = vcspath.split(self.src_path)
766 src_base, src_path = vcspath.split(self.src_path)
762 self._generate_node_diff(
767 self._generate_node_diff(
763 buf, change, tgt_path, tgt_base, src_path, src_base)
768 buf, change, tgt_path, tgt_base, src_path, src_base)
764
769
765 def _generate_node_diff(
770 def _generate_node_diff(
766 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
771 self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
767
772
768 tgt_path_bytes = safe_bytes(tgt_path)
773 tgt_path_bytes = safe_bytes(tgt_path)
769 tgt_path = safe_str(tgt_path)
774 tgt_path = safe_str(tgt_path)
770
775
771 src_path_bytes = safe_bytes(src_path)
776 src_path_bytes = safe_bytes(src_path)
772 src_path = safe_str(src_path)
777 src_path = safe_str(src_path)
773
778
774 if self.src_rev == self.tgt_rev and tgt_base == src_base:
779 if self.src_rev == self.tgt_rev and tgt_base == src_base:
775 # keep behaviour consistent with git/hg: return an empty diff when
780 # keep behaviour consistent with git/hg: return an empty diff when
776 # comparing the same revision of the same path
781 # comparing the same revision of the same path
777 return
782 return
778
783
779 tgt_full_path = vcspath.join(tgt_base, tgt_path)
784 tgt_full_path = vcspath.join(tgt_base, tgt_path)
780 src_full_path = vcspath.join(src_base, src_path)
785 src_full_path = vcspath.join(src_base, src_path)
781
786
782 self.binary_content = False
787 self.binary_content = False
783 mime_type = self._get_mime_type(tgt_full_path)
788 mime_type = self._get_mime_type(tgt_full_path)
784
789
785 if mime_type and not mime_type.startswith(b'text'):
790 if mime_type and not mime_type.startswith(b'text'):
786 self.binary_content = True
791 self.binary_content = True
787 buf.write(b"=" * 67 + b'\n')
792 buf.write(b"=" * 67 + b'\n')
788 buf.write(b"Cannot display: file marked as a binary type.\n")
793 buf.write(b"Cannot display: file marked as a binary type.\n")
789 buf.write(b"svn:mime-type = %s\n" % mime_type)
794 buf.write(b"svn:mime-type = %s\n" % mime_type)
790 buf.write(b"Index: %b\n" % tgt_path_bytes)
795 buf.write(b"Index: %b\n" % tgt_path_bytes)
791 buf.write(b"=" * 67 + b'\n')
796 buf.write(b"=" * 67 + b'\n')
792 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
797 buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))
793
798
794 if change == 'add':
799 if change == 'add':
795 # TODO: johbo: SVN is missing a zero here compared to git
800 # TODO: johbo: SVN is missing a zero here compared to git
796 buf.write(b"new file mode 10644\n")
801 buf.write(b"new file mode 10644\n")
797
802
798 # TODO(marcink): intro to binary detection of svn patches
803 # TODO(marcink): intro to binary detection of svn patches
799 # if self.binary_content:
804 # if self.binary_content:
800 # buf.write(b'GIT binary patch\n')
805 # buf.write(b'GIT binary patch\n')
801
806
802 buf.write(b"--- /dev/null\t(revision 0)\n")
807 buf.write(b"--- /dev/null\t(revision 0)\n")
803 src_lines = []
808 src_lines = []
804 else:
809 else:
805 if change == 'delete':
810 if change == 'delete':
806 buf.write(b"deleted file mode 10644\n")
811 buf.write(b"deleted file mode 10644\n")
807
812
808 # TODO(marcink): intro to binary detection of svn patches
813 # TODO(marcink): intro to binary detection of svn patches
809 # if self.binary_content:
814 # if self.binary_content:
810 # buf.write('GIT binary patch\n')
815 # buf.write('GIT binary patch\n')
811
816
812 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
817 buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
813 src_lines = self._svn_readlines(self.src_root, src_full_path)
818 src_lines = self._svn_readlines(self.src_root, src_full_path)
814
819
815 if change == 'delete':
820 if change == 'delete':
816 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
821 buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
817 tgt_lines = []
822 tgt_lines = []
818 else:
823 else:
819 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
824 buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
820 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
825 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
821
826
822 # we made our diff header, time to generate the diff content into our buffer
827 # we made our diff header, time to generate the diff content into our buffer
823
828
824 if not self.binary_content:
829 if not self.binary_content:
825 udiff = svn_diff.unified_diff(
830 udiff = svn_diff.unified_diff(
826 src_lines, tgt_lines, context=self.context,
831 src_lines, tgt_lines, context=self.context,
827 ignore_blank_lines=self.ignore_whitespace,
832 ignore_blank_lines=self.ignore_whitespace,
828 ignore_case=False,
833 ignore_case=False,
829 ignore_space_changes=self.ignore_whitespace)
834 ignore_space_changes=self.ignore_whitespace)
830
835
831 buf.writelines(udiff)
836 buf.writelines(udiff)
832
837
833 def _get_mime_type(self, path) -> bytes:
838 def _get_mime_type(self, path) -> bytes:
834 try:
839 try:
835 mime_type = svn.fs.node_prop(
840 mime_type = svn.fs.node_prop(
836 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
841 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
837 except svn.core.SubversionException:
842 except svn.core.SubversionException:
838 mime_type = svn.fs.node_prop(
843 mime_type = svn.fs.node_prop(
839 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
844 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
840 return mime_type
845 return mime_type
841
846
842 def _svn_readlines(self, fs_root, node_path):
847 def _svn_readlines(self, fs_root, node_path):
843 if self.binary_content:
848 if self.binary_content:
844 return []
849 return []
845 node_kind = svn.fs.check_path(fs_root, node_path)
850 node_kind = svn.fs.check_path(fs_root, node_path)
846 if node_kind not in (
851 if node_kind not in (
847 svn.core.svn_node_file, svn.core.svn_node_symlink):
852 svn.core.svn_node_file, svn.core.svn_node_symlink):
848 return []
853 return []
849 content = svn.core.Stream(
854 content = svn.core.Stream(
850 svn.fs.file_contents(fs_root, node_path)).read()
855 svn.fs.file_contents(fs_root, node_path)).read()
851
856
852 return content.splitlines(True)
857 return content.splitlines(True)
853
858
854
859
855 class DiffChangeEditor(svn.delta.Editor):
860 class DiffChangeEditor(svn.delta.Editor):
856 """
861 """
857 Records changes between two given revisions
862 Records changes between two given revisions
858 """
863 """
859
864
860 def __init__(self):
865 def __init__(self):
861 self.changes = []
866 self.changes = []
862
867
863 def delete_entry(self, path, revision, parent_baton, pool=None):
868 def delete_entry(self, path, revision, parent_baton, pool=None):
864 self.changes.append((path, None, 'delete'))
869 self.changes.append((path, None, 'delete'))
865
870
866 def add_file(
871 def add_file(
867 self, path, parent_baton, copyfrom_path, copyfrom_revision,
872 self, path, parent_baton, copyfrom_path, copyfrom_revision,
868 file_pool=None):
873 file_pool=None):
869 self.changes.append((path, 'file', 'add'))
874 self.changes.append((path, 'file', 'add'))
870
875
871 def open_file(self, path, parent_baton, base_revision, file_pool=None):
876 def open_file(self, path, parent_baton, base_revision, file_pool=None):
872 self.changes.append((path, 'file', 'change'))
877 self.changes.append((path, 'file', 'change'))
873
878
874
879
875 def authorization_callback_allow_all(root, path, pool):
880 def authorization_callback_allow_all(root, path, pool):
876 return True
881 return True
877
882
878
883
879 class TxnNodeProcessor:
884 class TxnNodeProcessor:
880 """
885 """
881 Utility to process the change of one node within a transaction root.
886 Utility to process the change of one node within a transaction root.
882
887
883 It encapsulates the knowledge of how to add, update or remove
888 It encapsulates the knowledge of how to add, update or remove
884 a node for a given transaction root. The purpose is to support the method
889 a node for a given transaction root. The purpose is to support the method
885 `SvnRemote.commit`.
890 `SvnRemote.commit`.
886 """
891 """
887
892
888 def __init__(self, node, txn_root):
893 def __init__(self, node, txn_root):
889 assert_bytes(node['path'])
894 assert_bytes(node['path'])
890
895
891 self.node = node
896 self.node = node
892 self.txn_root = txn_root
897 self.txn_root = txn_root
893
898
894 def update(self):
899 def update(self):
895 self._ensure_parent_dirs()
900 self._ensure_parent_dirs()
896 self._add_file_if_node_does_not_exist()
901 self._add_file_if_node_does_not_exist()
897 self._update_file_content()
902 self._update_file_content()
898 self._update_file_properties()
903 self._update_file_properties()
899
904
900 def remove(self):
905 def remove(self):
901 svn.fs.delete(self.txn_root, self.node['path'])
906 svn.fs.delete(self.txn_root, self.node['path'])
902 # TODO: Clean up directory if empty
907 # TODO: Clean up directory if empty
903
908
904 def _ensure_parent_dirs(self):
909 def _ensure_parent_dirs(self):
905 curdir = vcspath.dirname(self.node['path'])
910 curdir = vcspath.dirname(self.node['path'])
906 dirs_to_create = []
911 dirs_to_create = []
907 while not self._svn_path_exists(curdir):
912 while not self._svn_path_exists(curdir):
908 dirs_to_create.append(curdir)
913 dirs_to_create.append(curdir)
909 curdir = vcspath.dirname(curdir)
914 curdir = vcspath.dirname(curdir)
910
915
911 for curdir in reversed(dirs_to_create):
916 for curdir in reversed(dirs_to_create):
912 log.debug('Creating missing directory "%s"', curdir)
917 log.debug('Creating missing directory "%s"', curdir)
913 svn.fs.make_dir(self.txn_root, curdir)
918 svn.fs.make_dir(self.txn_root, curdir)
914
919
915 def _svn_path_exists(self, path):
920 def _svn_path_exists(self, path):
916 path_status = svn.fs.check_path(self.txn_root, path)
921 path_status = svn.fs.check_path(self.txn_root, path)
917 return path_status != svn.core.svn_node_none
922 return path_status != svn.core.svn_node_none
918
923
919 def _add_file_if_node_does_not_exist(self):
924 def _add_file_if_node_does_not_exist(self):
920 kind = svn.fs.check_path(self.txn_root, self.node['path'])
925 kind = svn.fs.check_path(self.txn_root, self.node['path'])
921 if kind == svn.core.svn_node_none:
926 if kind == svn.core.svn_node_none:
922 svn.fs.make_file(self.txn_root, self.node['path'])
927 svn.fs.make_file(self.txn_root, self.node['path'])
923
928
924 def _update_file_content(self):
929 def _update_file_content(self):
925 assert_bytes(self.node['content'])
930 assert_bytes(self.node['content'])
926
931
927 handler, baton = svn.fs.apply_textdelta(
932 handler, baton = svn.fs.apply_textdelta(
928 self.txn_root, self.node['path'], None, None)
933 self.txn_root, self.node['path'], None, None)
929 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
934 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
930
935
931 def _update_file_properties(self):
936 def _update_file_properties(self):
932 properties = self.node.get('properties', {})
937 properties = self.node.get('properties', {})
933 for key, value in properties.items():
938 for key, value in properties.items():
934 svn.fs.change_node_prop(
939 svn.fs.change_node_prop(
935 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
940 self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
936
941
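A minimal sketch of the node mappings TxnNodeProcessor (and therefore SvnRemote.commit) consumes; the paths and values are hypothetical, but 'path' and 'content' must already be bytes, as asserted above:

updated_node = {
    'path': b'trunk/docs/readme.txt',           # bytes, asserted in __init__
    'content': b'new file content\n',           # bytes, asserted in _update_file_content
    'properties': {'svn:eol-style': 'native'},  # optional, applied via change_node_prop
}
removed_node = {'path': b'trunk/obsolete.txt'}  # remove() only needs the path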
937
942
938 def apr_time_t(timestamp):
943 def apr_time_t(timestamp):
939 """
944 """
940 Convert a Python timestamp into APR timestamp type apr_time_t
945 Convert a Python timestamp into APR timestamp type apr_time_t
941 """
946 """
942 return int(timestamp * 1E6)
947 return int(timestamp * 1E6)
943
948
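A quick worked example of the conversion (APR timestamps are microseconds since the Unix epoch):

apr_time_t(1700000000)     # -> 1700000000000000
apr_time_t(1700000000.25)  # -> 1700000000250000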
944
949
945 def svn_opt_revision_value_t(num):
950 def svn_opt_revision_value_t(num):
946 """
951 """
947 Put `num` into a `svn_opt_revision_value_t` structure.
952 Put `num` into a `svn_opt_revision_value_t` structure.
948 """
953 """
949 value = svn.core.svn_opt_revision_value_t()
954 value = svn.core.svn_opt_revision_value_t()
950 value.number = num
955 value.number = num
951 revision = svn.core.svn_opt_revision_t()
956 revision = svn.core.svn_opt_revision_t()
952 revision.kind = svn.core.svn_opt_revision_number
957 revision.kind = svn.core.svn_opt_revision_number
953 revision.value = value
958 revision.value = value
954 return revision
959 return revision
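For illustration, this mirrors how file_annotate builds its revision arguments; the revision number below is hypothetical:

start_rev = svn_opt_revision_value_t(0)   # annotate from the first revision
peg_rev = svn_opt_revision_value_t(42)    # peg/end revision of interest
end_rev = peg_rev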