@@ -1,175 +1,175 @@
1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |
2 | # Copyright (C) 2014-2023 RhodeCode GmbH |
|
2 | # Copyright (C) 2014-2023 RhodeCode GmbH | |
3 | # |
|
3 | # | |
4 | # This program is free software; you can redistribute it and/or modify |
|
4 | # This program is free software; you can redistribute it and/or modify | |
5 | # it under the terms of the GNU General Public License as published by |
|
5 | # it under the terms of the GNU General Public License as published by | |
6 | # the Free Software Foundation; either version 3 of the License, or |
|
6 | # the Free Software Foundation; either version 3 of the License, or | |
7 | # (at your option) any later version. |
|
7 | # (at your option) any later version. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU General Public License |
|
14 | # You should have received a copy of the GNU General Public License | |
15 | # along with this program; if not, write to the Free Software Foundation, |
|
15 | # along with this program; if not, write to the Free Software Foundation, | |
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
17 |
|
17 | |||
18 | import os |
|
18 | import os | |
19 | import shutil |
|
19 | import shutil | |
20 | import logging |
|
20 | import logging | |
21 | from collections import OrderedDict |
|
21 | from collections import OrderedDict | |
22 |
|
22 | |||
23 | log = logging.getLogger(__name__) |
|
23 | log = logging.getLogger(__name__) | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | class OidHandler: |
|
26 | class OidHandler: | |
27 |
|
27 | |||
28 | def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href, |
|
28 | def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href, | |
29 | obj_verify_href=None): |
|
29 | obj_verify_href=None): | |
30 | self.current_store = store |
|
30 | self.current_store = store | |
31 | self.repo_name = repo_name |
|
31 | self.repo_name = repo_name | |
32 | self.auth = auth |
|
32 | self.auth = auth | |
33 | self.oid = oid |
|
33 | self.oid = oid | |
34 | self.obj_size = obj_size |
|
34 | self.obj_size = obj_size | |
35 | self.obj_data = obj_data |
|
35 | self.obj_data = obj_data | |
36 | self.obj_href = obj_href |
|
36 | self.obj_href = obj_href | |
37 | self.obj_verify_href = obj_verify_href |
|
37 | self.obj_verify_href = obj_verify_href | |
38 |
|
38 | |||
39 | def get_store(self, mode=None): |
|
39 | def get_store(self, mode=None): | |
40 | return self.current_store |
|
40 | return self.current_store | |
41 |
|
41 | |||
42 | def get_auth(self): |
|
42 | def get_auth(self): | |
43 | """returns auth header for re-use in upload/download""" |
|
43 | """returns auth header for re-use in upload/download""" | |
44 | return " ".join(self.auth) |
|
44 | return " ".join(self.auth) | |
45 |
|
45 | |||
46 | def download(self): |
|
46 | def download(self): | |
47 |
|
47 | |||
48 | store = self.get_store() |
|
48 | store = self.get_store() | |
49 | response = None |
|
49 | response = None | |
50 | has_errors = None |
|
50 | has_errors = None | |
51 |
|
51 | |||
52 | if not store.has_oid(): |
|
52 | if not store.has_oid(): | |
53 | # error reply back to client that something is wrong with dl |
|
53 | # error reply back to client that something is wrong with dl | |
54 | err_msg = f'object: {store.oid} does not exist in store' |
|
54 | err_msg = f'object: {store.oid} does not exist in store' | |
55 | has_errors = OrderedDict( |
|
55 | has_errors = OrderedDict( | |
56 | error=OrderedDict( |
|
56 | error=OrderedDict( | |
57 | code=404, |
|
57 | code=404, | |
58 | message=err_msg |
|
58 | message=err_msg | |
59 | ) |
|
59 | ) | |
60 | ) |
|
60 | ) | |
61 |
|
61 | |||
62 | download_action = OrderedDict( |
|
62 | download_action = OrderedDict( | |
63 | href=self.obj_href, |
|
63 | href=self.obj_href, | |
64 | header=OrderedDict([("Authorization", self.get_auth())]) |
|
64 | header=OrderedDict([("Authorization", self.get_auth())]) | |
65 | ) |
|
65 | ) | |
66 | if not has_errors: |
|
66 | if not has_errors: | |
67 | response = OrderedDict(download=download_action) |
|
67 | response = OrderedDict(download=download_action) | |
68 | return response, has_errors |
|
68 | return response, has_errors | |
69 |
|
69 | |||
70 | def upload(self, skip_existing=True): |
|
70 | def upload(self, skip_existing=True): | |
71 | """ |
|
71 | """ | |
72 | Write upload action for git-lfs server |
|
72 | Write upload action for git-lfs server | |
73 | """ |
|
73 | """ | |
74 |
|
74 | |||
75 | store = self.get_store() |
|
75 | store = self.get_store() | |
76 | response = None |
|
76 | response = None | |
77 | has_errors = None |
|
77 | has_errors = None | |
78 |
|
78 | |||
79 | # verify if we have the OID before, if we do, reply with empty |
|
79 | # verify if we have the OID before, if we do, reply with empty | |
80 | if store.has_oid(): |
|
80 | if store.has_oid(): | |
81 | log.debug('LFS: store already has oid %s', store.oid) |
|
81 | log.debug('LFS: store already has oid %s', store.oid) | |
82 |
|
82 | |||
83 | # validate size |
|
83 | # validate size | |
84 | store_size = store.size_oid() |
|
84 | store_size = store.size_oid() | |
85 | size_match = store_size == self.obj_size |
|
85 | size_match = store_size == self.obj_size | |
86 | if not size_match: |
|
86 | if not size_match: | |
87 | log.warning( |
|
87 | log.warning( | |
88 | 'LFS: size mismatch for oid:%s, in store:%s expected: %s', |
|
88 | 'LFS: size mismatch for oid:%s, in store:%s expected: %s', | |
89 | self.oid, store_size, self.obj_size) |
|
89 | self.oid, store_size, self.obj_size) | |
90 | elif skip_existing: |
|
90 | elif skip_existing: | |
91 | log.debug('LFS: skipping further action as oid is existing') |
|
91 | log.debug('LFS: skipping further action as oid is existing') | |
92 | return response, has_errors |
|
92 | return response, has_errors | |
93 |
|
93 | |||
94 | chunked = ("Transfer-Encoding", "chunked") |
|
94 | chunked = ("Transfer-Encoding", "chunked") | |
95 | upload_action = OrderedDict( |
|
95 | upload_action = OrderedDict( | |
96 | href=self.obj_href, |
|
96 | href=self.obj_href, | |
97 | header=OrderedDict([("Authorization", self.get_auth()), chunked]) |
|
97 | header=OrderedDict([("Authorization", self.get_auth()), chunked]) | |
98 | ) |
|
98 | ) | |
99 | if not has_errors: |
|
99 | if not has_errors: | |
100 | response = OrderedDict(upload=upload_action) |
|
100 | response = OrderedDict(upload=upload_action) | |
101 | # if specified in handler, return the verification endpoint |
|
101 | # if specified in handler, return the verification endpoint | |
102 | if self.obj_verify_href: |
|
102 | if self.obj_verify_href: | |
103 | verify_action = OrderedDict( |
|
103 | verify_action = OrderedDict( | |
104 | href=self.obj_verify_href, |
|
104 | href=self.obj_verify_href, | |
105 | header=OrderedDict([("Authorization", self.get_auth())]) |
|
105 | header=OrderedDict([("Authorization", self.get_auth())]) | |
106 | ) |
|
106 | ) | |
107 | response['verify'] = verify_action |
|
107 | response['verify'] = verify_action | |
108 | return response, has_errors |
|
108 | return response, has_errors | |
109 |
|
109 | |||
110 | def exec_operation(self, operation, *args, **kwargs): |
|
110 | def exec_operation(self, operation, *args, **kwargs): | |
111 | handler = getattr(self, operation) |
|
111 | handler = getattr(self, operation) | |
112 | log.debug('LFS: handling request using %s handler', handler) |
|
112 | log.debug('LFS: handling request using %s handler', handler) | |
113 | return handler(*args, **kwargs) |
|
113 | return handler(*args, **kwargs) | |
114 |
|
114 | |||
115 |
|
115 | |||
116 | class LFSOidStore: |
|
116 | class LFSOidStore: | |
117 |
|
117 | |||
118 | def __init__(self, oid, repo, store_location=None): |
|
118 | def __init__(self, oid, repo, store_location=None): | |
119 | self.oid = oid |
|
119 | self.oid = oid | |
120 | self.repo = repo |
|
120 | self.repo = repo | |
121 | self.store_path = store_location or self.get_default_store() |
|
121 | self.store_path = store_location or self.get_default_store() | |
122 | self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp') |
|
122 | self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp') | |
123 | self.oid_path = os.path.join(self.store_path, oid) |
|
123 | self.oid_path = os.path.join(self.store_path, oid) | |
124 | self.fd = None |
|
124 | self.fd = None | |
125 |
|
125 | |||
126 | def get_engine(self, mode): |
|
126 | def get_engine(self, mode): | |
127 | """ |
|
127 | """ | |
128 | engine = .get_engine(mode='wb') |
|
128 | engine = .get_engine(mode='wb') | |
129 | with engine as f: |
|
129 | with engine as f: | |
130 | f.write('...') |
|
130 | f.write('...') | |
131 | """ |
|
131 | """ | |
132 |
|
132 | |||
133 | class StoreEngine
133 | class StoreEngine: | |
134 | def __init__(self, mode, store_path, oid_path, tmp_oid_path): |
|
134 | def __init__(self, mode, store_path, oid_path, tmp_oid_path): | |
135 | self.mode = mode |
|
135 | self.mode = mode | |
136 | self.store_path = store_path |
|
136 | self.store_path = store_path | |
137 | self.oid_path = oid_path |
|
137 | self.oid_path = oid_path | |
138 | self.tmp_oid_path = tmp_oid_path |
|
138 | self.tmp_oid_path = tmp_oid_path | |
139 |
|
139 | |||
140 | def __enter__(self): |
|
140 | def __enter__(self): | |
141 | if not os.path.isdir(self.store_path): |
|
141 | if not os.path.isdir(self.store_path): | |
142 | os.makedirs(self.store_path) |
|
142 | os.makedirs(self.store_path) | |
143 |
|
143 | |||
144 | # TODO(marcink): maybe write metadata here with size/oid ? |
|
144 | # TODO(marcink): maybe write metadata here with size/oid ? | |
145 | fd = open(self.tmp_oid_path, self.mode) |
|
145 | fd = open(self.tmp_oid_path, self.mode) | |
146 | self.fd = fd |
|
146 | self.fd = fd | |
147 | return fd |
|
147 | return fd | |
148 |
|
148 | |||
149 | def __exit__(self, exc_type, exc_value, traceback): |
|
149 | def __exit__(self, exc_type, exc_value, traceback): | |
150 | # close tmp file, and rename to final destination |
|
150 | # close tmp file, and rename to final destination | |
151 | self.fd.close() |
|
151 | self.fd.close() | |
152 | shutil.move(self.tmp_oid_path, self.oid_path) |
|
152 | shutil.move(self.tmp_oid_path, self.oid_path) | |
153 |
|
153 | |||
154 | return StoreEngine( |
|
154 | return StoreEngine( | |
155 | mode, self.store_path, self.oid_path, self.tmp_oid_path) |
|
155 | mode, self.store_path, self.oid_path, self.tmp_oid_path) | |
156 |
|
156 | |||
157 | def get_default_store(self): |
|
157 | def get_default_store(self): | |
158 | """ |
|
158 | """ | |
159 | Default store, consistent with defaults of Mercurial large files store |
|
159 | Default store, consistent with defaults of Mercurial large files store | |
160 | which is /home/username/.cache/largefiles |
|
160 | which is /home/username/.cache/largefiles | |
161 | """ |
|
161 | """ | |
162 | user_home = os.path.expanduser("~") |
|
162 | user_home = os.path.expanduser("~") | |
163 | return os.path.join(user_home, '.cache', 'lfs-store') |
|
163 | return os.path.join(user_home, '.cache', 'lfs-store') | |
164 |
|
164 | |||
165 | def has_oid(self): |
|
165 | def has_oid(self): | |
166 | return os.path.exists(os.path.join(self.store_path, self.oid)) |
|
166 | return os.path.exists(os.path.join(self.store_path, self.oid)) | |
167 |
|
167 | |||
168 | def size_oid(self): |
|
168 | def size_oid(self): | |
169 | size = -1 |
|
169 | size = -1 | |
170 |
|
170 | |||
171 | if self.has_oid(): |
|
171 | if self.has_oid(): | |
172 | oid = os.path.join(self.store_path, self.oid) |
|
172 | oid = os.path.join(self.store_path, self.oid) | |
173 | size = os.stat(oid).st_size |
|
173 | size = os.stat(oid).st_size | |
174 |
|
174 | |||
175 | return size |
|
175 | return size |
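
The two classes above cooperate: LFSOidStore knows where an OID lives on disk, while OidHandler turns that into the download/upload actions of a Git LFS batch response. The sketch below shows one hypothetical way to wire them together; the store location, repository name, hrefs and auth values are invented for illustration and are not taken from this changeset.

    # Illustration only: paths, hrefs and the auth pair are placeholders.
    store = LFSOidStore(oid='0' * 64, repo='my-repo', store_location='/tmp/lfs-store')
    handler = OidHandler(
        store,
        repo_name='my-repo',
        auth=('Basic', 'dXNlcjpzZWNyZXQ='),   # get_auth() joins this into one header value
        oid=store.oid,
        obj_size=1024,
        obj_data={},
        obj_href='https://example.invalid/my-repo/info/lfs/objects/' + store.oid,
        obj_verify_href='https://example.invalid/my-repo/info/lfs/verify')

    # exec_operation() dispatches by name to upload() or download().
    response, errors = handler.exec_operation('upload')
    if errors is None:
        # 'verify' is included because obj_verify_href was given
        print(response['upload']['href'], list(response))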
@@ -1,154 +1,154 @@
1 | import re |
|
1 | import re | |
2 | import random |
|
2 | import random | |
3 | from collections import deque |
|
3 | from collections import deque | |
4 | from datetime import timedelta |
|
4 | from datetime import timedelta | |
5 | from repoze.lru import lru_cache |
|
5 | from repoze.lru import lru_cache | |
6 |
|
6 | |||
7 | from .timer import Timer |
|
7 | from .timer import Timer | |
8 |
|
8 | |||
9 | TAG_INVALID_CHARS_RE = re.compile( |
|
9 | TAG_INVALID_CHARS_RE = re.compile( | |
10 | r"[^\w\d_\-:/\.]", |
|
10 | r"[^\w\d_\-:/\.]", | |
11 | #re.UNICODE |
|
11 | #re.UNICODE | |
12 | ) |
|
12 | ) | |
13 | TAG_INVALID_CHARS_SUBS = "_" |
|
13 | TAG_INVALID_CHARS_SUBS = "_" | |
14 |
|
14 | |||
15 | # we save and expose methods called by statsd for discovery |
|
15 | # we save and expose methods called by statsd for discovery | |
16 | buckets_dict = { |
|
16 | buckets_dict = { | |
17 |
|
17 | |||
18 | } |
|
18 | } | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | @lru_cache(maxsize=500) |
|
21 | @lru_cache(maxsize=500) | |
22 | def _normalize_tags_with_cache(tag_list): |
|
22 | def _normalize_tags_with_cache(tag_list): | |
23 | return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list] |
|
23 | return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list] | |
24 |
|
24 | |||
25 |
|
25 | |||
26 | def normalize_tags(tag_list): |
|
26 | def normalize_tags(tag_list): | |
27 | # We have to turn our input tag list into a non-mutable tuple for it to |
|
27 | # We have to turn our input tag list into a non-mutable tuple for it to | |
28 | # be hashable (and thus usable) by the @lru_cache decorator. |
|
28 | # be hashable (and thus usable) by the @lru_cache decorator. | |
29 | return _normalize_tags_with_cache(tuple(tag_list)) |
|
29 | return _normalize_tags_with_cache(tuple(tag_list)) | |
30 |
|
30 | |||
31 |
|
31 | |||
32 | class StatsClientBase: |
|
32 | class StatsClientBase: | |
33 | """A Base class for various statsd clients.""" |
|
33 | """A Base class for various statsd clients.""" | |
34 |
|
34 | |||
35 | def close(self): |
|
35 | def close(self): | |
36 | """Used to close and clean up any underlying resources.""" |
|
36 | """Used to close and clean up any underlying resources.""" | |
37 | raise NotImplementedError() |
|
37 | raise NotImplementedError() | |
38 |
|
38 | |||
39 | def _send(self): |
|
39 | def _send(self): | |
40 | raise NotImplementedError() |
|
40 | raise NotImplementedError() | |
41 |
|
41 | |||
42 | def pipeline(self): |
|
42 | def pipeline(self): | |
43 | raise NotImplementedError() |
|
43 | raise NotImplementedError() | |
44 |
|
44 | |||
45 | def timer(self, stat, rate=1, tags=None, auto_send=True): |
|
45 | def timer(self, stat, rate=1, tags=None, auto_send=True): | |
46 | """ |
|
46 | """ | |
47 | statsd = StatsdClient.statsd |
|
47 | statsd = StatsdClient.statsd | |
48 | with statsd.timer('bucket_name', auto_send=True) as tmr: |
|
48 | with statsd.timer('bucket_name', auto_send=True) as tmr: | |
49 | # This block will be timed. |
|
49 | # This block will be timed. | |
50 | for i in range(0, 100000): |
|
50 | for i in range(0, 100000): | |
51 | i ** 2 |
|
51 | i ** 2 | |
52 | # you can access time here... |
|
52 | # you can access time here... | |
53 | elapsed_ms = tmr.ms |
|
53 | elapsed_ms = tmr.ms | |
54 | """ |
|
54 | """ | |
55 | return Timer(self, stat, rate, tags, auto_send=auto_send) |
|
55 | return Timer(self, stat, rate, tags, auto_send=auto_send) | |
56 |
|
56 | |||
57 | def timing(self, stat, delta, rate=1, tags=None, use_decimals=True): |
|
57 | def timing(self, stat, delta, rate=1, tags=None, use_decimals=True): | |
58 | """ |
|
58 | """ | |
59 | Send new timing information. |
|
59 | Send new timing information. | |
60 |
|
60 | |||
61 | `delta` can be either a number of milliseconds or a timedelta. |
|
61 | `delta` can be either a number of milliseconds or a timedelta. | |
62 | """ |
|
62 | """ | |
63 | if isinstance(delta, timedelta): |
|
63 | if isinstance(delta, timedelta): | |
64 | # Convert timedelta to number of milliseconds. |
|
64 | # Convert timedelta to number of milliseconds. | |
65 | delta = delta.total_seconds() * 1000. |
|
65 | delta = delta.total_seconds() * 1000. | |
66 | if use_decimals: |
|
66 | if use_decimals: | |
67 | fmt = '%0.6f|ms' |
|
67 | fmt = '%0.6f|ms' | |
68 | else: |
|
68 | else: | |
69 | fmt = '%s|ms' |
|
69 | fmt = '%s|ms' | |
70 | self._send_stat(stat, fmt % delta, rate, tags) |
|
70 | self._send_stat(stat, fmt % delta, rate, tags) | |
71 |
|
71 | |||
72 | def incr(self, stat, count=1, rate=1, tags=None): |
|
72 | def incr(self, stat, count=1, rate=1, tags=None): | |
73 | """Increment a stat by `count`.""" |
|
73 | """Increment a stat by `count`.""" | |
74 | self._send_stat(stat, '
74 | self._send_stat(stat, f'{count}|c', rate, tags) | |
75 |
|
75 | |||
76 | def decr(self, stat, count=1, rate=1, tags=None): |
|
76 | def decr(self, stat, count=1, rate=1, tags=None): | |
77 | """Decrement a stat by `count`.""" |
|
77 | """Decrement a stat by `count`.""" | |
78 | self.incr(stat, -count, rate, tags) |
|
78 | self.incr(stat, -count, rate, tags) | |
79 |
|
79 | |||
80 | def gauge(self, stat, value, rate=1, delta=False, tags=None): |
|
80 | def gauge(self, stat, value, rate=1, delta=False, tags=None): | |
81 | """Set a gauge value.""" |
|
81 | """Set a gauge value.""" | |
82 | if value < 0 and not delta: |
|
82 | if value < 0 and not delta: | |
83 | if rate < 1: |
|
83 | if rate < 1: | |
84 | if random.random() > rate: |
|
84 | if random.random() > rate: | |
85 | return |
|
85 | return | |
86 | with self.pipeline() as pipe: |
|
86 | with self.pipeline() as pipe: | |
87 | pipe._send_stat(stat, '0|g', 1) |
|
87 | pipe._send_stat(stat, '0|g', 1) | |
88 | pipe._send_stat(stat, '
88 | pipe._send_stat(stat, f'{value}|g', 1) | |
89 | else: |
|
89 | else: | |
90 | prefix = '+' if delta and value >= 0 else '' |
|
90 | prefix = '+' if delta and value >= 0 else '' | |
91 | self._send_stat(stat, '
91 | self._send_stat(stat, f'{prefix}{value}|g', rate, tags) | |
92 |
|
92 | |||
93 | def set(self, stat, value, rate=1): |
|
93 | def set(self, stat, value, rate=1): | |
94 | """Set a set value.""" |
|
94 | """Set a set value.""" | |
95 | self._send_stat(stat, '
95 | self._send_stat(stat, f'{value}|s', rate) | |
96 |
|
96 | |||
97 | def histogram(self, stat, value, rate=1, tags=None): |
|
97 | def histogram(self, stat, value, rate=1, tags=None): | |
98 | """Set a histogram""" |
|
98 | """Set a histogram""" | |
99 | self._send_stat(stat, '
99 | self._send_stat(stat, f'{value}|h', rate, tags) | |
100 |
|
100 | |||
101 | def _send_stat(self, stat, value, rate, tags=None): |
|
101 | def _send_stat(self, stat, value, rate, tags=None): | |
102 | self._after(self._prepare(stat, value, rate, tags)) |
|
102 | self._after(self._prepare(stat, value, rate, tags)) | |
103 |
|
103 | |||
104 | def _prepare(self, stat, value, rate, tags=None): |
|
104 | def _prepare(self, stat, value, rate, tags=None): | |
105 | global buckets_dict |
|
105 | global buckets_dict | |
106 | buckets_dict[stat] = 1 |
|
106 | buckets_dict[stat] = 1 | |
107 |
|
107 | |||
108 | if rate < 1: |
|
108 | if rate < 1: | |
109 | if random.random() > rate: |
|
109 | if random.random() > rate: | |
110 | return |
|
110 | return | |
111 | value = '
111 | value = f'{value}|@{rate}' | |
112 |
|
112 | |||
113 | if self._prefix: |
|
113 | if self._prefix: | |
114 | stat = '
114 | stat = f'{self._prefix}.{stat}' | |
115 |
|
115 | |||
116 | res = '%s:%s%s' % ( |
|
116 | res = '%s:%s%s' % ( | |
117 | stat, |
|
117 | stat, | |
118 | value, |
|
118 | value, | |
119 | ("|#" + ",".join(normalize_tags(tags))) if tags else "", |
|
119 | ("|#" + ",".join(normalize_tags(tags))) if tags else "", | |
120 | ) |
|
120 | ) | |
121 | return res |
|
121 | return res | |
122 |
|
122 | |||
123 | def _after(self, data): |
|
123 | def _after(self, data): | |
124 | if data: |
|
124 | if data: | |
125 | self._send(data) |
|
125 | self._send(data) | |
126 |
|
126 | |||
127 |
|
127 | |||
128 | class PipelineBase(StatsClientBase): |
|
128 | class PipelineBase(StatsClientBase): | |
129 |
|
129 | |||
130 | def __init__(self, client): |
|
130 | def __init__(self, client): | |
131 | self._client = client |
|
131 | self._client = client | |
132 | self._prefix = client._prefix |
|
132 | self._prefix = client._prefix | |
133 | self._stats = deque() |
|
133 | self._stats = deque() | |
134 |
|
134 | |||
135 | def _send(self): |
|
135 | def _send(self): | |
136 | raise NotImplementedError() |
|
136 | raise NotImplementedError() | |
137 |
|
137 | |||
138 | def _after(self, data): |
|
138 | def _after(self, data): | |
139 | if data is not None: |
|
139 | if data is not None: | |
140 | self._stats.append(data) |
|
140 | self._stats.append(data) | |
141 |
|
141 | |||
142 | def __enter__(self): |
|
142 | def __enter__(self): | |
143 | return self |
|
143 | return self | |
144 |
|
144 | |||
145 | def __exit__(self, typ, value, tb): |
|
145 | def __exit__(self, typ, value, tb): | |
146 | self.send() |
|
146 | self.send() | |
147 |
|
147 | |||
148 | def send(self): |
|
148 | def send(self): | |
149 | if not self._stats: |
|
149 | if not self._stats: | |
150 | return |
|
150 | return | |
151 | self._send() |
|
151 | self._send() | |
152 |
|
152 | |||
153 | def pipeline(self): |
|
153 | def pipeline(self): | |
154 | return self.__class__(self) |
|
154 | return self.__class__(self) |
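
StatsClientBase above leaves the transport to subclasses: a concrete client only needs to set self._prefix and implement _send() (plus pipeline() if gauge deltas are used). The toy client below prints the statsd wire format instead of writing to a socket, to show what incr() and timing() actually emit; it is a sketch for illustration, not one of the real clients shipped with this package, and it reuses the timedelta import at the top of this module.

    class PrintingStatsClient(StatsClientBase):
        def __init__(self, prefix=None):
            self._prefix = prefix

        def close(self):
            pass

        def _send(self, data):
            print(data)  # a real client would send this to statsd over UDP or TCP

    statsd = PrintingStatsClient(prefix='vcsserver')
    statsd.incr('repo.push', tags=['repo:my repo'])
    # -> vcsserver.repo.push:1|c|#repo:my_repo   (the space in the tag is normalized to '_')
    statsd.timing('hook.duration', timedelta(milliseconds=12))
    # -> vcsserver.hook.duration:12.000000|ms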
@@ -1,417 +1,417 @@
1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |
2 | # Copyright (C) 2014-2023 RhodeCode GmbH |
|
2 | # Copyright (C) 2014-2023 RhodeCode GmbH | |
3 | # |
|
3 | # | |
4 | # This program is free software; you can redistribute it and/or modify |
|
4 | # This program is free software; you can redistribute it and/or modify | |
5 | # it under the terms of the GNU General Public License as published by |
|
5 | # it under the terms of the GNU General Public License as published by | |
6 | # the Free Software Foundation; either version 3 of the License, or |
|
6 | # the Free Software Foundation; either version 3 of the License, or | |
7 | # (at your option) any later version. |
|
7 | # (at your option) any later version. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU General Public License |
|
14 | # You should have received a copy of the GNU General Public License | |
15 | # along with this program; if not, write to the Free Software Foundation, |
|
15 | # along with this program; if not, write to the Free Software Foundation, | |
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
17 |
|
17 | |||
18 | """Handles the Git smart protocol.""" |
|
18 | """Handles the Git smart protocol.""" | |
19 |
|
19 | |||
20 | import os |
|
20 | import os | |
21 | import socket |
|
21 | import socket | |
22 | import logging |
|
22 | import logging | |
23 |
|
23 | |||
24 | import dulwich.protocol |
|
24 | import dulwich.protocol | |
25 | from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K |
|
25 | from dulwich.protocol import CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K | |
26 | from webob import Request, Response, exc |
|
26 | from webob import Request, Response, exc | |
27 |
|
27 | |||
28 | from vcsserver.lib.rc_json import json |
|
28 | from vcsserver.lib.rc_json import json | |
29 | from vcsserver import hooks, subprocessio |
|
29 | from vcsserver import hooks, subprocessio | |
30 | from vcsserver.str_utils import ascii_bytes |
|
30 | from vcsserver.str_utils import ascii_bytes | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | log = logging.getLogger(__name__) |
|
33 | log = logging.getLogger(__name__) | |
34 |
|
34 | |||
35 |
|
35 | |||
36 | class FileWrapper: |
|
36 | class FileWrapper: | |
37 | """File wrapper that ensures how much data is read from it.""" |
|
37 | """File wrapper that ensures how much data is read from it.""" | |
38 |
|
38 | |||
39 | def __init__(self, fd, content_length): |
|
39 | def __init__(self, fd, content_length): | |
40 | self.fd = fd |
|
40 | self.fd = fd | |
41 | self.content_length = content_length |
|
41 | self.content_length = content_length | |
42 | self.remain = content_length |
|
42 | self.remain = content_length | |
43 |
|
43 | |||
44 | def read(self, size): |
|
44 | def read(self, size): | |
45 | if size <= self.remain: |
|
45 | if size <= self.remain: | |
46 | try: |
|
46 | try: | |
47 | data = self.fd.read(size) |
|
47 | data = self.fd.read(size) | |
48 | except socket.error: |
|
48 | except socket.error: | |
49 | raise IOError(self) |
|
49 | raise IOError(self) | |
50 | self.remain -= size |
|
50 | self.remain -= size | |
51 | elif self.remain: |
|
51 | elif self.remain: | |
52 | data = self.fd.read(self.remain) |
|
52 | data = self.fd.read(self.remain) | |
53 | self.remain = 0 |
|
53 | self.remain = 0 | |
54 | else: |
|
54 | else: | |
55 | data = None |
|
55 | data = None | |
56 | return data |
|
56 | return data | |
57 |
|
57 | |||
58 | def __repr__(self): |
|
58 | def __repr__(self): | |
59 | return '<FileWrapper {} len: {}, read: {}>'.format( |
|
59 | return '<FileWrapper {} len: {}, read: {}>'.format( | |
60 | self.fd, self.content_length, self.content_length - self.remain |
|
60 | self.fd, self.content_length, self.content_length - self.remain | |
61 | ) |
|
61 | ) | |
62 |
|
62 | |||
63 |
|
63 | |||
64 | class GitRepository: |
|
64 | class GitRepository: | |
65 | """WSGI app for handling Git smart protocol endpoints.""" |
|
65 | """WSGI app for handling Git smart protocol endpoints.""" | |
66 |
|
66 | |||
67 | git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs')) |
|
67 | git_folder_signature = frozenset(('config', 'head', 'info', 'objects', 'refs')) | |
68 | commands = frozenset(('git-upload-pack', 'git-receive-pack')) |
|
68 | commands = frozenset(('git-upload-pack', 'git-receive-pack')) | |
69 | valid_accepts = frozenset(f'application/x-{c}-result' for c in commands) |
|
69 | valid_accepts = frozenset(f'application/x-{c}-result' for c in commands) | |
70 |
|
70 | |||
71 | # The last bytes are the SHA1 of the first 12 bytes. |
|
71 | # The last bytes are the SHA1 of the first 12 bytes. | |
72 | EMPTY_PACK = ( |
|
72 | EMPTY_PACK = ( | |
73 | b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' + |
|
73 | b'PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08' + | |
74 | b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e' |
|
74 | b'\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e' | |
75 | ) |
|
75 | ) | |
76 | FLUSH_PACKET = b"0000" |
|
76 | FLUSH_PACKET = b"0000" | |
77 |
|
77 | |||
78 | SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K)) |
|
78 | SIDE_BAND_CAPS = frozenset((CAPABILITY_SIDE_BAND, CAPABILITY_SIDE_BAND_64K)) | |
79 |
|
79 | |||
80 | def __init__(self, repo_name, content_path, git_path, update_server_info, extras): |
|
80 | def __init__(self, repo_name, content_path, git_path, update_server_info, extras): | |
81 | files = frozenset(f.lower() for f in os.listdir(content_path)) |
|
81 | files = frozenset(f.lower() for f in os.listdir(content_path)) | |
82 | valid_dir_signature = self.git_folder_signature.issubset(files) |
|
82 | valid_dir_signature = self.git_folder_signature.issubset(files) | |
83 |
|
83 | |||
84 | if not valid_dir_signature: |
|
84 | if not valid_dir_signature: | |
85 | raise OSError(f'{content_path} missing git signature') |
|
85 | raise OSError(f'{content_path} missing git signature') | |
86 |
|
86 | |||
87 | self.content_path = content_path |
|
87 | self.content_path = content_path | |
88 | self.repo_name = repo_name |
|
88 | self.repo_name = repo_name | |
89 | self.extras = extras |
|
89 | self.extras = extras | |
90 | self.git_path = git_path |
|
90 | self.git_path = git_path | |
91 | self.update_server_info = update_server_info |
|
91 | self.update_server_info = update_server_info | |
92 |
|
92 | |||
93 | def _get_fixedpath(self, path): |
|
93 | def _get_fixedpath(self, path): | |
94 | """ |
|
94 | """ | |
95 | Small fix for repo_path |
|
95 | Small fix for repo_path | |
96 |
|
96 | |||
97 | :param path: |
|
97 | :param path: | |
98 | """ |
|
98 | """ | |
99 | path = path.split(self.repo_name, 1)[-1] |
|
99 | path = path.split(self.repo_name, 1)[-1] | |
100 | if path.startswith('.git'): |
|
100 | if path.startswith('.git'): | |
101 | # for bare repos we still get the .git prefix inside, we skip it |
|
101 | # for bare repos we still get the .git prefix inside, we skip it | |
102 | # here, and remove from the service command |
|
102 | # here, and remove from the service command | |
103 | path = path[4:] |
|
103 | path = path[4:] | |
104 |
|
104 | |||
105 | return path.strip('/') |
|
105 | return path.strip('/') | |
106 |
|
106 | |||
107 | def inforefs(self, request, unused_environ): |
|
107 | def inforefs(self, request, unused_environ): | |
108 | """ |
|
108 | """ | |
109 | WSGI Response producer for HTTP GET Git Smart |
|
109 | WSGI Response producer for HTTP GET Git Smart | |
110 | HTTP /info/refs request. |
|
110 | HTTP /info/refs request. | |
111 | """ |
|
111 | """ | |
112 |
|
112 | |||
113 | git_command = request.GET.get('service') |
|
113 | git_command = request.GET.get('service') | |
114 | if git_command not in self.commands: |
|
114 | if git_command not in self.commands: | |
115 | log.debug('command %s not allowed', git_command) |
|
115 | log.debug('command %s not allowed', git_command) | |
116 | return exc.HTTPForbidden() |
|
116 | return exc.HTTPForbidden() | |
117 |
|
117 | |||
118 | # please, resist the urge to add '\n' to git capture and increment |
|
118 | # please, resist the urge to add '\n' to git capture and increment | |
119 | # line count by 1. |
|
119 | # line count by 1. | |
120 | # by git docs: Documentation/technical/http-protocol.txt#L214 \n is |
|
120 | # by git docs: Documentation/technical/http-protocol.txt#L214 \n is | |
121 | # a part of protocol. |
|
121 | # a part of protocol. | |
122 | # The code in Git client not only does NOT need '\n', but actually |
|
122 | # The code in Git client not only does NOT need '\n', but actually | |
123 | # blows up if you sprinkle "flush" (0000) as "0001\n". |
|
123 | # blows up if you sprinkle "flush" (0000) as "0001\n". | |
124 | # It reads binary, per number of bytes specified. |
|
124 | # It reads binary, per number of bytes specified. | |
125 | # if you do add '\n' as part of data, count it. |
|
125 | # if you do add '\n' as part of data, count it. | |
126 | server_advert = '# service=
126 | server_advert = f'# service={git_command}\n' | |
127 | packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower() |
|
127 | packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0').lower() | |
128 | try: |
|
128 | try: | |
129 | gitenv = dict(os.environ) |
|
129 | gitenv = dict(os.environ) | |
130 | # forget all configs |
|
130 | # forget all configs | |
131 | gitenv['RC_SCM_DATA'] = json.dumps(self.extras) |
|
131 | gitenv['RC_SCM_DATA'] = json.dumps(self.extras) | |
132 | command = [self.git_path, git_command[4:], '--stateless-rpc', |
|
132 | command = [self.git_path, git_command[4:], '--stateless-rpc', | |
133 | '--advertise-refs', self.content_path] |
|
133 | '--advertise-refs', self.content_path] | |
134 | out = subprocessio.SubprocessIOChunker( |
|
134 | out = subprocessio.SubprocessIOChunker( | |
135 | command, |
|
135 | command, | |
136 | env=gitenv, |
|
136 | env=gitenv, | |
137 | starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET], |
|
137 | starting_values=[ascii_bytes(packet_len + server_advert) + self.FLUSH_PACKET], | |
138 | shell=False |
|
138 | shell=False | |
139 | ) |
|
139 | ) | |
140 | except OSError: |
|
140 | except OSError: | |
141 | log.exception('Error processing command') |
|
141 | log.exception('Error processing command') | |
142 | raise exc.HTTPExpectationFailed() |
|
142 | raise exc.HTTPExpectationFailed() | |
143 |
|
143 | |||
144 | resp = Response() |
|
144 | resp = Response() | |
145 | resp.content_type = f'application/x-{git_command}-advertisement' |
|
145 | resp.content_type = f'application/x-{git_command}-advertisement' | |
146 | resp.charset = None |
|
146 | resp.charset = None | |
147 | resp.app_iter = out |
|
147 | resp.app_iter = out | |
148 |
|
148 | |||
149 | return resp |
|
149 | return resp | |
150 |
|
150 | |||
151 | def _get_want_capabilities(self, request): |
|
151 | def _get_want_capabilities(self, request): | |
152 | """Read the capabilities found in the first want line of the request.""" |
|
152 | """Read the capabilities found in the first want line of the request.""" | |
153 | pos = request.body_file_seekable.tell() |
|
153 | pos = request.body_file_seekable.tell() | |
154 | first_line = request.body_file_seekable.readline() |
|
154 | first_line = request.body_file_seekable.readline() | |
155 | request.body_file_seekable.seek(pos) |
|
155 | request.body_file_seekable.seek(pos) | |
156 |
|
156 | |||
157 | return frozenset( |
|
157 | return frozenset( | |
158 | dulwich.protocol.extract_want_line_capabilities(first_line)[1]) |
|
158 | dulwich.protocol.extract_want_line_capabilities(first_line)[1]) | |
159 |
|
159 | |||
160 | def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages): |
|
160 | def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages): | |
161 | """ |
|
161 | """ | |
162 | Construct a response with an empty PACK file. |
|
162 | Construct a response with an empty PACK file. | |
163 |
|
163 | |||
164 | We use an empty PACK file, as that would trigger the failure of the pull |
|
164 | We use an empty PACK file, as that would trigger the failure of the pull | |
165 | or clone command. |
|
165 | or clone command. | |
166 |
|
166 | |||
167 | We also print in the error output a message explaining why the command |
|
167 | We also print in the error output a message explaining why the command | |
168 | was aborted. |
|
168 | was aborted. | |
169 |
|
169 | |||
170 | If additionally, the user is accepting messages we send them the output |
|
170 | If additionally, the user is accepting messages we send them the output | |
171 | of the pre-pull hook. |
|
171 | of the pre-pull hook. | |
172 |
|
172 | |||
173 | Note that for clients not supporting side-band we just send them the |
|
173 | Note that for clients not supporting side-band we just send them the | |
174 | emtpy PACK file. |
|
174 | emtpy PACK file. | |
175 | """ |
|
175 | """ | |
176 |
|
176 | |||
177 | if self.SIDE_BAND_CAPS.intersection(capabilities): |
|
177 | if self.SIDE_BAND_CAPS.intersection(capabilities): | |
178 | response = [] |
|
178 | response = [] | |
179 | proto = dulwich.protocol.Protocol(None, response.append) |
|
179 | proto = dulwich.protocol.Protocol(None, response.append) | |
180 | proto.write_pkt_line(dulwich.protocol.NAK_LINE) |
|
180 | proto.write_pkt_line(dulwich.protocol.NAK_LINE) | |
181 |
|
181 | |||
182 | self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities) |
|
182 | self._write_sideband_to_proto(proto, ascii_bytes(pre_pull_messages, allow_bytes=True), capabilities) | |
183 | # N.B.(skreft): Do not change the sideband channel to 3, as that |
|
183 | # N.B.(skreft): Do not change the sideband channel to 3, as that | |
184 | # produces a fatal error in the client: |
|
184 | # produces a fatal error in the client: | |
185 | # fatal: error in sideband demultiplexer |
|
185 | # fatal: error in sideband demultiplexer | |
186 | proto.write_sideband( |
|
186 | proto.write_sideband( | |
187 | dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, |
|
187 | dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, | |
188 | ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True)) |
|
188 | ascii_bytes('Pre pull hook failed: aborting\n', allow_bytes=True)) | |
189 | proto.write_sideband( |
|
189 | proto.write_sideband( | |
190 | dulwich.protocol.SIDE_BAND_CHANNEL_DATA, |
|
190 | dulwich.protocol.SIDE_BAND_CHANNEL_DATA, | |
191 | ascii_bytes(self.EMPTY_PACK, allow_bytes=True)) |
|
191 | ascii_bytes(self.EMPTY_PACK, allow_bytes=True)) | |
192 |
|
192 | |||
193 | # writes b"0000" as default |
|
193 | # writes b"0000" as default | |
194 | proto.write_pkt_line(None) |
|
194 | proto.write_pkt_line(None) | |
195 |
|
195 | |||
196 | return response |
|
196 | return response | |
197 | else: |
|
197 | else: | |
198 | return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)] |
|
198 | return [ascii_bytes(self.EMPTY_PACK, allow_bytes=True)] | |
199 |
|
199 | |||
200 | def _build_post_pull_response(self, response, capabilities, start_message, end_message): |
|
200 | def _build_post_pull_response(self, response, capabilities, start_message, end_message): | |
201 | """ |
|
201 | """ | |
202 | Given a list response we inject the post-pull messages. |
|
202 | Given a list response we inject the post-pull messages. | |
203 |
|
203 | |||
204 | We only inject the messages if the client supports sideband, and the |
|
204 | We only inject the messages if the client supports sideband, and the | |
205 | response has the format: |
|
205 | response has the format: | |
206 | 0008NAK\n...0000 |
|
206 | 0008NAK\n...0000 | |
207 |
|
207 | |||
208 | Note that we do not check the no-progress capability as by default, git |
|
208 | Note that we do not check the no-progress capability as by default, git | |
209 | sends it, which effectively would block all messages. |
|
209 | sends it, which effectively would block all messages. | |
210 | """ |
|
210 | """ | |
211 |
|
211 | |||
212 | if not self.SIDE_BAND_CAPS.intersection(capabilities): |
|
212 | if not self.SIDE_BAND_CAPS.intersection(capabilities): | |
213 | return response |
|
213 | return response | |
214 |
|
214 | |||
215 | if not start_message and not end_message: |
|
215 | if not start_message and not end_message: | |
216 | return response |
|
216 | return response | |
217 |
|
217 | |||
218 | try: |
|
218 | try: | |
219 | iter(response) |
|
219 | iter(response) | |
220 | # iterator probably will work, we continue |
|
220 | # iterator probably will work, we continue | |
221 | except TypeError: |
|
221 | except TypeError: | |
222 | raise TypeError(f'response must be an iterator: got {type(response)}') |
|
222 | raise TypeError(f'response must be an iterator: got {type(response)}') | |
223 | if isinstance(response, (list, tuple)): |
|
223 | if isinstance(response, (list, tuple)): | |
224 | raise TypeError(f'response must be an iterator: got {type(response)}') |
|
224 | raise TypeError(f'response must be an iterator: got {type(response)}') | |
225 |
|
225 | |||
226 | def injected_response(): |
|
226 | def injected_response(): | |
227 |
|
227 | |||
228 | do_loop = 1 |
|
228 | do_loop = 1 | |
229 | header_injected = 0 |
|
229 | header_injected = 0 | |
230 | next_item = None |
|
230 | next_item = None | |
231 | has_item = False |
|
231 | has_item = False | |
232 | item = b'' |
|
232 | item = b'' | |
233 |
|
233 | |||
234 | while do_loop: |
|
234 | while do_loop: | |
235 |
|
235 | |||
236 | try: |
|
236 | try: | |
237 | next_item = next(response) |
|
237 | next_item = next(response) | |
238 | except StopIteration: |
|
238 | except StopIteration: | |
239 | do_loop = 0 |
|
239 | do_loop = 0 | |
240 |
|
240 | |||
241 | if has_item: |
|
241 | if has_item: | |
242 | # last item ! alter it now |
|
242 | # last item ! alter it now | |
243 | if do_loop == 0 and item.endswith(self.FLUSH_PACKET): |
|
243 | if do_loop == 0 and item.endswith(self.FLUSH_PACKET): | |
244 | new_response = [item[:-4]] |
|
244 | new_response = [item[:-4]] | |
245 | new_response.extend(self._get_messages(end_message, capabilities)) |
|
245 | new_response.extend(self._get_messages(end_message, capabilities)) | |
246 | new_response.append(self.FLUSH_PACKET) |
|
246 | new_response.append(self.FLUSH_PACKET) | |
247 | item = b''.join(new_response) |
|
247 | item = b''.join(new_response) | |
248 |
|
248 | |||
249 | yield item |
|
249 | yield item | |
250 |
|
250 | |||
251 | has_item = True |
|
251 | has_item = True | |
252 | item = next_item |
|
252 | item = next_item | |
253 |
|
253 | |||
254 | # alter item if it's the initial chunk |
|
254 | # alter item if it's the initial chunk | |
255 | if not header_injected and item.startswith(b'0008NAK\n'): |
|
255 | if not header_injected and item.startswith(b'0008NAK\n'): | |
256 | new_response = [b'0008NAK\n'] |
|
256 | new_response = [b'0008NAK\n'] | |
257 | new_response.extend(self._get_messages(start_message, capabilities)) |
|
257 | new_response.extend(self._get_messages(start_message, capabilities)) | |
258 | new_response.append(item[8:]) |
|
258 | new_response.append(item[8:]) | |
259 | item = b''.join(new_response) |
|
259 | item = b''.join(new_response) | |
260 | header_injected = 1 |
|
260 | header_injected = 1 | |
261 |
|
261 | |||
262 | return injected_response() |
|
262 | return injected_response() | |
263 |
|
263 | |||
264 | def _write_sideband_to_proto(self, proto, data, capabilities): |
|
264 | def _write_sideband_to_proto(self, proto, data, capabilities): | |
265 | """ |
|
265 | """ | |
266 | Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS |
|
266 | Write the data to the proto's sideband number 2 == SIDE_BAND_CHANNEL_PROGRESS | |
267 |
|
267 | |||
268 | We do not use dulwich's write_sideband directly as it only supports |
|
268 | We do not use dulwich's write_sideband directly as it only supports | |
269 | side-band-64k. |
|
269 | side-band-64k. | |
270 | """ |
|
270 | """ | |
271 | if not data: |
|
271 | if not data: | |
272 | return |
|
272 | return | |
273 |
|
273 | |||
274 | # N.B.(skreft): The values below are explained in the pack protocol |
|
274 | # N.B.(skreft): The values below are explained in the pack protocol | |
275 | # documentation, section Packfile Data. |
|
275 | # documentation, section Packfile Data. | |
276 | # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt |
|
276 | # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt | |
277 | if CAPABILITY_SIDE_BAND_64K in capabilities: |
|
277 | if CAPABILITY_SIDE_BAND_64K in capabilities: | |
278 | chunk_size = 65515 |
|
278 | chunk_size = 65515 | |
279 | elif CAPABILITY_SIDE_BAND in capabilities: |
|
279 | elif CAPABILITY_SIDE_BAND in capabilities: | |
280 | chunk_size = 995 |
|
280 | chunk_size = 995 | |
281 | else: |
|
281 | else: | |
282 | return |
|
282 | return | |
283 |
|
283 | |||
284 | chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size)) |
|
284 | chunker = (data[i:i + chunk_size] for i in range(0, len(data), chunk_size)) | |
285 |
|
285 | |||
286 | for chunk in chunker: |
|
286 | for chunk in chunker: | |
287 | proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True)) |
|
287 | proto.write_sideband(dulwich.protocol.SIDE_BAND_CHANNEL_PROGRESS, ascii_bytes(chunk, allow_bytes=True)) | |
288 |
|
288 | |||
289 | def _get_messages(self, data, capabilities): |
|
289 | def _get_messages(self, data, capabilities): | |
290 | """Return a list with packets for sending data in sideband number 2.""" |
|
290 | """Return a list with packets for sending data in sideband number 2.""" | |
291 | response = [] |
|
291 | response = [] | |
292 | proto = dulwich.protocol.Protocol(None, response.append) |
|
292 | proto = dulwich.protocol.Protocol(None, response.append) | |
293 |
|
293 | |||
294 | self._write_sideband_to_proto(proto, data, capabilities) |
|
294 | self._write_sideband_to_proto(proto, data, capabilities) | |
295 |
|
295 | |||
296 | return response |
|
296 | return response | |
297 |
|
297 | |||
298 | def backend(self, request, environ): |
|
298 | def backend(self, request, environ): | |
299 | """ |
|
299 | """ | |
300 | WSGI Response producer for HTTP POST Git Smart HTTP requests. |
|
300 | WSGI Response producer for HTTP POST Git Smart HTTP requests. | |
301 | Reads commands and data from HTTP POST's body. |
|
301 | Reads commands and data from HTTP POST's body. | |
302 | returns an iterator obj with contents of git command's |
|
302 | returns an iterator obj with contents of git command's | |
303 | response to stdout |
|
303 | response to stdout | |
304 | """ |
|
304 | """ | |
305 | # TODO(skreft): think how we could detect an HTTPLockedException, as |
|
305 | # TODO(skreft): think how we could detect an HTTPLockedException, as | |
306 | # we probably want to have the same mechanism used by mercurial and |
|
306 | # we probably want to have the same mechanism used by mercurial and | |
307 | # simplevcs. |
|
307 | # simplevcs. | |
308 | # For that we would need to parse the output of the command looking for |
|
308 | # For that we would need to parse the output of the command looking for | |
309 | # some signs of the HTTPLockedError, parse the data and reraise it in |
|
309 | # some signs of the HTTPLockedError, parse the data and reraise it in | |
310 | # pygrack. However, that would interfere with the streaming. |
|
310 | # pygrack. However, that would interfere with the streaming. | |
311 | # |
|
311 | # | |
312 | # Now the output of a blocked push is: |
|
312 | # Now the output of a blocked push is: | |
313 | # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git |
|
313 | # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git | |
314 | # POST git-receive-pack (1047 bytes) |
|
314 | # POST git-receive-pack (1047 bytes) | |
315 | # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto` |
|
315 | # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto` | |
316 | # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git |
|
316 | # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git | |
317 | # ! [remote rejected] master -> master (pre-receive hook declined) |
|
317 | # ! [remote rejected] master -> master (pre-receive hook declined) | |
318 | # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git' |
|
318 | # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git' | |
319 |
|
319 | |||
320 | git_command = self._get_fixedpath(request.path_info) |
|
320 | git_command = self._get_fixedpath(request.path_info) | |
321 | if git_command not in self.commands: |
|
321 | if git_command not in self.commands: | |
322 | log.debug('command %s not allowed', git_command) |
|
322 | log.debug('command %s not allowed', git_command) | |
323 | return exc.HTTPForbidden() |
|
323 | return exc.HTTPForbidden() | |
324 |
|
324 | |||
325 | capabilities = None |
|
325 | capabilities = None | |
326 | if git_command == 'git-upload-pack': |
|
326 | if git_command == 'git-upload-pack': | |
327 | capabilities = self._get_want_capabilities(request) |
|
327 | capabilities = self._get_want_capabilities(request) | |
328 |
|
328 | |||
329 | if 'CONTENT_LENGTH' in environ: |
|
329 | if 'CONTENT_LENGTH' in environ: | |
330 | inputstream = FileWrapper(request.body_file_seekable, |
|
330 | inputstream = FileWrapper(request.body_file_seekable, | |
331 | request.content_length) |
|
331 | request.content_length) | |
332 | else: |
|
332 | else: | |
333 | inputstream = request.body_file_seekable |
|
333 | inputstream = request.body_file_seekable | |
334 |
|
334 | |||
335 | resp = Response() |
|
335 | resp = Response() | |
336 | resp.content_type = f'application/x-{git_command}-result' |
|
336 | resp.content_type = f'application/x-{git_command}-result' | |
337 | resp.charset = None |
|
337 | resp.charset = None | |
338 |
|
338 | |||
339 | pre_pull_messages = '' |
|
339 | pre_pull_messages = '' | |
340 | # Upload-pack == clone |
|
340 | # Upload-pack == clone | |
341 | if git_command == 'git-upload-pack': |
|
341 | if git_command == 'git-upload-pack': | |
342 | hook_response = hooks.git_pre_pull(self.extras) |
|
342 | hook_response = hooks.git_pre_pull(self.extras) | |
343 | if hook_response.status != 0: |
|
343 | if hook_response.status != 0: | |
344 | pre_pull_messages = hook_response.output |
|
344 | pre_pull_messages = hook_response.output | |
345 | resp.app_iter = self._build_failed_pre_pull_response( |
|
345 | resp.app_iter = self._build_failed_pre_pull_response( | |
346 | capabilities, pre_pull_messages) |
|
346 | capabilities, pre_pull_messages) | |
347 | return resp |
|
347 | return resp | |
348 |
|
348 | |||
349 | gitenv = dict(os.environ) |
|
349 | gitenv = dict(os.environ) | |
350 | # forget all configs |
|
350 | # forget all configs | |
351 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' |
|
351 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' | |
352 | gitenv['RC_SCM_DATA'] = json.dumps(self.extras) |
|
352 | gitenv['RC_SCM_DATA'] = json.dumps(self.extras) | |
353 | cmd = [self.git_path, git_command[4:], '--stateless-rpc', |
|
353 | cmd = [self.git_path, git_command[4:], '--stateless-rpc', | |
354 | self.content_path] |
|
354 | self.content_path] | |
355 | log.debug('handling cmd %s', cmd) |
|
355 | log.debug('handling cmd %s', cmd) | |
356 |
|
356 | |||
357 | out = subprocessio.SubprocessIOChunker( |
|
357 | out = subprocessio.SubprocessIOChunker( | |
358 | cmd, |
|
358 | cmd, | |
359 | input_stream=inputstream, |
|
359 | input_stream=inputstream, | |
360 | env=gitenv, |
|
360 | env=gitenv, | |
361 | cwd=self.content_path, |
|
361 | cwd=self.content_path, | |
362 | shell=False, |
|
362 | shell=False, | |
363 | fail_on_stderr=False, |
|
363 | fail_on_stderr=False, | |
364 | fail_on_return_code=False |
|
364 | fail_on_return_code=False | |
365 | ) |
|
365 | ) | |
366 |
|
366 | |||
367 | if self.update_server_info and git_command == 'git-receive-pack': |
|
367 | if self.update_server_info and git_command == 'git-receive-pack': | |
368 | # We need to fully consume the iterator here, as the |
|
368 | # We need to fully consume the iterator here, as the | |
369 | # update-server-info command needs to be run after the push. |
|
369 | # update-server-info command needs to be run after the push. | |
370 | out = list(out) |
|
370 | out = list(out) | |
371 |
|
371 | |||
372 | # Updating refs manually after each push. |
|
372 | # Updating refs manually after each push. | |
373 | # This is required as some clients are exposing Git repos internally |
|
373 | # This is required as some clients are exposing Git repos internally | |
374 | # with the dumb protocol. |
|
374 | # with the dumb protocol. | |
375 | cmd = [self.git_path, 'update-server-info'] |
|
375 | cmd = [self.git_path, 'update-server-info'] | |
376 | log.debug('handling cmd %s', cmd) |
|
376 | log.debug('handling cmd %s', cmd) | |
377 | output = subprocessio.SubprocessIOChunker( |
|
377 | output = subprocessio.SubprocessIOChunker( | |
378 | cmd, |
|
378 | cmd, | |
379 | input_stream=inputstream, |
|
379 | input_stream=inputstream, | |
380 | env=gitenv, |
|
380 | env=gitenv, | |
381 | cwd=self.content_path, |
|
381 | cwd=self.content_path, | |
382 | shell=False, |
|
382 | shell=False, | |
383 | fail_on_stderr=False, |
|
383 | fail_on_stderr=False, | |
384 | fail_on_return_code=False |
|
384 | fail_on_return_code=False | |
385 | ) |
|
385 | ) | |
386 | # Consume all the output so the subprocess finishes |
|
386 | # Consume all the output so the subprocess finishes | |
387 | for _ in output: |
|
387 | for _ in output: | |
388 | pass |
|
388 | pass | |
389 |
|
389 | |||
390 | # Upload-pack == clone |
|
390 | # Upload-pack == clone | |
391 | if git_command == 'git-upload-pack': |
|
391 | if git_command == 'git-upload-pack': | |
392 | hook_response = hooks.git_post_pull(self.extras) |
|
392 | hook_response = hooks.git_post_pull(self.extras) | |
393 | post_pull_messages = hook_response.output |
|
393 | post_pull_messages = hook_response.output | |
394 | resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages) |
|
394 | resp.app_iter = self._build_post_pull_response(out, capabilities, pre_pull_messages, post_pull_messages) | |
395 | else: |
|
395 | else: | |
396 | resp.app_iter = out |
|
396 | resp.app_iter = out | |
397 |
|
397 | |||
398 | return resp |
|
398 | return resp | |
399 |
|
399 | |||
400 | def __call__(self, environ, start_response): |
|
400 | def __call__(self, environ, start_response): | |
401 | request = Request(environ) |
|
401 | request = Request(environ) | |
402 | _path = self._get_fixedpath(request.path_info) |
|
402 | _path = self._get_fixedpath(request.path_info) | |
403 | if _path.startswith('info/refs'): |
|
403 | if _path.startswith('info/refs'): | |
404 | app = self.inforefs |
|
404 | app = self.inforefs | |
405 | else: |
|
405 | else: | |
406 | app = self.backend |
|
406 | app = self.backend | |
407 |
|
407 | |||
408 | try: |
|
408 | try: | |
409 | resp = app(request, environ) |
|
409 | resp = app(request, environ) | |
410 | except exc.HTTPException as error: |
|
410 | except exc.HTTPException as error: | |
411 | log.exception('HTTP Error') |
|
411 | log.exception('HTTP Error') | |
412 | resp = error |
|
412 | resp = error | |
413 | except Exception: |
|
413 | except Exception: | |
414 | log.exception('Unknown error') |
|
414 | log.exception('Unknown error') | |
415 | resp = exc.HTTPInternalServerError() |
|
415 | resp = exc.HTTPInternalServerError() | |
416 |
|
416 | |||
417 | return resp(environ, start_response) |
|
417 | return resp(environ, start_response) |
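
GitRepository above is a plain WSGI callable: info/refs requests are routed to inforefs() and POSTed pack requests to backend(). A minimal, hypothetical way to serve one bare repository with it is sketched below; the paths and the extras dict are placeholders, since in practice vcsserver builds this app itself and wraps it with authentication and real hook extras.

    # Illustration only: repository path, git binary and extras are placeholders.
    from wsgiref.simple_server import make_server

    app = GitRepository(
        repo_name='sample-repo',
        content_path='/srv/git/sample-repo.git',   # must contain config, HEAD, info, objects, refs
        git_path='/usr/bin/git',
        update_server_info=False,
        extras={'user': 'admin', 'action': 'pull'})  # placeholder RC_SCM_DATA payload

    make_server('127.0.0.1', 8888, app).serve_forever()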
@@ -1,206 +1,206 @@
1 | # RhodeCode VCSServer provides access to different vcs backends via network.
2 | # Copyright (C) 2014-2023 RhodeCode GmbH
3 | #
4 | # This program is free software; you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation; either version 3 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program; if not, write to the Free Software Foundation,
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 |
18 | import os
19 | import sys
20 | import stat
21 | import pytest
22 | import vcsserver
23 | import tempfile
24 | from vcsserver import hook_utils
25 | from vcsserver.tests.fixture import no_newline_id_generator
26 | from vcsserver.str_utils import safe_bytes, safe_str
27 | from vcsserver.utils import AttributeDict
28 |
29 |
30 | class TestCheckRhodecodeHook:
31 |
32 |     def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
33 |         hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
34 |         with open(hook, 'wb') as f:
35 |             f.write(b'dummy test')
36 |         result = hook_utils.check_rhodecode_hook(hook)
37 |         assert result is False
38 |
39 |     def test_returns_true_when_no_hook_file_found(self, tmpdir):
40 |         hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
41 |         result = hook_utils.check_rhodecode_hook(hook)
42 |         assert result
43 |
44 |     @pytest.mark.parametrize("file_content, expected_result", [
45 |         ("RC_HOOK_VER = '3.3.3'\n", True),
46 |         ("RC_HOOK = '3.3.3'\n", False),
47 |     ], ids=no_newline_id_generator)
48 |     def test_signatures(self, file_content, expected_result, tmpdir):
49 |         hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
50 |         with open(hook, 'wb') as f:
51 |             f.write(safe_bytes(file_content))
52 |
53 |         result = hook_utils.check_rhodecode_hook(hook)
54 |
55 |         assert result is expected_result
56 |
57 |
58 | class BaseInstallHooks:
59 |     HOOK_FILES = ()
60 |
61 |     def _check_hook_file_mode(self, file_path):
62 |         assert os.path.exists(file_path), f'path {file_path} missing'
63 |         stat_info = os.stat(file_path)
64 |
65 |         file_mode = stat.S_IMODE(stat_info.st_mode)
66 |         expected_mode = int('755', 8)
67 |         assert expected_mode == file_mode
68 |
69 |     def _check_hook_file_content(self, file_path, executable):
70 |         executable = executable or sys.executable
71 |         with open(file_path, 'rt') as hook_file:
72 |             content = hook_file.read()
73 |
74 |         expected_env = '#!{}'.format(executable)
75 |         expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(vcsserver.__version__)
76 |         assert content.strip().startswith(expected_env)
77 |         assert expected_rc_version in content
78 |
79 |     def _create_fake_hook(self, file_path, content):
80 |         with open(file_path, 'w') as hook_file:
81 |             hook_file.write(content)
82 |
83 |     def create_dummy_repo(self, repo_type):
84 |         tmpdir = tempfile.mkdtemp()
85 |         repo = AttributeDict()
86 |         if repo_type == 'git':
87 |             repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 |             os.makedirs(repo.path)
89 |             os.makedirs(os.path.join(repo.path, 'hooks'))
90 |             repo.bare = True
91 |
92 |         elif repo_type == 'svn':
93 |             repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 |             os.makedirs(repo.path)
95 |             os.makedirs(os.path.join(repo.path, 'hooks'))
96 |
97 |         return repo
98 |
99 |     def check_hooks(self, repo_path, repo_bare=True):
100 |         for file_name in self.HOOK_FILES:
101 |             if repo_bare:
102 |                 file_path = os.path.join(repo_path, 'hooks', file_name)
103 |             else:
104 |                 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 |             self._check_hook_file_mode(file_path)
106 |             self._check_hook_file_content(file_path, sys.executable)
107 |
108 |
109 | class TestInstallGitHooks(BaseInstallHooks):
110 |     HOOK_FILES = ('pre-receive', 'post-receive')
111 |
112 |     def test_hooks_are_installed(self):
113 |         repo = self.create_dummy_repo('git')
114 |         result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 |         assert result
116 |         self.check_hooks(repo.path, repo.bare)
117 |
118 |     def test_hooks_are_replaced(self):
119 |         repo = self.create_dummy_repo('git')
120 |         hooks_path = os.path.join(repo.path, 'hooks')
121 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 |             self._create_fake_hook(
123 |                 file_path, content="RC_HOOK_VER = 'abcde'\n")
124 |
125 |         result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 |         assert result
127 |         self.check_hooks(repo.path, repo.bare)
128 |
129 |     def test_non_rc_hooks_are_not_replaced(self):
130 |         repo = self.create_dummy_repo('git')
131 |         hooks_path = os.path.join(repo.path, 'hooks')
132 |         non_rc_content = 'echo "non rc hook"\n'
133 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 |             self._create_fake_hook(
135 |                 file_path, content=non_rc_content)
136 |
137 |         result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 |         assert result
139 |
140 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 |             with open(file_path, 'rt') as hook_file:
142 |                 content = hook_file.read()
143 |             assert content == non_rc_content
144 |
145 |     def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 |         repo = self.create_dummy_repo('git')
147 |         hooks_path = os.path.join(repo.path, 'hooks')
148 |         non_rc_content = 'echo "non rc hook"\n'
149 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 |             self._create_fake_hook(
151 |                 file_path, content=non_rc_content)
152 |
153 |         result = hook_utils.install_git_hooks(
154 |             repo.path, repo.bare, force_create=True)
155 |         assert result
156 |         self.check_hooks(repo.path, repo.bare)
157 |
158 |
159 | class TestInstallSvnHooks(BaseInstallHooks):
160 |     HOOK_FILES = ('pre-commit', 'post-commit')
161 |
162 |     def test_hooks_are_installed(self):
163 |         repo = self.create_dummy_repo('svn')
164 |         result = hook_utils.install_svn_hooks(repo.path)
165 |         assert result
166 |         self.check_hooks(repo.path)
167 |
168 |     def test_hooks_are_replaced(self):
169 |         repo = self.create_dummy_repo('svn')
170 |         hooks_path = os.path.join(repo.path, 'hooks')
171 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 |             self._create_fake_hook(
173 |                 file_path, content="RC_HOOK_VER = 'abcde'\n")
174 |
175 |         result = hook_utils.install_svn_hooks(repo.path)
176 |         assert result
177 |         self.check_hooks(repo.path)
178 |
179 |     def test_non_rc_hooks_are_not_replaced(self):
180 |         repo = self.create_dummy_repo('svn')
181 |         hooks_path = os.path.join(repo.path, 'hooks')
182 |         non_rc_content = 'echo "non rc hook"\n'
183 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 |             self._create_fake_hook(
185 |                 file_path, content=non_rc_content)
186 |
187 |         result = hook_utils.install_svn_hooks(repo.path)
188 |         assert result
189 |
190 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 |             with open(file_path, 'rt') as hook_file:
192 |                 content = hook_file.read()
193 |             assert content == non_rc_content
194 |
195 |     def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 |         repo = self.create_dummy_repo('svn')
197 |         hooks_path = os.path.join(repo.path, 'hooks')
198 |         non_rc_content = 'echo "non rc hook"\n'
199 |         for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 |             self._create_fake_hook(
201 |                 file_path, content=non_rc_content)
202 |
203 |         result = hook_utils.install_svn_hooks(
204 |             repo.path, force_create=True)
205 |         assert result
206 |         self.check_hooks(repo.path)
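As a usage note, the installer functions exercised by these tests can also be called directly. Below is a minimal sketch, assuming a bare repository layout like the dummy repos created above; the temporary path is hypothetical, while the `install_git_hooks(path, bare, force_create=...)` call shape mirrors the tests:

import os
import tempfile
from vcsserver import hook_utils

# hypothetical target: a bare git repository with an empty hooks/ directory
repo_path = os.path.join(tempfile.mkdtemp(), 'example_bare_repo')
os.makedirs(os.path.join(repo_path, 'hooks'))

# the second positional argument is the bare flag (repo.bare in the tests above);
# force_create=True also overwrites hooks that were not written by RhodeCode
installed = hook_utils.install_git_hooks(repo_path, True, force_create=True)
print('git hooks installed:', installed)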