@@ -0,0 +1,37 b''
+# -*- coding: utf-8 -*-
+
+import logging
+
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
+from sqlalchemy import Column, LargeBinary
+
+from rhodecode.lib.dbmigrate.versions import _reset_base
+from rhodecode.model import init_model_encryption
+
+
+log = logging.getLogger(__name__)
+
+
+def upgrade(migrate_engine):
+    """
+    Upgrade operations go here.
+    Don't create your own engine; bind migrate_engine to your metadata
+    """
+    _reset_base(migrate_engine)
+    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2
+
+    init_model_encryption(db_4_16_0_2)
+
+    context = MigrationContext.configure(migrate_engine.connect())
+    op = Operations(context)
+
+    repo_group = db_4_16_0_2.RepoGroup.__table__
+
+    with op.batch_alter_table(repo_group.name) as batch_op:
+        batch_op.add_column(
+            Column("changeset_cache", LargeBinary(1024), nullable=True))
+
+
+def downgrade(migrate_engine):
+    pass
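Note for readers of the hunk above: downgrade() is left as a deliberate no-op. Purely for illustration, a reversal of this migration could look like the sketch below; it is not part of the change, and the 'groups' table name is an assumption standing in for RepoGroup.__table__.name.

from alembic.migration import MigrationContext
from alembic.operations import Operations


def downgrade_sketch(migrate_engine):
    # Illustrative only -- the committed downgrade() intentionally does nothing.
    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    # 'groups' is assumed here in place of RepoGroup.__table__.name
    with op.batch_alter_table('groups') as batch_op:
        batch_op.drop_column('changeset_cache')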
@@ -45,7 +45,7 b' PYRAMID_SETTINGS = {}'
 EXTENSIONS = {}
 
 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 9
+__dbversion__ = 98  # defines current db version for migrations
 __platform__ = platform.system()
 __license__ = 'AGPLv3, and Commercial License'
 __author__ = 'RhodeCode GmbH'
@@ -703,8 +703,9 b' class HomeView(BaseAppView):'
     def repo_group_main_page(self):
         c = self.load_default_context()
         c.repo_group = self.request.db_repo_group
-        repo_data, repo_group_data = self._get_groups_and_repos(
-            c.repo_group.group_id)
+        repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id)
+
+        c.repo_group.update_commit_cache()
 
         # json used to render the grids
         c.repos_data = json.dumps(repo_data)
@@ -485,9 +485,6 b' class DbManage(object):'
         self.populate_default_permissions()
         return fixed
 
-    def update_repo_info(self):
-        RepoModel.update_repoinfo()
-
     def config_prompt(self, test_repo_path='', retries=3):
         defaults = self.cli_args
         _path = defaults.get('repos_location')
@@ -614,7 +614,7 b' class Repository(Base, BaseModel):'
         if (cs_cache != self.changeset_cache or not self.changeset_cache):
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s', self.repo_name, cs_cache)
+            log.debug('updated repo %s with new commit cache %s', self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
             Session().add(self)
@@ -2164,7 +2164,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -2230,7 +2230,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -2278,7 +2278,7 b' class Repository(Base, BaseModel):'
                 # if yes, we use the current timestamp instead. Imagine you get
                 # old commit pushed 1y ago, we'd set last update 1y to ago.
                 last_change = _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -2301,7 +2301,7 b' class Repository(Base, BaseModel):'
                 # if yes, we use the current timestamp instead. Imagine you get
                 # old commit pushed 1y ago, we'd set last update 1y to ago.
                 last_change = _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -2301,7 +2301,7 b' class Repository(Base, BaseModel):'
                 # if yes, we use the current timestamp instead. Imagine you get
                 # old commit pushed 1y ago, we'd set last update 1y to ago.
                 last_change = _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1865,7 +1865,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1868,7 +1868,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1867,7 +1867,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1869,7 +1869,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1869,7 +1869,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1912,7 +1912,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -1913,7 +1913,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -2100,7 +2100,7 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.fromtimestamp(0)
             last_change = cs_cache.get('date') or _default
-            log.debug('updated repo %s with new cs cache %s',
+            log.debug('updated repo %s with new commit cache %s',
                       self.repo_name, cs_cache)
             self.updated_on = last_change
             self.changeset_cache = cs_cache
@@ -30,7 +30,7 b' def upgrade(migrate_engine):'
     op = Operations(context)
 
     repo_group = db_4_16_0_2.RepoGroup.__table__
-
+
     with op.batch_alter_table(repo_group.name) as batch_op:
         batch_op.add_column(
             Column("repo_group_name_hash", String(1024), nullable=True, unique=False))
@@ -26,7 +26,7 b' def upgrade(migrate_engine):'
     op = Operations(context)
 
     repo_group = db_4_16_0_2.RepoGroup.__table__
-
+
     with op.batch_alter_table(repo_group.name) as batch_op:
         batch_op.alter_column("repo_group_name_hash", nullable=False)
 
@@ -1489,10 +1489,12 b' def breadcrumb_repo_link(repo):'
     """
 
     path = [
-        link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
+        link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name),
+                title='last change:{}'.format(format_date(group.last_commit_change)))
         for group in repo.groups_with_parents
     ] + [
-        link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
+        link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name),
+                title='last change:{}'.format(format_date(repo.last_commit_change)))
     ]
 
     return literal(' » '.join(path))
@@ -1510,11 +1512,13 b' def breadcrumb_repo_group_link(repo_grou'
 
     path = [
         link_to(group.name,
-                route_path('repo_group_home', repo_group_name=group.group_name))
+                route_path('repo_group_home', repo_group_name=group.group_name),
+                title='last change:{}'.format(format_date(group.last_commit_change)))
         for group in repo_group.parents
     ] + [
         link_to(repo_group.name,
-                route_path('repo_group_home', repo_group_name=repo_group.group_name))
+                route_path('repo_group_home', repo_group_name=repo_group.group_name),
+                title='last change:{}'.format(format_date(repo_group.last_commit_change)))
     ]
 
     return literal(' » '.join(path))
@@ -101,23 +101,34 b' def parse_datetime(text):'
     :param text: string of desired date/datetime or something more verbose,
         like *yesterday*, *2weeks 3days*, etc.
     """
+    if not text:
+        raise ValueError('Wrong date: "%s"' % text)
+
+    if isinstance(text, datetime.datetime):
+        return text
 
     text = text.strip().lower()
 
-    INPUT_FORMATS = (
+    input_formats = (
         '%Y-%m-%d %H:%M:%S',
+        '%Y-%m-%dT%H:%M:%S',
         '%Y-%m-%d %H:%M',
+        '%Y-%m-%dT%H:%M',
         '%Y-%m-%d',
         '%m/%d/%Y %H:%M:%S',
+        '%m/%d/%YT%H:%M:%S',
         '%m/%d/%Y %H:%M',
+        '%m/%d/%YT%H:%M',
         '%m/%d/%Y',
         '%m/%d/%y %H:%M:%S',
+        '%m/%d/%yT%H:%M:%S',
         '%m/%d/%y %H:%M',
+        '%m/%d/%yT%H:%M',
         '%m/%d/%y',
     )
-    for format in INPUT_FORMATS:
+    for format_def in input_formats:
         try:
-            return datetime.datetime(*time.strptime(text, format)[:6])
+            return datetime.datetime(*time.strptime(text, format_def)[:6])
         except ValueError:
             pass
 
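For context on the parse_datetime hunk above: the parser now accepts ISO-8601-style values with a 'T' separator and passes through values that are already datetime objects. A minimal standalone sketch of the same strptime loop (standard library only; the shortened format list and the function name are illustrative, and the real helper falls through to further parsing instead of raising):

import datetime
import time

# Mirrors the patched input_formats tuple: each space-separated format
# gains a 'T'-separated ISO-8601 twin.
INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S', '%Y-%m-%dT%H:%M:%S',
    '%Y-%m-%d %H:%M', '%Y-%m-%dT%H:%M',
    '%Y-%m-%d',
)


def parse_datetime_sketch(text):
    if not text:
        raise ValueError('Wrong date: "%s"' % text)
    # new in this change: datetime objects are returned unchanged
    if isinstance(text, datetime.datetime):
        return text

    text = text.strip().lower()
    for format_def in INPUT_FORMATS:
        try:
            return datetime.datetime(*time.strptime(text, format_def)[:6])
        except ValueError:
            pass
    # the real helper continues with verbose forms ('yesterday', '2weeks') here
    raise ValueError('Unable to parse "%s"' % text)


print(parse_datetime_sketch('2019-02-20T14:30:00'))  # ISO 'T' form now parses
print(parse_datetime_sketch('2019-02-20 14:30'))     # old forms still work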
@@ -59,7 +59,7 b' from rhodecode.lib.vcs.backends.base imp'
 from rhodecode.lib.utils2 import (
     str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
     time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
-    glob2re, StrictAttributeDict, cleaned_uri)
+    glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
     JsonRaw
 from rhodecode.lib.ext_json import json
@@ -1730,7 +1730,9 b' class Repository(Base, BaseModel):'
         from rhodecode.lib.vcs.backends.base import EmptyCommit
         dummy = EmptyCommit().__json__()
         if not self._changeset_cache:
-            return dummy
+            dummy['source_repo_id'] = self.repo_id
+            return json.loads(json.dumps(dummy))
+
         try:
             return json.loads(self._changeset_cache)
         except TypeError:
@@ -2183,6 +2185,16 b' class Repository(Base, BaseModel):'
         return make_lock, currently_locked, lock_info
 
     @property
+    def last_commit_change(self):
+        from rhodecode.lib.vcs.utils.helpers import parse_datetime
+        empty_date = datetime.datetime.fromtimestamp(0)
+        date_latest = self.changeset_cache.get('date', empty_date)
+        try:
+            return parse_datetime(date_latest)
+        except Exception:
+            return empty_date
+
+    @property
     def last_db_change(self):
         return self.updated_on
 
@@ -2275,6 +2287,7 b' class Repository(Base, BaseModel):'
         """
         Update cache of last changeset for repository, keys should be::
 
+            source_repo_id
             short_id
             raw_id
             revision
@@ -2283,7 +2296,6 b' class Repository(Base, BaseModel):'
             date
             author
 
-        :param cs_cache:
         """
         from rhodecode.lib.vcs.backends.base import BaseChangeset
         if cs_cache is None:
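The cache read and written by the code above is a plain JSON document whose keys are the ones listed in the docstring. A small illustration with hypothetical values, including the epoch fallback used by the new last_commit_change property when the cached date is missing or unparsable (a standard-library stand-in replaces parse_datetime):

import datetime

# Hypothetical cached commit entry, mirroring the documented keys.
cs_cache = {
    'source_repo_id': 42,                  # key introduced by this change
    'short_id': 'a1b2c3d4e5f6',
    'raw_id': 'a1b2c3d4e5f6a7b8c9d0a1b2c3d4e5f6a7b8c9d0',
    'revision': 7,
    'parents': [],
    'message': 'example commit message',
    'date': '2019-02-20T14:30:00',
    'author': 'Jane Doe <jane@example.com>',
}

EMPTY_DATE = datetime.datetime.fromtimestamp(0)


def last_commit_change(cache):
    # same shape as the new property: parse the cached date,
    # fall back to the epoch on any failure
    date_latest = cache.get('date', EMPTY_DATE)
    try:
        return datetime.datetime.strptime(date_latest, '%Y-%m-%dT%H:%M:%S')
    except Exception:
        return EMPTY_DATE


print(last_commit_change(cs_cache))  # 2019-02-20 14:30:00
print(last_commit_change({}))        # epoch fallback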
@@ -2310,17 +2322,20 b' class Repository(Base, BaseModel):'
         if is_outdated(cs_cache) or not self.changeset_cache:
             _default = datetime.datetime.utcnow()
             last_change = cs_cache.get('date') or _default
-            if self.updated_on and self.updated_on > last_change:
-                # we check if last update is newer than the new value
-                # if yes, we use the current timestamp instead. Imagine you get
-                # old commit pushed 1y ago, we'd set last update 1y to ago.
-                last_change = _default
-            log.debug('updated repo %s with new cs cache %s',
-                      self.repo_name, cs_cache)
-            self.updated_on = last_change
+            # we check if last update is newer than the new value
+            # if yes, we use the current timestamp instead. Imagine you get
+            # old commit pushed 1y ago, we'd set last update 1y to ago.
+            last_change_timestamp = datetime_to_time(last_change)
+            current_timestamp = datetime_to_time(last_change)
+            if last_change_timestamp > current_timestamp:
+                cs_cache['date'] = _default
+
             self.changeset_cache = cs_cache
             Session().add(self)
             Session().commit()
+
+            log.debug('updated repo %s with new commit cache %s',
+                      self.repo_name, cs_cache)
         else:
             log.debug('Skipping update_commit_cache for repo:`%s` '
                       'commit already with latest changes', self.repo_name)
@@ -2489,6 +2504,8 b' class RepoGroup(Base, BaseModel):'
     created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
     updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
     personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
+    _changeset_cache = Column(
+        "changeset_cache", LargeBinary(), nullable=True) # JSON data
 
     repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
     users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
@@ -2513,6 +2530,29 b' class RepoGroup(Base, BaseModel):'
         self._group_name = value
         self.group_name_hash = self.hash_repo_group_name(value)
 
+    @hybrid_property
+    def changeset_cache(self):
+        from rhodecode.lib.vcs.backends.base import EmptyCommit
+        dummy = EmptyCommit().__json__()
+        if not self._changeset_cache:
+            dummy['source_repo_id'] = ''
+            return json.loads(json.dumps(dummy))
+
+        try:
+            return json.loads(self._changeset_cache)
+        except TypeError:
+            return dummy
+        except Exception:
+            log.error(traceback.format_exc())
+            return dummy
+
+    @changeset_cache.setter
+    def changeset_cache(self, val):
+        try:
+            self._changeset_cache = json.dumps(val)
+        except Exception:
+            log.error(traceback.format_exc())
+
     @validates('group_parent_id')
     def validate_group_parent_id(self, key, val):
         """
@@ -2608,8 +2648,7 b' class RepoGroup(Base, BaseModel):'
         return q.all()
 
     @property
-    def parents(self):
-        parents_recursion_limit = 10
+    def parents(self, parents_recursion_limit = 10):
         groups = []
         if self.parent_group is None:
             return groups
@@ -2632,6 +2671,16 b' class RepoGroup(Base, BaseModel):'
         return groups
 
     @property
+    def last_commit_change(self):
+        from rhodecode.lib.vcs.utils.helpers import parse_datetime
+        empty_date = datetime.datetime.fromtimestamp(0)
+        date_latest = self.changeset_cache.get('date', empty_date)
+        try:
+            return parse_datetime(date_latest)
+        except Exception:
+            return empty_date
+
+    @property
     def last_db_change(self):
         return self.updated_on
 
@@ -2670,7 +2719,7 b' class RepoGroup(Base, BaseModel):'
 
         return cnt + children_count(self)
 
-    def _recursive_objects(self, include_repos=True):
+    def _recursive_objects(self, include_repos=True, include_groups=True):
         all_ = []
 
         def _get_members(root_gr):
@@ -2680,11 +2729,16 b' class RepoGroup(Base, BaseModel):'
             childs = root_gr.children.all()
             if childs:
                 for gr in childs:
-                    all_.append(gr)
+                    if include_groups:
+                        all_.append(gr)
                     _get_members(gr)
 
+        root_group = []
+        if include_groups:
+            root_group = [self]
+
         _get_members(self)
-        return [self] + all_
+        return root_group + all_
 
     def recursive_groups_and_repos(self):
         """
@@ -2698,6 +2752,12 b' class RepoGroup(Base, BaseModel):'
         """
         return self._recursive_objects(include_repos=False)
 
+    def recursive_repos(self):
+        """
+        Returns all children repositories for this group
+        """
+        return self._recursive_objects(include_groups=False)
+
     def get_new_name(self, group_name):
         """
         returns new full group name based on parent and new name
2703 | 2763 | returns new full group name based on parent and new name |
@@ -2708,6 +2768,62 b' class RepoGroup(Base, BaseModel):' | |||
|
2708 | 2768 | self.parent_group else []) |
|
2709 | 2769 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
2710 | 2770 | |
|
2771 | def update_commit_cache(self, config=None): | |
|
2772 | """ | |
|
2773 | Update cache of last changeset for newest repository inside this group, keys should be:: | |
|
2774 | ||
|
2775 | source_repo_id | |
|
2776 | short_id | |
|
2777 | raw_id | |
|
2778 | revision | |
|
2779 | parents | |
|
2780 | message | |
|
2781 | date | |
|
2782 | author | |
|
2783 | ||
|
2784 | """ | |
|
2785 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |
|
2786 | ||
|
2787 | def repo_groups_and_repos(): | |
|
2788 | all_entries = OrderedDefaultDict(list) | |
|
2789 | ||
|
2790 | def _get_members(root_gr, pos=0): | |
|
2791 | ||
|
2792 | for repo in root_gr.repositories: | |
|
2793 | all_entries[root_gr].append(repo) | |
|
2794 | ||
|
2795 | # fill in all parent positions | |
|
2796 | for parent_group in root_gr.parents: | |
|
2797 | all_entries[parent_group].extend(all_entries[root_gr]) | |
|
2798 | ||
|
2799 | children_groups = root_gr.children.all() | |
|
2800 | if children_groups: | |
|
2801 | for cnt, gr in enumerate(children_groups, 1): | |
|
2802 | _get_members(gr, pos=pos+cnt) | |
|
2803 | ||
|
2804 | _get_members(root_gr=self) | |
|
2805 | return all_entries | |
|
2806 | ||
|
2807 | empty_date = datetime.datetime.fromtimestamp(0) | |
|
2808 | for repo_group, repos in repo_groups_and_repos().items(): | |
|
2809 | ||
|
2810 | latest_repo_cs_cache = {} | |
|
2811 | for repo in repos: | |
|
2812 | repo_cs_cache = repo.changeset_cache | |
|
2813 | date_latest = latest_repo_cs_cache.get('date', empty_date) | |
|
2814 | date_current = repo_cs_cache.get('date', empty_date) | |
|
2815 | current_timestamp = datetime_to_time(parse_datetime(date_latest)) | |
|
2816 | if current_timestamp < datetime_to_time(parse_datetime(date_current)): | |
|
2817 | latest_repo_cs_cache = repo_cs_cache | |
|
2818 | latest_repo_cs_cache['source_repo_id'] = repo.repo_id | |
|
2819 | ||
|
2820 | repo_group.changeset_cache = latest_repo_cs_cache | |
|
2821 | Session().add(repo_group) | |
|
2822 | Session().commit() | |
|
2823 | ||
|
2824 | log.debug('updated repo group %s with new commit cache %s', | |
|
2825 | repo_group.group_name, latest_repo_cs_cache) | |
|
2826 | ||
|
2711 | 2827 | def permissions(self, with_admins=True, with_owner=True, |
|
2712 | 2828 | expand_from_user_groups=False): |
|
2713 | 2829 | """ |
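The heart of the new RepoGroup.update_commit_cache() above is a "newest cached commit wins" pass: for every group, the cached commit of each repository underneath it (child-group repositories are propagated to the parents) is compared by date, and the winner is stored on the group together with the id of the repository it came from. A standalone sketch of that selection step, using datetime objects in place of the cached date strings and a plain helper in place of datetime_to_time:

import calendar
import datetime

EMPTY_DATE = datetime.datetime.fromtimestamp(0)


def to_timestamp(dt):
    # stand-in for rhodecode's datetime_to_time helper
    return calendar.timegm(dt.utctimetuple())


def pick_latest_cache(repos):
    """repos: iterable of (repo_id, changeset_cache) pairs."""
    latest_repo_cs_cache = {}
    for repo_id, repo_cs_cache in repos:
        date_latest = latest_repo_cs_cache.get('date', EMPTY_DATE)
        date_current = repo_cs_cache.get('date', EMPTY_DATE)
        if to_timestamp(date_latest) < to_timestamp(date_current):
            latest_repo_cs_cache = dict(repo_cs_cache)
            latest_repo_cs_cache['source_repo_id'] = repo_id  # remember the winner
    return latest_repo_cs_cache


repos = [
    (1, {'date': datetime.datetime(2018, 5, 1), 'raw_id': 'aaa'}),
    (2, {'date': datetime.datetime(2019, 2, 20), 'raw_id': 'bbb'}),
]
print(pick_latest_cache(repos))  # repo 2 wins and is tagged with source_repo_id == 2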
@@ -192,7 +192,7 b' class RepoModel(BaseModel):'
         return repo_log
 
     @classmethod
-    def update_repoinfo(cls, repositories=None):
+    def update_commit_cache(cls, repositories=None):
         if not repositories:
             repositories = Repository.getAll()
         for repo in repositories:
@@ -309,6 +309,10 b' class RepoGroupModel(BaseModel):'
         # trigger the post hook
         from rhodecode.lib.hooks_base import log_create_repository_group
         repo_group = RepoGroup.get_by_group_name(group_name)
+
+        # update repo group commit caches initially
+        repo_group.update_commit_cache()
+
         log_create_repository_group(
             created_by=user.username, **repo_group.get_dict())
 
@@ -686,6 +690,13 b' class RepoGroupModel(BaseModel):'
             'revoked permission from usergroup: {} on repogroup: {}'.format(
                 group_name, repo_group), namespace='security.repogroup')
 
+    @classmethod
+    def update_commit_cache(cls, repo_groups=None):
+        if not repo_groups:
+            repo_groups = RepoGroup.getAll()
+        for repo_group in repo_groups:
+            repo_group.update_commit_cache()
+
     def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
                                 super_user_actions=False):
 
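With the classmethod above, commit caches for existing groups can be backfilled in one pass from any context where the application is initialized (for example an interactive RhodeCode shell). A minimal sketch, with the invocation environment and import path assumed:

# assumes a fully initialized RhodeCode environment (database session configured)
from rhodecode.model.repo_group import RepoGroupModel

# iterates RepoGroup.getAll() and calls update_commit_cache() on each group
RepoGroupModel.update_commit_cache()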
@@ -707,6 +718,11 b' class RepoGroupModel(BaseModel):'
                 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
             return _render("last_change", last_change)
 
+        def last_rev(repo_name, cs_cache):
+            return _render('revision', repo_name, cs_cache.get('revision'),
+                           cs_cache.get('raw_id'), cs_cache.get('author'),
+                           cs_cache.get('message'), cs_cache.get('date'))
+
         def desc(desc, personal):
             return _render(
                 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
@@ -723,13 +739,15 b' class RepoGroupModel(BaseModel):'
 
         repo_group_data = []
         for group in repo_group_list:
+            cs_cache = group.changeset_cache
+            last_repo_name = cs_cache.get('source_repo_name')
 
             row = {
                 "menu": quick_menu(group.group_name),
                 "name": repo_group_lnk(group.group_name),
                 "name_raw": group.group_name,
-                "last_change": last_change(group.last_db_change),
-                "last_change_raw": datetime_to_time(group.last_db_change),
+                "last_change": last_change(group.last_commit_change),
+                "last_change_raw": datetime_to_time(group.last_commit_change),
 
                 "last_changeset": "",
                 "last_changeset_raw": "",
@@ -292,7 +292,7 b''
   position: absolute;
   top: 100%;
   left: 0;
-  min-width: 1
+  min-width: 180px;
   margin: 2px 0 0;
   padding: 0;
   text-align: left;
@@ -1,10 +1,16 b''
 <%namespace name="base" file="/base/base.mako"/>
 
 <%
+    source_repo_id = c.repo_group.changeset_cache.get('source_repo_id')
+
     elems = [
         (_('Repository Group ID'), c.repo_group.group_id, '', ''),
         (_('Owner'), lambda:base.gravatar_with_user(c.repo_group.user.email), '', ''),
         (_('Created on'), h.format_date(c.repo_group.created_on), '', ''),
+        (_('Updated on'), h.format_date(c.repo_group.updated_on), '', ''),
+        (_('Cached Commit date'), (c.repo_group.changeset_cache.get('date')), '', ''),
+        (_('Cached Commit repo_id'), (h.link_to_if(source_repo_id, source_repo_id, h.route_path('repo_summary', repo_name='_{}'.format(source_repo_id)))), '', ''),
+
         (_('Is Personal Group'), c.repo_group.personal or False, '', ''),
 
         (_('Total repositories'), c.repo_group.repositories_recursive_count, '', ''),
@@ -7,6 +7,7 b''
     (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''),
     (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''),
     (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''),
+    (_('Cached Commit date'), c.rhodecode_db_repo.changeset_cache.get('date'), '', ''),
     (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]),
     (_('Pull requests source'), len(c.rhodecode_db_repo.pull_requests_source), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.source_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_source]),
     (_('Pull requests target'), len(c.rhodecode_db_repo.pull_requests_target), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.target_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_target]),