# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.model.db
~~~~~~~~~~~~~~~~~~
Database Models for Kallithea
This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Apr 08, 2010
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
import os
import time
import logging
import datetime
import traceback
import hashlib
import collections
import functools
from sqlalchemy import *
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
from beaker.cache import cache_region, region_invalidate
from webob.exc import HTTPNotFound
from pylons.i18n.translation import lazy_ugettext as _
from kallithea import DB_PREFIX
from kallithea.lib.exceptions import DefaultUserException
from kallithea.lib.vcs import get_backend
from kallithea.lib.vcs.utils.helpers import get_scm
from kallithea.lib.vcs.utils.lazy import LazyProperty
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int, \
get_clone_url, urlreadable
from kallithea.lib.compat import json
from kallithea.lib.caching_query import FromCache
from kallithea.model.meta import Base, Session
URL_SEP = '/'
log = logging.getLogger(__name__)
#==============================================================================
# BASE CLASSES
_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
class BaseModel(object):
    """Base Model for all classes"""

    @classmethod
    def _get_keys(cls):
        """return column names for this model"""
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """return dict with keys and values corresponding
        to this model data"""
        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data"""
        l = []
        for k in self._get_keys():
            l.append((k, getattr(self, k),))
        return l

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Return the row with the given primary key, or raise HTTPNotFound.

        Raises HTTPNotFound both for a non-integer id and for a missing row,
        so callers can rely on a non-None result.
        """
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound

        res = cls.query().get(id_)
        if res is None:
            raise HTTPNotFound
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
class Setting(Base, BaseModel):
    __tablename__ = DB_PREFIX + 'settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # converters used to turn the unicode value stored in the database back
    # into the declared setting type on read
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = ''

    app_settings_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    app_settings_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_value = val
        self.app_settings_type = type

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # the raw stored value must always be unicode (see setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        if val not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value, self.app_settings_type
        )

    @classmethod
    def get_by_name(cls, key):
        return cls.query()\
            .filter(cls.app_settings_name == key).scalar()

    @classmethod
    def get_by_name_or_create(cls, key, val='', type='unicode'):
        res = cls.get_by_name(key)
        if res is None:
            res = cls(key, val, type)
        return res

    @classmethod
    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
        """
        Creates or updates Kallithea setting. If updates are triggered, it will only
        update parameters that are explicitly set. Optional instance will be skipped.

        :param key:
        :param val:
        :param type:
        :return:
        """
        res = cls.get_by_name(key)
        if res is None:
            # new setting: unwrap Optional defaults before storing
            val = Optional.extract(val)
            type = Optional.extract(type)
            res = cls(key, val, type)
        else:
            res.app_settings_name = key
            if not isinstance(val, Optional):
                # update if set
                res.app_settings_value = val
            if not isinstance(type, Optional):
                # update if set
                res.app_settings_type = type
        return res

    @classmethod
    def get_app_settings(cls, cache=False):
        """Return all settings as a name -> converted-value dict."""
        ret = cls.query()
        if cache:
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
        if ret is None:
            raise Exception('Could not get application settings !')
        settings = {}
        for each in ret:
            settings[each.app_settings_name] = \
                each.app_settings_value
        return settings

    @classmethod
    def get_auth_plugins(cls, cache=False):
        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
        return auth_plugins

    @classmethod
    def get_auth_settings(cls, cache=False):
        """Return all settings whose name starts with ``auth_``."""
        ret = cls.query()\
            .filter(cls.app_settings_name.startswith('auth_')).all()
        fd = {}
        for row in ret:
            fd[row.app_settings_name] = row.app_settings_value
        return fd

    @classmethod
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
        """Return all settings whose name starts with ``default_``,
        optionally stripping that prefix from the returned keys."""
        ret = cls.query()\
            .filter(cls.app_settings_name.startswith('default_')).all()
        fd = {}
        for row in ret:
            key = row.app_settings_name
            if strip_prefix:
                key = remove_prefix(key, prefix='default_')
            fd.update({key: row.app_settings_value})
        return fd

    @classmethod
    def get_server_info(cls):
        """Collect versions of installed modules and platform details."""
        import pkg_resources
        import platform
        import kallithea
        from kallithea.lib.utils import check_git_version
        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
        info = {
            'modules': sorted(mods, key=lambda k: k[0].lower()),
            'py_version': platform.python_version(),
            'platform': safe_unicode(platform.platform()),
            'kallithea_version': kallithea.__version__,
            'git_version': safe_unicode(check_git_version()),
            'git_path': kallithea.CONFIG.get('git_path')
        }
        return info
class Ui(Base, BaseModel):
    __tablename__ = DB_PREFIX + 'ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # ui keys of the hooks that Kallithea itself installs and manages
    HOOK_UPDATE = 'changegroup.update'
    HOOK_REPO_SIZE = 'changegroup.repo_size'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'

    ui_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    ui_section = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    ui_key = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    ui_value = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    ui_active = Column(Boolean(), nullable=True, unique=None, default=True)

    # def __init__(self, section='', key='', value=''):
    #     self.ui_section = section
    #     self.ui_key = key
    #     self.ui_value = value

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.ui_key == key).scalar()

    @classmethod
    def get_builtin_hooks(cls):
        """Return the ui rows for the hooks managed by Kallithea itself."""
        q = cls.query()
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
        return q.all()

    @classmethod
    def get_custom_hooks(cls):
        """Return the ui rows in the hooks section that are NOT built-in."""
        q = cls.query()
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
        q = q.filter(cls.ui_section == 'hooks')
        return q.all()

    @classmethod
    def get_repos_location(cls):
        return cls.get_by_key('/').ui_value

    @classmethod
    def create_or_update_hook(cls, key, val):
        new_ui = cls.get_by_key(key) or cls()
        new_ui.ui_section = 'hooks'
        new_ui.ui_active = True
        new_ui.ui_key = key
        new_ui.ui_value = val
        Session().add(new_ui)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    DEFAULT_USER = 'default'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    username = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    password = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    active = Column(Boolean(), nullable=True, unique=None, default=True)
    admin = Column(Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    lastname = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    last_login = Column(DateTime(timezone=False), nullable=True, unique=None, default=None)
    extern_type = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    extern_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    api_key = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column(Boolean(), nullable=False, unique=None, default=True)
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
    repositories = relationship('Repository')
    repo_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    group_member = relationship('UserGroupMember', cascade='all')
    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    #extra emails for this user
    user_emails = relationship('UserEmailMap', cascade='all')
    #extra API keys
    user_api_keys = relationship('UserApiKeys', cascade='all')

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lowercased; empty values are stored as NULL
        self._email = val.lower() if val else None

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        """Main email plus all extra emails from UserEmailMap."""
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
        return [self.email] + [x.email for x in other]

    @property
    def api_keys(self):
        """Main API key plus all extra keys from UserApiKeys."""
        other = UserApiKeys.query().filter(UserApiKeys.user==self).all()
        return [self.api_key] + [x.api_key for x in other]

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def full_name(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def full_name_or_username(self):
        """
        Show full name.
        If full name is not set, fall back to username.
        """
        return ('%s %s' % (self.firstname, self.lastname)
                if (self.firstname and self.lastname) else self.username)

    @property
    def full_name_and_username(self):
        """
        Show full name and username as 'Firstname Lastname (username)'.
        If full name is not set, fall back to username.
        """
        return ('%s %s (%s)' % (self.firstname, self.lastname, self.username)
                if (self.firstname and self.lastname) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    @property
    def AuthUser(self):
        """Returns instance of AuthUser for this user"""
        from kallithea.lib.auth import AuthUser
        return AuthUser(dbuser=self)

    @hybrid_property
    def user_data(self):
        if not self._user_data:
            return {}
        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @classmethod
    def get_or_404(cls, id_, allow_default=True):
        '''
        Overridden version of BaseModel.get_or_404, with an extra check on
        the default user.
        '''
        user = super(User, cls).get_or_404(id_)
        if allow_default == False:
            if user.username == User.DEFAULT_USER:
                raise DefaultUserException
        return user

    @classmethod
    def get_by_username(cls, username, case_insensitive=False, cache=False):
        if case_insensitive:
            q = cls.query().filter(cls.username.ilike(username))
        else:
            q = cls.query().filter(cls.username == username)
        if cache:
            q = q.options(FromCache(
                "sql_cache_short",
                "get_user_%s" % _hash_key(username)
            ))
        return q.scalar()

    @classmethod
    def get_by_api_key(cls, api_key, cache=False, fallback=True):
        # a valid main api key is a 40-char alphanumeric token
        if len(api_key) != 40 or not api_key.isalnum():
            return None
        q = cls.query().filter(cls.api_key == api_key)
        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_api_key_%s" % api_key))
        res = q.scalar()

        if fallback and not res:
            #fallback to additional keys
            _res = UserApiKeys.query()\
                .filter(UserApiKeys.api_key == api_key)\
                .filter(or_(UserApiKeys.expires == -1,
                            UserApiKeys.expires >= time.time()))\
                .first()
            if _res:
                res = _res.user
        return res

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        if case_insensitive:
            q = cls.query().filter(cls.email.ilike(email))
        else:
            q = cls.query().filter(cls.email == email)
        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_email_key_%s" % email))
        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(UserEmailMap.email.ilike(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(FromCache("sql_cache_short",
                                        "get_email_map_key_%s" % email))
            ret = getattr(q.scalar(), 'user', None)
        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from kallithea.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user is not None:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user is not None:
            return user

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    @classmethod
    def get_first_admin(cls):
        user = User.query().filter(User.admin == True).first()
        if user is None:
            raise Exception('Missing administrative account!')
        return user

    @classmethod
    def get_default_user(cls, cache=False):
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('Missing default account!')
        return user

    def get_api_data(self, details=False):
        """
        Common function for generating user related data for API

        :param details: also include extern/api-key/login details
        """
        user = self
        data = dict(
            user_id=user.user_id,
            username=user.username,
            firstname=user.name,
            lastname=user.lastname,
            email=user.email,
            emails=user.emails,
            active=user.active,
            admin=user.admin,
        )
        if details:
            data.update(dict(
                extern_type=user.extern_type,
                extern_name=user.extern_name,
                api_key=user.api_key,
                api_keys=user.api_keys,
                last_login=user.last_login,
                ip_addresses=user.ip_addresses
            ))
        return data

    def __json__(self):
        data = dict(
            full_name=self.full_name,
            full_name_or_username=self.full_name_or_username,
            short_contact=self.short_contact,
            full_contact=self.full_contact
        )
        data.update(self.get_api_data())
        return data
class UserApiKeys(Base, BaseModel):
    """Extra API keys for a user, with optional expiry."""
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        UniqueConstraint('api_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    user_api_key_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column(String(255, convert_unicode=False), nullable=False, unique=True)
    description = Column(UnicodeText(1024))
    expires = Column(Float(53), nullable=False)  # unix timestamp; -1 means never

    user = relationship('User')

    @property
    def expired(self):
        """True if this key has an expiry time and it has passed."""
        if self.expires == -1:
            return False
        return time.time() > self.expires
class UserEmailMap(Base, BaseModel):
    """Extra email addresses mapped to a user."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=False, default=None)

    user = relationship('User')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # keep the same lowercasing convention as User.email
        self._email = val.lower() if val else None
class UserIpMap(Base, BaseModel):
    """Allowed IP addresses (or networks) per user."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column(String(255, convert_unicode=False), nullable=True, unique=False, default=None)

    user = relationship('User')

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network, broadcast] string pair for the given address."""
        from kallithea.lib import ipaddr
        net = ipaddr.IPNetwork(address=ip_addr)
        return [str(net.network), str(net.broadcast)]

    def __json__(self):
        return dict(
            ip_addr=self.ip_addr,
            ip_range=self._get_ip_range(self.ip_addr)
        )

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
class UserLog(Base, BaseModel):
    """Audit log of user actions on repositories."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    user_log_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): user_id/username columns are implied by the
    # User.user_log relationship — confirm against the migration scripts
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    user_ip = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    action = Column(UnicodeText(1200000, convert_unicode=False), nullable=True, unique=None, default=None)
    action_date = Column(DateTime(timezone=False), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.repository_name,
                                      self.action)

    @property
    def action_as_day(self):
        """The date (without time) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
class UserGroup(Base, BaseModel):
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    users_group_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
    user_group_description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
    users_group_active = Column(Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    # note: target class names must not contain stray whitespace, or the
    # mapper cannot resolve them
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user = relationship('User')

    @hybrid_property
    def group_data(self):
        if not self._group_data:
            return {}
        try:
            return json.loads(self._group_data)
        except TypeError:
            return {}

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        if case_insensitive:
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            ))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        user_group = cls.query()
        if cache:
            user_group = user_group.options(FromCache("sql_cache_short",
                                                      "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def get_api_data(self, with_members=True):
        """Common function for generating API data for this user group."""
        user_group = self
        data = dict(
            users_group_id=user_group.users_group_id,
            group_name=user_group.users_group_name,
            group_description=user_group.user_group_description,
            active=user_group.users_group_active,
            owner=user_group.user.username,
        )
        if with_members:
            members = []
            for user in user_group.members:
                user = user.user
                members.append(user.get_api_data())
            data['members'] = members
        return data
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    # the member User; accessed as ``member.user`` by UserGroup.get_api_data
    user = relationship('User')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
class RepositoryField(Base, BaseModel):
    """Custom extra field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column(String(250, convert_unicode=False))
    field_label = Column(String(1024, convert_unicode=False), nullable=False)
    field_value = Column(String(10000, convert_unicode=False), nullable=False)
    field_desc = Column(String(1024, convert_unicode=False), nullable=False)
    field_type = Column(String(255), nullable=False, unique=None)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from a field key, if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
class Repository(Base, BaseModel):
__tablename__ = 'repositories'
UniqueConstraint('repo_name'),
Index('r_repo_name_idx', 'repo_name'),
DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
STATE_CREATED = 'repo_state_created'
STATE_PENDING = 'repo_state_pending'
STATE_ERROR = 'repo_state_error'
repo_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
repo_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
repo_state = Column(String(255), nullable=True)
clone_uri = Column(String(255, convert_unicode=False), nullable=True, unique=False, default=None)
repo_type = Column(String(255, convert_unicode=False), nullable=False, unique=False, default=None)
private = Column(Boolean(), nullable=True, unique=None, default=None)
enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
created_on = Column(DateTime(timezone=False), nullable=False, unique=None, default=datetime.datetime.now)
updated_on = Column(DateTime(timezone=False), nullable=False, unique=None, default=datetime.datetime.now)
_landing_revision = Column("landing_revision", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
enable_locking = Column(Boolean(), nullable=False, unique=None, default=False)
_locked = Column("locked", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
_changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
fork_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
fork = relationship('Repository', remote_side=repo_id)
group = relationship('RepoGroup')
repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
stats = relationship('Statistics', cascade='all', uselist=False)
followers = relationship('UserFollowing',
primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
cascade='all')
extra_fields = relationship('RepositoryField',
cascade="all, delete-orphan")
logs = relationship('UserLog')
comments = relationship('ChangesetComment', cascade="all, delete-orphan")
pull_requests_org = relationship('PullRequest',
primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
pull_requests_other = relationship('PullRequest',
primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
safe_unicode(self.repo_name))
def landing_rev(self):
# always should return [rev_type, rev]
if self._landing_revision:
_rev_info = self._landing_revision.split(':')
if len(_rev_info) < 2:
_rev_info.insert(0, 'rev')
return [_rev_info[0], _rev_info[1]]
return [None, None]
@landing_rev.setter
def landing_rev(self, val):
if ':' not in val:
raise ValueError('value must be delimited with `:` and consist '
'of <rev_type>:<rev>, got %s instead' % val)
self._landing_revision = val
def locked(self):
# always should return [user_id, timelocked]
if self._locked:
_lock_info = self._locked.split(':')
return int(_lock_info[0]), _lock_info[1]
@locked.setter
def locked(self, val):
if val and isinstance(val, (list, tuple)):
self._locked = ':'.join(map(str, val))
self._locked = None
def changeset_cache(self):
cs_cache = json.loads(self._changeset_cache) # might raise on bad data
cs_cache['raw_id'] # verify data, raise exception on error
return cs_cache
except (TypeError, KeyError, ValueError):
return EmptyChangeset().__json__()
@changeset_cache.setter
def changeset_cache(self, val):
self._changeset_cache = json.dumps(val)
def url_sep(cls):
return URL_SEP
def normalize_repo_name(cls, repo_name):
Normalizes os specific repo_name to the format internally stored inside
database using URL_SEP
:param cls:
:param repo_name:
return cls.url_sep().join(repo_name.split(os.sep))
def get_by_repo_name(cls, repo_name):
q = Session().query(cls).filter(cls.repo_name == repo_name)
q = q.options(joinedload(Repository.fork))\
.options(joinedload(Repository.user))\
.options(joinedload(Repository.group))
def get_by_full_path(cls, repo_full_path):
repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
repo_name = cls.normalize_repo_name(repo_name)
return cls.get_by_repo_name(repo_name.strip(URL_SEP))
def get_repo_forks(cls, repo_id):
return cls.query().filter(Repository.fork_id == repo_id)
def base_path(cls):
Returns base path where all repos are stored
q = Session().query(Ui)\
.filter(Ui.ui_key == cls.url_sep())
q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
return q.one().ui_value
def forks(self):
Return forks of this repo
return Repository.get_repo_forks(self.repo_id)
def parent(self):
Returns fork parent
return self.fork
def just_name(self):
return self.repo_name.split(Repository.url_sep())[-1]
def groups_with_parents(self):
groups = []
if self.group is None:
return groups
cur_gr = self.group
groups.insert(0, cur_gr)
while 1:
gr = getattr(cur_gr, 'parent_group', None)
cur_gr = cur_gr.parent_group
if gr is None:
break
groups.insert(0, gr)
def groups_and_repo(self):
return self.groups_with_parents, self.just_name, self.repo_name
@LazyProperty
def repo_path(self):
Returns base full path for that repository means where it actually
exists on a filesystem
q = Session().query(Ui).filter(Ui.ui_key ==
Repository.url_sep())
def repo_full_path(self):
p = [self.repo_path]
# we need to split the name by / since this is how we store the
# names in the database, but that eventually needs to be converted
# into a valid system path
p += self.repo_name.split(Repository.url_sep())
return os.path.join(*map(safe_unicode, p))
def cache_keys(self):
Returns associated cache keys for that repo
return CacheInvalidation.query()\
.filter(CacheInvalidation.cache_args == self.repo_name)\
.order_by(CacheInvalidation.cache_key)\
.all()
def get_new_name(self, repo_name):
returns new full repository name based on assigned group and new new
:param group_name:
path_prefix = self.group.full_path_splitted if self.group else []
return Repository.url_sep().join(path_prefix + [repo_name])
def _ui(self):
Creates an db based ui object for this repository
from kallithea.lib.utils import make_ui
return make_ui('db', clear_session=False)
def is_valid(cls, repo_name):
returns True if given repo name is a valid filesystem repository
from kallithea.lib.utils import is_valid_repo
return is_valid_repo(repo_name, cls.base_path())
def get_api_data(self):
Common function for generating repo api data
repo = self
repo_id=repo.repo_id,
repo_name=repo.repo_name,
repo_type=repo.repo_type,
clone_uri=repo.clone_uri,
private=repo.private,
created_on=repo.created_on,
description=repo.description,
landing_rev=repo.landing_rev,
owner=repo.user.username,
fork_of=repo.fork.repo_name if repo.fork else None,
enable_statistics=repo.enable_statistics,
enable_locking=repo.enable_locking,
enable_downloads=repo.enable_downloads,
last_changeset=repo.changeset_cache,
locked_by=User.get(self.locked[0]).get_api_data() \
if self.locked[0] else None,
locked_date=time_to_datetime(self.locked[1]) \
if self.locked[1] else None
rc_config = Setting.get_app_settings()
repository_fields = str2bool(rc_config.get('repository_fields'))
if repository_fields:
for f in self.extra_fields:
data[f.field_key_prefixed] = f.field_value
def lock(cls, repo, user_id, lock_time=None):
if lock_time is not None:
lock_time = time.time()
repo.locked = [user_id, lock_time]
Session().add(repo)
Session().commit()
def unlock(cls, repo):
repo.locked = None
def getlock(cls, repo):
return repo.locked
def last_db_change(self):
return self.updated_on
def clone_uri_hidden(self):
clone_uri = self.clone_uri
if clone_uri:
import urlobject
url_obj = urlobject.URLObject(self.clone_uri)
if url_obj.password:
clone_uri = url_obj.with_password('*****')
return clone_uri
def clone_url(self, **override):
    """
    Build the clone URL for this repository.

    :param override: optional keys:
        'with_id'  - use the id-based URI template (DEFAULT_CLONE_URI_ID)
        'uri_tmpl' - explicit URI template string
        remaining keys are forwarded to get_clone_url().
    """
    import kallithea.lib.helpers as h
    qualified_home_url = h.canonical_url('home')

    uri_tmpl = None
    if 'with_id' in override:
        uri_tmpl = self.DEFAULT_CLONE_URI_ID
        del override['with_id']

    if 'uri_tmpl' in override:
        uri_tmpl = override['uri_tmpl']
        del override['uri_tmpl']

    # we didn't override our tmpl from **overrides
    if not uri_tmpl:
        uri_tmpl = self.DEFAULT_CLONE_URI
        # Bug fix: `except AttributeError:` had no matching `try:` —
        # restore the guard around the request-context template lookup.
        try:
            from pylons import tmpl_context as c
            uri_tmpl = c.clone_uri_tmpl
        except AttributeError:
            # in any case if we call this outside of request context,
            # ie, not having tmpl_context set up
            pass

    return get_clone_url(uri_tmpl=uri_tmpl,
                         qualified_home_url=qualified_home_url,
                         repo_name=self.repo_name,
                         repo_id=self.repo_id, **override)
def set_state(self, state):
    """Set this repository's repo_state attribute to `state`."""
    self.repo_state = state
#==========================================================================
# SCM PROPERTIES
def get_changeset(self, rev=None):
    """Return changeset `rev` (or the default changeset when None) via
    get_changeset_safe.
    NOTE(review): per the helper's name and get_landing_changeset's
    EmptyChangeset check, it presumably falls back to an EmptyChangeset
    on failure — confirm in kallithea.lib.utils2."""
    return get_changeset_safe(self.scm_instance, rev)
def get_landing_changeset(self):
    """Return the configured landing changeset; if it does not exist,
    fall back to the repository's default (tip) changeset."""
    _ref_type, ref_name = self.landing_rev
    changeset = self.get_changeset(ref_name)
    if not isinstance(changeset, EmptyChangeset):
        return changeset
    return self.get_changeset()
def update_changeset_cache(self, cs_cache=None):
    """
    Update cache of last changeset for repository, keys should be::

        short_id
        raw_id
        revision
        message
        date
        author

    :param cs_cache: changeset (or its json dict) to cache; when None the
        latest changeset of a fresh scm instance is used.
    """
    from kallithea.lib.vcs.backends.base import BaseChangeset
    if cs_cache is None:
        cs_cache = EmptyChangeset()
        # use no-cache version here
        scm_repo = self.scm_instance_no_cache()
        if scm_repo:
            cs_cache = scm_repo.get_changeset()

    if isinstance(cs_cache, BaseChangeset):
        cs_cache = cs_cache.__json__()

    # only rewrite the cache when the raw_id actually changed
    if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
        _default = datetime.datetime.fromtimestamp(0)
        last_change = cs_cache.get('date') or _default
        log.debug('updated repo %s with new cs cache %s',
                  self.repo_name, cs_cache)
        self.updated_on = last_change
        self.changeset_cache = cs_cache
    # NOTE(review): the log line below reads like the `else:` branch of the
    # condition above, and upstream presumably persisted via Session() in
    # the `if` branch — both structure points appear lost in extraction.
    log.debug('changeset_cache for %s already up to date with %s',
              self.repo_name, cs_cache['raw_id'])
def tip(self):
    """Tip changeset of this repository.
    NOTE(review): presumably a property/@LazyProperty (decorator lost) —
    author() below accesses `self.tip` without calling it."""
    return self.get_changeset('tip')

def author(self):
    """Author of the tip changeset.
    NOTE(review): presumably a property (decorator lost)."""
    return self.tip.author

def last_change(self):
    """Last-change timestamp reported by the scm instance.
    NOTE(review): presumably a property (decorator lost)."""
    return self.scm_instance.last_change
def get_comments(self, revisions=None):
    """
    Return comments for this repository grouped by revision.

    :param revisions: optional list of revisions to restrict the query to;
        an empty list short-circuits to [] to avoid an SQL ``IN ()``.
    """
    query = ChangesetComment.query() \
        .filter(ChangesetComment.repo == self)
    if revisions is not None:
        if not revisions:
            return []  # don't use sql 'in' on empty set
        query = query.filter(ChangesetComment.revision.in_(revisions))

    by_revision = collections.defaultdict(list)
    for comment in query.all():
        by_revision[comment.revision].append(comment)
    return by_revision
def statuses(self, revisions):
    """
    Returns statuses for this repository.
    PRs without any votes do _not_ show up as unreviewed.

    :param revisions: list of revisions to get statuses for
    :return: dict mapping revision ->
        [status, status_lbl, pr_id, pr_repo, pr_nice_id]
    """
    statuses = ChangesetStatus.query() \
        .filter(ChangesetStatus.repo == self) \
        .filter(ChangesetStatus.version == 0) \
        .filter(ChangesetStatus.revision.in_(revisions))

    grouped = {}
    for stat in statuses.all():
        pr_id = pr_nice_id = pr_repo = None
        if stat.pull_request:
            pr_id = stat.pull_request.pull_request_id
            pr_nice_id = PullRequest.make_nice_id(pr_id)
            pr_repo = stat.pull_request.other_repo.repo_name
        grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                  pr_id, pr_repo, pr_nice_id]
    # Bug fix: `grouped` was built but never returned, so callers always
    # received None.
    return grouped
def _repo_size(self):
    """Return the repository's on-disk size as a human-readable string."""
    from kallithea.lib import helpers as h
    log.debug('calculating repository size...')
    return h.format_byte_size(self.scm_instance.size)
# SCM CACHE INSTANCE
def set_invalidate(self):
    """Mark caches of this repo as invalid (delegates to
    CacheInvalidation.set_invalidate)."""
    CacheInvalidation.set_invalidate(self.repo_name)

def scm_instance_no_cache(self):
    """Return a freshly constructed (uncached) VCS backend instance."""
    return self.__get_instance()
def scm_instance(self):
    """
    Return the VCS backend instance for this repository, using the
    long-term cache when the 'vcs_full_cache' config option is enabled.

    NOTE(review): presumably a property upstream — other methods here
    access `self.scm_instance` without calling it; decorator lost in
    extraction.
    """
    full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
    if full_cache:
        return self.scm_instance_cached()
    # Bug fix: without this fallback the method implicitly returned None
    # whenever full caching was disabled.
    return self.scm_instance_no_cache()
def scm_instance_cached(self, valid_cache_keys=None):
    """Return the scm instance through the beaker 'long_term' cache
    region, invalidating the region entry first when CacheInvalidation
    reports it stale.

    :param valid_cache_keys: optional pre-fetched set of known-valid keys
        (see CacheInvalidation.get_valid_cache_keys).
    """
    @cache_region('long_term')
    def _c(repo_name):
        # NOTE(review): the body of this cached function (presumably
        # constructing and returning the scm instance) was lost in
        # extraction.
    rn = self.repo_name

    valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
    if not valid:
        log.debug('Cache for %s invalidated, getting new object', rn)
        region_invalidate(_c, None, rn)
    # NOTE(review): this log line may belong in an `else:` branch lost in
    # extraction — as written it logs a cache hit even after invalidation.
    log.debug('Getting scm_instance of %s from cache', rn)
    return _c(rn)
def __get_instance(self):
    """
    Instantiate the VCS backend object for this repository from its
    on-disk path, detecting the scm type (e.g. 'hg') first.
    """
    repo_full_path = self.repo_full_path
    alias = get_scm(repo_full_path)[0]
    log.debug('Creating instance of %s repository from %s',
              alias, repo_full_path)
    backend = get_backend(alias)

    if alias == 'hg':
        # hg needs a db-backed baseui
        repo = backend(safe_str(repo_full_path), create=False,
                       baseui=self._ui)
    else:
        # Bug fix: this assignment ran unconditionally, clobbering the hg
        # instance created above — restore the `else:` branch.
        repo = backend(repo_full_path, create=False)
    return repo
# NOTE(review): orphaned fragment — appears to be the body of a __json__()
# method whose 'def' line was lost in extraction; verify against upstream.
return dict(landing_rev = self.landing_rev)
class RepoGroup(Base, BaseModel):
    """
    A (possibly nested) named group of repositories.

    NOTE(review): this class body is an incomplete extraction — several
    fragments below (constraint tuple, a __repr__ body, loop internals and
    an api-data dict) lost their surrounding statements; hedged comments
    mark each spot. Verify against upstream kallithea/model/db.py.
    """
    __tablename__ = 'groups'
    # NOTE(review): fragments of a __table_args__ tuple (opening/closing lost):
    UniqueConstraint('group_name', 'group_parent_id'),
    CheckConstraint('group_id != group_parent_id'),
    __mapper_args__ = {'order_by': 'group_name'}

    # separator used when rendering the full group path for display
    SEP = ' » '

    group_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    group_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
    group_parent_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    # NOTE(review): fragment of a __repr__ body whose 'def' line was lost:
    return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
                                  self.group_name)

    def _generate_choice(cls, repo_group):
        """Return tuple with group_id and name as html literal"""
        # NOTE(review): presumably a @classmethod (decorator lost).
        from webhelpers.html import literal
        if repo_group is None:
            return (-1, u'-- %s --' % _('top level'))
        return repo_group.group_id, literal(cls.SEP.join(repo_group.full_path_splitted))

    def groups_choices(cls, groups):
        """Return tuples with group_id and name as html literal."""
        # sort on the displayed path split by SEP so nesting sorts naturally
        return sorted((cls._generate_choice(g) for g in groups),
                      key=lambda c: c[1].split(cls.SEP))

    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look a group up by name.
        NOTE(review): presumably a @classmethod; the case-sensitive and
        case-insensitive query branches appear fused (an if/else wrapper
        lost), and the FromCache(...) call below is truncated
        mid-expression."""
        gr = cls.query() \
            .filter(cls.group_name.ilike(group_name))
        .filter(cls.group_name == group_name)
        gr = gr.options(FromCache(
        return gr.scalar()

    def parents(self):
        """Chain of parent groups of this group.
        NOTE(review): presumably a property; the traversal loop body
        (accumulator list, while statement, parent stepping) was largely
        lost — only the recursion-limit guard remains."""
        parents_recursion_limit = 10
        if self.parent_group is None:
        cur_gr = self.parent_group
        cnt = 0
        cnt += 1
        if cnt == parents_recursion_limit:
            # this will prevent accidental infinite loops
            log.error(('more than %s parents found for group %s, stopping '
                       'recursive parent fetching' % (parents_recursion_limit, self)))

    def children(self):
        """Query for the direct child groups of this group.
        NOTE(review): presumably a property (decorator lost)."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    def name(self):
        """Last path segment of the group name.
        NOTE(review): presumably a property (decorator lost)."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    def full_path(self):
        """Full path of the group — identical to group_name.
        NOTE(review): presumably a property (decorator lost)."""
        return self.group_name

    def full_path_splitted(self):
        """group_name split into its path segments.
        NOTE(review): presumably a property (decorator lost)."""
        return self.group_name.split(RepoGroup.url_sep())

    def repositories(self):
        """Query for repositories directly in this group, ordered by name.
        NOTE(review): presumably a property — other methods here use
        `self.repositories` without calling it."""
        return Repository.query() \
            .filter(Repository.group == self) \
            .order_by(Repository.repo_name)

    def repositories_recursive_count(self):
        """Count repositories in this group and all descendant groups."""
        cnt = self.repositories.count()

        def children_count(group):
            # NOTE(review): this inner function's own `cnt = 0` initializer
            # appears lost in extraction — as written `cnt +=` would raise
            # UnboundLocalError.
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        """Collect this group plus all descendant groups (and their
        repositories when include_repos is True), depth-first."""
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """Recursive return all groups, with repositories in those groups."""
        return self._recursive_objects()

    def recursive_groups(self):
        """Returns all children groups for this group including children of children."""
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        Return new full group name based on parent and new name.

        :param group_name: new (unqualified) name for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    # NOTE(review): the lines below are the body of a get_api_data() method
    # whose 'def' line and dict(...) wrapper were lost in extraction.
    # Common function for generating api data
    group = self
    group_id=group.group_id,
    group_name=group.group_name,
    group_description=group.group_description,
    parent_group=group.parent_group.group_name if group.parent_group else None,
    repositories=[x.repo_name for x in group.repositories],
    owner=group.user.username
class Permission(Base, BaseModel):
    """
    Catalogue of all permission names, their translated descriptions,
    relative weights, and helpers for querying the default user's
    permissions.
    """
    __tablename__ = 'permissions'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    Index('p_perm_name_idx', 'permission_name'),

    # (permission key, translated human-readable description) pairs
    PERMS = [
        ('hg.admin', _('Kallithea Administrator')),
        ('repository.none', _('Default user has no access to new repositories')),
        ('repository.read', _('Default user has read access to new repositories')),
        ('repository.write', _('Default user has write access to new repositories')),
        ('repository.admin', _('Default user has admin access to new repositories')),
        ('group.none', _('Default user has no access to new repository groups')),
        ('group.read', _('Default user has read access to new repository groups')),
        ('group.write', _('Default user has write access to new repository groups')),
        ('group.admin', _('Default user has admin access to new repository groups')),
        ('usergroup.none', _('Default user has no access to new user groups')),
        ('usergroup.read', _('Default user has read access to new user groups')),
        ('usergroup.write', _('Default user has write access to new user groups')),
        ('usergroup.admin', _('Default user has admin access to new user groups')),
        ('hg.repogroup.create.false', _('Only admins can create repository groups')),
        ('hg.repogroup.create.true', _('Non-admins can create repository groups')),
        ('hg.usergroup.create.false', _('Only admins can create user groups')),
        ('hg.usergroup.create.true', _('Non-admins can create user groups')),
        ('hg.create.none', _('Only admins can create top level repositories')),
        ('hg.create.repository', _('Non-admins can create top level repositories')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
        ('hg.fork.none', _('Only admins can fork repositories')),
        ('hg.fork.repository', _('Non-admins can fork repositories')),
        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User registration with manual account activation')),
        ('hg.register.auto_activate', _('User registration with automatic account activation')),
        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),
    ]

    #definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.extern_activate.auto',
    # NOTE(review): closing bracket of this list appears lost in extraction.

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,
        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,
        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,
        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,
        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    # NOTE(review): closing brace of this dict appears lost in extraction.

    permission_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    permission_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
    permission_longname = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)

    # NOTE(review): fragment of a __repr__ body ('def' line lost):
    return u"<%s('%s:%s')>" % (
        self.__class__.__name__, self.permission_id, self.permission_name
    # NOTE(review): fragment of a get-by-key classmethod ('def' line lost):
    return cls.query().filter(cls.permission_name == key).scalar()

    def get_default_perms(cls, default_user_id):
        """Build a query of (UserRepoToPerm, Repository, Permission) rows
        for the given default user.
        NOTE(review): presumably a @classmethod; no return of the built
        query is visible — likely lost in extraction."""
        q = Session().query(UserRepoToPerm, Repository, cls) \
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id)) \
            .join((cls, UserRepoToPerm.permission_id == cls.permission_id)) \
            .filter(UserRepoToPerm.user_id == default_user_id)

    def get_default_group_perms(cls, default_user_id):
        """Like get_default_perms, but for repository-group permissions.
        NOTE(review): presumably a @classmethod; no visible return."""
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls) \
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id)) \
            .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id)) \
            .filter(UserRepoGroupToPerm.user_id == default_user_id)

    def get_default_user_group_perms(cls, default_user_id):
        """Like get_default_perms, but for user-group permissions.
        NOTE(review): presumably a @classmethod; no visible return."""
        q = Session().query(UserUserGroupToPerm, UserGroup, cls) \
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id)) \
            .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id)) \
            .filter(UserUserGroupToPerm.user_id == default_user_id)
class UserRepoToPerm(Base, BaseModel):
    """Grants a single user one permission on a single repository."""
    __tablename__ = 'repo_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'repository_id', 'permission_id'),

    repo_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    # NOTE(review): user_id/repository_id columns and user/repository
    # relationships (assigned in create() below) are not visible here —
    # presumably lost in extraction.
    permission = relationship('Permission')

    def create(cls, user, repository, permission):
        """Create, add to session and return a new permission grant.
        NOTE(review): presumably a @classmethod (decorator lost)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    # NOTE(review): fragment of a __unicode__/__repr__ body ('def' lost):
    return u'<%s => %s >' % (self.user, self.repository)
class UserUserGroupToPerm(Base, BaseModel):
    """Grants a single user one permission on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'user_group_id', 'permission_id'),

    user_user_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    # NOTE(review): user_id/permission_id columns and user/permission
    # relationships used below are not visible — presumably lost.
    user_group = relationship('UserGroup')

    def create(cls, user, user_group, permission):
        """NOTE(review): presumably a @classmethod; most of this factory's
        body (n = cls(), other assignments, Session().add, return) was
        lost in extraction — only one assignment remains."""
        n.user_group = user_group

    # NOTE(review): fragment of a __unicode__/__repr__ body ('def' lost):
    return u'<%s => %s >' % (self.user, self.user_group)
class UserToPerm(Base, BaseModel):
    """Grants a single user one global permission."""
    __tablename__ = 'user_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'permission_id'),

    user_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): user_id/permission_id columns and the user/permission
    # relationships referenced below are not visible — presumably lost.

    # NOTE(review): fragment of a __unicode__/__repr__ body ('def' lost):
    return u'<%s => %s >' % (self.user, self.permission)
class UserGroupRepoToPerm(Base, BaseModel):
    """Grants a user group one permission on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),

    users_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): remaining columns/relationships of this class are not
    # visible — presumably lost in extraction.

    def create(cls, users_group, repository, permission):
        """NOTE(review): presumably a @classmethod; most of this factory's
        body was lost in extraction — only one assignment remains."""
        n.users_group = users_group

    # NOTE(review): fragment of a __unicode__/__repr__ body ('def' lost):
    return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Grants one user group a permission on another (target) user group;
    the check constraint forbids granting a group permission on itself."""
    __tablename__ = 'user_group_user_group_to_perm'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
    CheckConstraint('target_user_group_id != user_group_id'),

    user_group_user_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    target_user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    # NOTE(review): user_group_id/permission_id columns are not visible —
    # presumably lost in extraction.

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')

    def create(cls, target_user_group, user_group, permission):
        """NOTE(review): presumably a @classmethod; most of this factory's
        body was lost in extraction — only one assignment remains."""
        n.target_user_group = target_user_group

    # NOTE(review): fragment of a __unicode__/__repr__ body ('def' lost):
    return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
class UserGroupToPerm(Base, BaseModel):
    """Grants a user group one global permission.
    NOTE(review): all columns/relationships of this class are missing from
    this extraction — only the table name and a constraint fragment remain."""
    __tablename__ = 'users_group_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('users_group_id', 'permission_id',),
class UserRepoGroupToPerm(Base, BaseModel):
    """Grants a single user one permission on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'group_id', 'permission_id'),

    group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    # NOTE(review): user_id/permission_id columns and relationships are not
    # visible — presumably lost in extraction.

    def create(cls, user, repository_group, permission):
        """NOTE(review): presumably a @classmethod; most of this factory's
        body was lost in extraction — only one assignment remains."""
        n.group = repository_group
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Grants a user group one permission on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('users_group_id', 'group_id'),

    users_group_repo_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): remaining columns/relationships are not visible —
    # presumably lost in extraction.

    def create(cls, user_group, repository_group, permission):
        """NOTE(review): presumably a @classmethod; most of this factory's
        body was lost in extraction — only one assignment remains."""
        n.users_group = user_group
class Statistics(Base, BaseModel):
    """Cached per-repository statistics blobs (one row per repository)."""
    __tablename__ = 'statistics'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('repository_id'),

    stat_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column(Integer(), nullable=False)
    commit_activity = Column(LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column(LargeBinary(), nullable=False)#JSON data
    languages = Column(LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
class UserFollowing(Base, BaseModel):
    """Records that a user follows either a repository or another user."""
    __tablename__ = 'user_followings'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'follows_repository_id'),
    UniqueConstraint('user_id', 'follows_user_id'),

    user_following_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): the user_id column itself (referenced by the `user`
    # relationship below) is not visible — presumably lost in extraction.
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column(DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    def get_repo_followers(cls, repo_id):
        """Query for all followings of the given repository.
        NOTE(review): presumably a @classmethod (decorator lost)."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
class CacheInvalidation(Base, BaseModel):
    """
    Per-instance registry of cache keys and whether they still are valid.

    One row per (instance-prefixed) cache key; an instance sets
    cache_active True while caching, other instances flip it to False to
    force a refresh.
    """
    __tablename__ = 'cache_invalidation'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    UniqueConstraint('cache_key'),
    Index('key_idx', 'cache_key'),

    # cache_id, not used
    cache_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # cache_key as created by _get_cache_key
    cache_key = Column(String(255, convert_unicode=False))
    # cache_args is a repo_name
    cache_args = Column(String(255, convert_unicode=False))
    # instance sets cache_active True when it is caching, other instances set
    # cache_active to False to indicate that this cache is invalid
    cache_active = Column(Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, repo_name=''):
        self.cache_key = cache_key
        self.cache_args = repo_name
        self.cache_active = False

    # NOTE(review): fragment of a __repr__ body (the 'def' line and format
    # string were lost in extraction):
    self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        """Split cache_key into (prefix, repo_name, suffix) around cache_args."""
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        get prefix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.html.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key.
        """
        return self._cache_key_partition()[2]

    def clear_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries thus stale.

        NOTE(review): presumably a @classmethod (decorator lost).
        """
        cls.query().delete()

    def _get_cache_key(cls, key):
        """
        Wrapper for generating a unique cache key for this instance and "key".
        key must / will start with a repo_name which will be stored in .cache_args .

        NOTE(review): presumably a @classmethod (decorator lost).
        """
        prefix = kallithea.CONFIG.get('instance_id', '')
        return "%s%s" % (prefix, key)

    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        NOTE(review): presumably a @classmethod; an `else:` separating the
        delete branch from the deactivate/add lines, and a trailing
        Session().commit(), appear lost in extraction.
        """
        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
        log.debug('for repo %s got %s invalidation objects',
                  safe_str(repo_name), inv_objs)
        for inv_obj in inv_objs:
            log.debug('marking %s key for invalidation based on repo_name=%s',
                      inv_obj, safe_str(repo_name))
            if delete:
                Session().delete(inv_obj)
            inv_obj.cache_active = False
            Session().add(inv_obj)

    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
        """
        Mark this cache key as active and currently cached.
        Return True if the existing cache registration still was valid.
        Return False to indicate that it had been invalidated and caches should be refreshed.

        NOTE(review): presumably a @classmethod; the `try:`/Session
        add-commit around activation was lost in extraction — the
        `except exc.IntegrityError:` below has no matching `try`, and the
        condition reassigns instead of returning early.
        """
        key = (repo_name + '_' + kind) if kind else repo_name
        cache_key = cls._get_cache_key(key)

        if valid_cache_keys and cache_key in valid_cache_keys:
            return True

        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if not inv_obj:
            inv_obj = CacheInvalidation(cache_key, repo_name)
        if inv_obj.cache_active:
        inv_obj.cache_active = True
        except exc.IntegrityError:
        raise
        # TOCTOU - another thread added the key at the same time; no further action required

    def get_valid_cache_keys(cls):
        """
        Return opaque object with information of which caches still are valid
        and can be used without checking for invalidation.

        NOTE(review): presumably a @classmethod (decorator lost).
        """
        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
class ChangesetComment(Base, BaseModel):
    """A comment on a changeset or a pull request, optionally attached
    inline to a file path and line number."""
    __tablename__ = 'changeset_comments'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    Index('cc_revision_idx', 'revision'),
    Index('cc_pull_request_id_idx', 'pull_request_id'),

    comment_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column(String(40))
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'))
    line_no = Column(Unicode(10))
    f_path = Column(Unicode(1000))
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column(UnicodeText(25000), nullable=False)
    modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # NOTE(review): a created_on column may also exist upstream — not
    # visible in this extraction.

    author = relationship('User')
    repo = relationship('Repository')
    # status_change is frequently used directly in templates - make it a lazy
    # join to avoid fetching each related ChangesetStatus on demand.
    # There will only be one ChangesetStatus referencing each comment so the join will not explode.
    status_change = relationship('ChangesetStatus',
                                 cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest')

    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param revision: filter by changeset revision
        :param pull_request_id: filter by pull request

        NOTE(review): presumably a @classmethod; the final return of the
        built query (e.g. `return q.all()`) appears lost in extraction.
        """
        q = Session().query(User) \
            .join(ChangesetComment.author)
        if revision is not None:
            q = q.filter(cls.revision == revision)
        elif pull_request_id is not None:
            q = q.filter(cls.pull_request_id == pull_request_id)

    def url(self):
        """Anchor URL of this comment on its changeset or pull request page.
        NOTE(review): `h` (kallithea.lib.helpers) is not imported in the
        visible body — the import line appears lost in extraction."""
        anchor = "comment-%s" % self.comment_id
        if self.revision:
            return h.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
        elif self.pull_request_id is not None:
            return self.pull_request.url(anchor=anchor)
class ChangesetStatus(Base, BaseModel):
    """A review-status vote on a changeset, optionally tied to a pull
    request and to the comment that set it."""
    __tablename__ = 'changeset_statuses'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    Index('cs_revision_idx', 'revision'),
    Index('cs_version_idx', 'version'),
    Index('cs_pull_request_id_idx', 'pull_request_id'),
    Index('cs_changeset_comment_id_idx', 'changeset_comment_id'),
    Index('cs_pull_request_id_user_id_version_idx', 'pull_request_id', 'user_id', 'version'),
    UniqueConstraint('repo_id', 'revision', 'version'),

    # possible values of the `status` column
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under review")),
    # NOTE(review): closing bracket of STATUSES appears lost in extraction.

    changeset_status_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    # NOTE(review): a repo_id column and repo/author relationships (used by
    # Repository.statuses and the fragment below) are not visible —
    # presumably lost in extraction.
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column(String(40), nullable=False)
    status = Column(String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column(Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    modified_at = Column(DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column(Integer(), nullable=False, default=0)
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    comment = relationship('ChangesetComment')

    # NOTE(review): fragment of a __repr__ body (surrounding lines lost):
    self.status, self.author

    def get_status_lbl(cls, value):
        """Translated label for the given status value.
        NOTE(review): presumably a @classmethod (decorator lost)."""
        return dict(cls.STATUSES).get(value)

    def status_lbl(self):
        """Translated label for this row's status.
        NOTE(review): presumably a @property (decorator lost)."""
        return ChangesetStatus.get_status_lbl(self.status)
class PullRequest(Base, BaseModel):
    """A pull request from org_repo/org_ref into other_repo/other_ref."""
    __tablename__ = 'pull_requests'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    Index('pr_org_repo_id_idx', 'org_repo_id'),
    Index('pr_other_repo_id_idx', 'other_repo_id'),

    # values for .status
    STATUS_NEW = u'new'
    STATUS_CLOSED = u'closed'

    pull_request_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    title = Column(Unicode(255), nullable=True)
    description = Column(UnicodeText(10240))
    status = Column(Unicode(255), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
    # NOTE(review): created_on/user_id columns (implied by the `owner`
    # relationship below) are not visible — presumably lost in extraction.
    updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _revisions = Column('revisions', UnicodeText(20500)) # 500 revisions max
    org_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    org_ref = Column(Unicode(255), nullable=False)
    other_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    other_ref = Column(Unicode(255), nullable=False)

    def revisions(self):
        """Revisions of this PR as a list (stored colon-separated).
        NOTE(review): presumably decorated @property — the `@revisions.setter`
        below requires it; decorator lost in extraction."""
        return self._revisions.split(':')

    @revisions.setter
    def revisions(self, val):
        # store as a single colon-joined unicode string
        self._revisions = safe_unicode(':'.join(val))

    def org_ref_parts(self):
        """org_ref split on ':' into its parts.
        NOTE(review): presumably a @property — url() below accesses it
        without calling."""
        return self.org_ref.split(':')

    def other_ref_parts(self):
        """other_ref split on ':' into its parts.
        NOTE(review): presumably a @property (decorator lost)."""
        return self.other_ref.split(':')

    owner = relationship('User')
    # NOTE(review): the argument lists of `reviewers` and `comments` below
    # are truncated mid-call in this extraction.
    reviewers = relationship('PullRequestReviewers',
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
    statuses = relationship('ChangesetStatus')
    comments = relationship('ChangesetComment',

    def is_closed(self):
        """True when this PR's status is STATUS_CLOSED."""
        return self.status == self.STATUS_CLOSED

    def user_review_status(self, user_id):
        """Return the user's latest status votes on PR"""
        # note: no filtering on repo - that would be redundant
        # NOTE(review): the final term of this query chain (e.g. a
        # `.first()` after order_by) appears lost — the trailing backslash
        # dangles as written.
        status = ChangesetStatus.query() \
            .filter(ChangesetStatus.pull_request == self) \
            .filter(ChangesetStatus.user_id == user_id) \
            .order_by(ChangesetStatus.version) \
        return str(status.status) if status else ''

    def make_nice_id(cls, pull_request_id):
        '''Return pull request id nicely formatted for displaying'''
        # NOTE(review): presumably a @classmethod (decorator lost).
        return '#%s' % pull_request_id

    def nice_id(self):
        '''Return the id of this pull request, nicely formatted for displaying'''
        return self.make_nice_id(self.pull_request_id)

    # NOTE(review): orphaned fragment — likely from a get_api_data() dict
    # whose surrounding lines were lost in extraction:
    revisions=self.revisions

    def url(self, **kwargs):
        """URL of this pull request's show page.
        NOTE(review): the import of `h` (kallithea.lib.helpers) and the
        `else:` branch structure around the two `s = ...` assignments
        appear lost; the final h.url(...) call is truncated mid-call."""
        canonical = kwargs.pop('canonical', None)
        b = self.org_ref_parts[1]
        if b != self.other_ref_parts[1]:
            s = '/_/' + b
        s = '/_/' + self.title
        kwargs['extra'] = urlreadable(s)
        if canonical:
            return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
                                   pull_request_id=self.pull_request_id, **kwargs)
        return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
class PullRequestReviewers(Base, BaseModel):
    """Association of one reviewer (user) with one pull request."""
    __tablename__ = 'pull_request_reviewers'
    # NOTE(review): a __table_args__ definition may have been lost here.

    def __init__(self, user=None, pull_request=None):
        self.user = user
        self.pull_request = pull_request

    pull_requests_reviewers_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
    # NOTE(review): the user/pull_request relationships assigned in
    # __init__ are not visible — presumably lost in extraction.
class Notification(Base, BaseModel):
    """An in-app notification, fanned out to its recipients through
    UserNotification association rows."""
    __tablename__ = 'notifications'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    Index('notification_type_idx', 'type'),

    # notification type tags stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    subject = Column(Unicode(512), nullable=True)
    body = Column(UnicodeText(50000), nullable=True)
    created_by = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
    # NOTE(review): a created_on column (assigned in create() below) is not
    # visible here — presumably lost in extraction.
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', cascade="all, delete-orphan")

    def recipients(self):
        """Users this notification is addressed to, ordered by user id.
        NOTE(review): presumably a @property (decorator lost)."""
        return [x.user for x in UserNotification.query()
                .filter(UserNotification.notification == self)
                .order_by(UserNotification.user_id.asc()).all()]

    def create(cls, created_by, subject, body, recipients, type_=None):
        """Create a notification plus one UserNotification per recipient
        and flush so notification_id is assigned.
        NOTE(review): presumably a @classmethod (decorator lost)."""
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification
            assoc.user_id = u.user_id
            Session().add(assoc)
        Session().add(notification)
        Session().flush()  # assign notification.notification_id
        return notification

    def description(self):
        """Human-readable description rendered by NotificationModel.
        NOTE(review): presumably a @property (decorator lost)."""
        from kallithea.model.notification import NotificationModel
        return NotificationModel().make_description(self)
class UserNotification(Base, BaseModel):
    """Association of one Notification with one recipient user."""
    __tablename__ = 'user_to_notification'
    # NOTE(review): fragment of a __table_args__ tuple (opening lost):
    UniqueConstraint('user_id', 'notification_id'),

    user_id = Column(Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column(Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column(Boolean, default=False)
    sent_on = Column(DateTime(timezone=False), nullable=True, unique=None)

    notification = relationship('Notification')
    # NOTE(review): a `user` relationship (used by Notification.recipients)
    # is not visible here — presumably lost in extraction.

    def mark_as_read(self):
        """Flag this notification as read for this user.
        NOTE(review): only the flag assignment is visible — upstream may
        also persist via Session; confirm."""
        self.read = True
class Gist(Base, BaseModel):
    """A public or private gist, stored as its own small repository under
    the gist store location."""
    __tablename__ = 'gists'
    # NOTE(review): fragments of a __table_args__ tuple (opening lost):
    Index('g_gist_access_id_idx', 'gist_access_id'),
    Index('g_created_on_idx', 'created_on'),

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    gist_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
    gist_access_id = Column(Unicode(250))
    gist_description = Column(UnicodeText(1024))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column(Float(53), nullable=False)
    gist_type = Column(Unicode(128), nullable=False)
    # NOTE(review): created_on/modified_at columns (referenced by the index
    # above and the api-data fragment below) are not visible — presumably
    # lost in extraction.

    # NOTE(review): fragment of a __repr__ body ('def' line lost):
    return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
    # NOTE(review): fragment of a get-by-id classmethod (surrounding lost):
    res = cls.query().filter(cls.gist_access_id == id_).scalar()

    def get_by_access_id(cls, gist_access_id):
        """Look a gist up by its access id.
        NOTE(review): presumably a @classmethod (decorator lost)."""
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Public URL of this gist, honouring the optional 'gist_alias_url'
        config template ({gistid} placeholder).
        NOTE(review): `h` (kallithea.lib.helpers) is not imported in the
        visible body — the import appears lost in extraction."""
        alias_url = kallithea.CONFIG.get('gist_alias_url')
        if alias_url:
            return alias_url.replace('{gistid}', self.gist_access_id)
        return h.canonical_url('gist', gist_id=self.gist_access_id)

    # NOTE(review): fragment of a base_path classmethod — the 'def' line
    # and the start of the Ui query chain (`q = ...`) were lost.
    # Returns base path where all gists are stored
    from kallithea.model.gist import GIST_STORE_LOC
    .filter(Ui.ui_key == URL_SEP)
    return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    # NOTE(review): fragment of a get_api_data() method — 'def' line and
    # dict(...) wrapper lost in extraction.
    # Common function for generating gist related data for API
    gist = self
    gist_id=gist.gist_id,
    type=gist.gist_type,
    access_id=gist.gist_access_id,
    description=gist.gist_description,
    url=gist.gist_url(),
    expires=gist.gist_expires,
    created_on=gist.created_on,

    ## SCM functions
    # NOTE(review): fragment of a scm_instance property/method — its 'def'
    # line was lost in extraction.
    from kallithea.lib.vcs import get_repo
    base_path = self.base_path()
    return get_repo(os.path.join(*map(safe_str,
                                      [base_path, self.gist_access_id])))
class DbMigrateVersion(Base, BaseModel):
    """Schema-version bookkeeping table.
    NOTE(review): the table/column names match the sqlalchemy-migrate
    versioning table layout — confirm against the migration tooling."""
    __tablename__ = 'db_migrate_version'
    repository_id = Column(String(250), nullable=False, unique=True, primary_key=True)
    repository_path = Column(Text)
    version = Column(Integer)
Status change: