Changeset 1f92cded1bd2 on branch 'default' - not reviewed
Mads Kiilerich (mads) <mads@kiilerich.com> - 5 years ago, 2020-12-29 22:07:41

celery: move Whoosh indexing task to whoosh library where it belongs

Avoid bundling everything from many different layers in one big task library.

This is more feasible now that we don't need kallithea.CELERY_APP set at import
time.

5 files changed with 18 insertions and 14 deletions:
0 comments (0 inline, 0 general)
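
Background for the change: Celery only registers tasks from modules it has been
told to import, so relocating a task means updating both the call sites and the
Celery 'imports' configuration. A minimal sketch of that mechanism with plain
Celery (module and task names here are illustrative, not Kallithea's):

    import celery

    app = celery.Celery(broker='memory://')
    # Each listed module is imported by the worker; its @app.task functions
    # are thereby registered under their fully qualified names.
    app.conf.imports = ['myapp.indexing']

This changeset applies the same idea: whoosh_index moves into
kallithea.lib.indexers.daemon, that module is added to CeleryConfig.imports in
kallithea/lib/celery_app.py, and the caller in the settings controller is
updated. Changed lines in the files below are shown with '-' (removed) and '+'
(added); unmarked lines are unchanged context.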
kallithea/controllers/admin/settings.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.settings
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
settings controller for Kallithea admin
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
import formencode
 
from formencode import htmlfill
 
from tg import config, request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound
 

	
 
import kallithea
 
+import kallithea.lib.indexers.daemon
 
from kallithea.controllers import base
 
from kallithea.lib import webutils
 
from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired
 
from kallithea.lib.utils import repo2db_mapper, set_app_settings
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.lib.vcs import VCSError
 
from kallithea.lib.webutils import url
 
from kallithea.model import async_tasks, db, meta
 
from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm
 
from kallithea.model.notification import EmailNotificationModel
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class SettingsController(base.BaseController):
 

	
 
    @LoginRequired(allow_default_user=True)
 
    def _before(self, *args, **kwargs):
 
        super(SettingsController, self)._before(*args, **kwargs)
 

	
 
    def _get_hg_ui_settings(self):
 
        ret = db.Ui.query().all()
 

	
 
        settings = {}
 
        for each in ret:
 
            k = each.ui_section + '_' + each.ui_key
 
            v = each.ui_value
 
            if k == 'paths_/':
 
                k = 'paths_root_path'
 

	
 
            k = k.replace('.', '_')
 

	
 
            if each.ui_section in ['hooks', 'extensions']:
 
                v = each.ui_active
 

	
 
            settings[k] = v
 
        return settings
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_vcs(self):
 
        c.active = 'vcs'
 
        if request.POST:
 
            application_form = ApplicationUiSettingsForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                     base.render('admin/settings/settings.html'),
 
                     defaults=errors.value,
 
                     errors=errors.error_dict or {},
 
                     prefix_error=False,
 
                     encoding="UTF-8",
 
                     force_defaults=False)
 

	
 
            try:
 
                if c.visual.allow_repo_location_change:
 
                    sett = db.Ui.get_by_key('paths', '/')
 
                    sett.ui_value = form_result['paths_root_path']
 

	
 
                # HOOKS
 
                sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE)
 
                sett.ui_active = form_result['hooks_changegroup_update']
 

	
 
                sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_REPO_SIZE)
 
                sett.ui_active = form_result['hooks_changegroup_repo_size']
 

	
 
                ## EXTENSIONS
 
                sett = db.Ui.get_or_create('extensions', 'largefiles')
 
                sett.ui_active = form_result['extensions_largefiles']
 

	
 
#                sett = db.Ui.get_or_create('extensions', 'hggit')
 
#                sett.ui_active = form_result['extensions_hggit']
 

	
 
                meta.Session().commit()
 

	
 
                webutils.flash(_('Updated VCS settings'), category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                webutils.flash(_('Error occurred while updating '
 
                          'application settings'), category='error')
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_mapping(self):
 
        c.active = 'mapping'
 
        if request.POST:
 
            rm_obsolete = request.POST.get('destroy', False)
 
            install_git_hooks = request.POST.get('hooks', False)
 
            overwrite_git_hooks = request.POST.get('hooks_overwrite', False)
 
            invalidate_cache = request.POST.get('invalidate', False)
 
            log.debug('rescanning repo location with destroy obsolete=%s, '
 
                      'install git hooks=%s and '
 
                      'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks))
 

	
 
            filesystem_repos = ScmModel().repo_scan()
 
            added, removed = repo2db_mapper(filesystem_repos, rm_obsolete,
 
                                            install_git_hooks=install_git_hooks,
 
                                            user=request.authuser.username,
 
                                            overwrite_git_hooks=overwrite_git_hooks)
 
            added_msg = webutils.HTML(', ').join(
 
                webutils.link_to(safe_str(repo_name), webutils.url('summary_home', repo_name=repo_name)) for repo_name in added
 
            ) or '-'
 
            removed_msg = webutils.HTML(', ').join(
 
                safe_str(repo_name) for repo_name in removed
 
            ) or '-'
 
            webutils.flash(webutils.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
 
                    (added_msg, removed_msg), category='success')
 

	
 
            if invalidate_cache:
 
                log.debug('invalidating all repositories cache')
 
                i = 0
 
                for repo in db.Repository.query():
 
                    try:
 
                        ScmModel().mark_for_invalidation(repo.repo_name)
 
                        i += 1
 
                    except VCSError as e:
 
                        log.warning('VCS error invalidating %s: %s', repo.repo_name, e)
 
                webutils.flash(_('Invalidated %s repositories') % i, category='success')
 

	
 
            raise HTTPFound(location=url('admin_settings_mapping'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_global(self):
 
        c.active = 'global'
 
        if request.POST:
 
            application_form = ApplicationSettingsForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    base.render('admin/settings/settings.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 

	
 
            try:
 
                for setting in (
 
                    'title',
 
                    'realm',
 
                    'ga_code',
 
                    'captcha_public_key',
 
                    'captcha_private_key',
 
                ):
 
                    db.Setting.create_or_update(setting, form_result[setting])
 

	
 
                meta.Session().commit()
 
                set_app_settings(config)
 
                webutils.flash(_('Updated application settings'), category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                webutils.flash(_('Error occurred while updating '
 
                          'application settings'),
 
                          category='error')
 

	
 
            raise HTTPFound(location=url('admin_settings_global'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_visual(self):
 
        c.active = 'visual'
 
        if request.POST:
 
            application_form = ApplicationVisualisationForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    base.render('admin/settings/settings.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 

	
 
            try:
 
                settings = [
 
                    ('show_public_icon', 'show_public_icon', 'bool'),
 
                    ('show_private_icon', 'show_private_icon', 'bool'),
 
                    ('stylify_metalabels', 'stylify_metalabels', 'bool'),
 
                    ('repository_fields', 'repository_fields', 'bool'),
 
                    ('dashboard_items', 'dashboard_items', 'int'),
 
                    ('admin_grid_items', 'admin_grid_items', 'int'),
 
                    ('show_version', 'show_version', 'bool'),
 
                    ('use_gravatar', 'use_gravatar', 'bool'),
 
                    ('gravatar_url', 'gravatar_url', 'unicode'),
 
                    ('clone_uri_tmpl', 'clone_uri_tmpl', 'unicode'),
 
                    ('clone_ssh_tmpl', 'clone_ssh_tmpl', 'unicode'),
 
                ]
 
                for setting, form_key, type_ in settings:
 
                    db.Setting.create_or_update(setting, form_result[form_key], type_)
 

	
 
                meta.Session().commit()
 
                set_app_settings(config)
 
                webutils.flash(_('Updated visualisation settings'),
 
                        category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                webutils.flash(_('Error occurred during updating '
 
                          'visualisation settings'),
 
                        category='error')
 

	
 
            raise HTTPFound(location=url('admin_settings_visual'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_email(self):
 
        c.active = 'email'
 
        if request.POST:
 
            test_email = request.POST.get('test_email')
 
            test_email_subj = 'Kallithea test email'
 
            test_body = ('Kallithea Email test, '
 
                               'Kallithea version: %s' % c.kallithea_version)
 
            if not test_email:
 
                webutils.flash(_('Please enter email address'), category='error')
 
                raise HTTPFound(location=url('admin_settings_email'))
 

	
 
            test_email_txt_body = EmailNotificationModel() \
 
                .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT,
 
                                'txt', body=test_body)
 
            test_email_html_body = EmailNotificationModel() \
 
                .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT,
 
                                'html', body=test_body)
 

	
 
            recipients = [test_email] if test_email else None
 

	
 
            async_tasks.send_email(recipients, test_email_subj,
 
                             test_email_txt_body, test_email_html_body)
 

	
 
            webutils.flash(_('Send email task created'), category='success')
 
            raise HTTPFound(location=url('admin_settings_email'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        c.ini = kallithea.CONFIG
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_hooks(self):
 
        c.active = 'hooks'
 
        if request.POST:
 
            if c.visual.allow_custom_hooks_settings:
 
                ui_key = request.POST.get('new_hook_ui_key')
 
                ui_value = request.POST.get('new_hook_ui_value')
 

	
 
                hook_id = request.POST.get('hook_id')
 

	
 
                try:
 
                    ui_key = ui_key and ui_key.strip()
 
                    if ui_key in (x.ui_key for x in db.Ui.get_custom_hooks()):
 
                        webutils.flash(_('Hook already exists'), category='error')
 
                    elif ui_key in (x.ui_key for x in db.Ui.get_builtin_hooks()):
 
                        webutils.flash(_('Builtin hooks are read-only. Please use another hook name.'), category='error')
 
                    elif ui_value and ui_key:
 
                        db.Ui.create_or_update_hook(ui_key, ui_value)
 
                        webutils.flash(_('Added new hook'), category='success')
 
                    elif hook_id:
 
                        db.Ui.delete(hook_id)
 
                        meta.Session().commit()
 

	
 
                    # check for edits
 
                    update = False
 
                    _d = request.POST.dict_of_lists()
 
                    for k, v, ov in zip(_d.get('hook_ui_key', []),
 
                                        _d.get('hook_ui_value_new', []),
 
                                        _d.get('hook_ui_value', [])):
 
                        if v != ov:
 
                            db.Ui.create_or_update_hook(k, v)
 
                            update = True
 

	
 
                    if update:
 
                        webutils.flash(_('Updated hooks'), category='success')
 
                    meta.Session().commit()
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    webutils.flash(_('Error occurred during hook creation'),
 
                            category='error')
 

	
 
                raise HTTPFound(location=url('admin_settings_hooks'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        c.hooks = db.Ui.get_builtin_hooks()
 
        c.custom_hooks = db.Ui.get_custom_hooks()
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_search(self):
 
        c.active = 'search'
 
        if request.POST:
 
            repo_location = self._get_hg_ui_settings()['paths_root_path']
 
            full_index = request.POST.get('full_index', False)
 
-            async_tasks.whoosh_index(repo_location, full_index)
+            kallithea.lib.indexers.daemon.whoosh_index(repo_location, full_index)
 
            webutils.flash(_('Whoosh reindex task scheduled'), category='success')
 
            raise HTTPFound(location=url('admin_settings_search'))
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_system(self):
 
        c.active = 'system'
 

	
 
        defaults = db.Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        c.ini = kallithea.CONFIG
 
        server_info = db.Setting.get_server_info()
 
        for key, val in server_info.items():
 
            setattr(c, key, val)
 

	
 
        return htmlfill.render(
 
            base.render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
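
All the settings_* methods above follow the same validate-or-re-render pattern
with formencode. A standalone sketch of just the validation part, outside the
TurboGears request cycle (DemoForm and its fields are made up for illustration):

    import formencode
    from formencode import validators

    class DemoForm(formencode.Schema):
        title = validators.UnicodeString(not_empty=True)
        dashboard_items = validators.Int(min=1)

    try:
        result = DemoForm().to_python({'title': 'x', 'dashboard_items': '10'})
        # values arrive converted: {'title': 'x', 'dashboard_items': 10}
    except formencode.Invalid as errors:
        # errors.error_dict maps field names to messages, as consumed by
        # htmlfill.render() in the methods above
        print(errors.error_dict or {})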
kallithea/lib/celery_app.py
 
# -*- coding: utf-8 -*-
 

	
 
"""
 
Kallithea wrapper of Celery
 

	
 
The Celery configuration is in the Kallithea ini file but must be converted to an
 
entirely different format before Celery can use it.
 

	
 
We read the configuration from tg.config at module import time. This module can
 
thus not be imported in global scope but must be imported on demand in function
 
scope after tg.config has been initialized.
 

	
 
To make sure that the config really has been initialized, we check one of the
 
mandatory settings.
 
"""
 

	
 
import logging
 

	
 

	
 
class CeleryConfig(object):
 
-    imports = ['kallithea.model.async_tasks']
+    imports = [
+        'kallithea.lib.indexers.daemon',
+        'kallithea.model.async_tasks',
+    ]
 
    task_always_eager = False
 

	
 
list_config_names = {'imports', 'accept_content'}
 

	
 

	
 
desupported = set([
 
    'broker.url',
 
    'celery.accept.content',
 
    'celery.always.eager',
 
    'celery.amqp.task.result.expires',
 
    'celeryd.concurrency',
 
    'celeryd.max.tasks.per.child',
 
    'celery.result.backend',  # Note: the .ini template used this instead of 'celery.result_backend' in 0.6
 
    'celery.result.dburi',
 
    'celery.result.serialier',
 
    'celery.result.serializer',
 
    'celery.send.task.error.emails',
 
    'celery.task_always_eager',  # still a valid configuration in celery, but not supported in Kallithea
 
    'celery.task.serializer',
 
])
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def make_celery_config(config):
 
    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
 

	
 
    celery_config = CeleryConfig()
 

	
 
    for config_key, config_value in sorted(config.items()):
 
        if config_key in desupported and config_value:
 
            log.error('Celery configuration setting %r is no longer supported', config_key)
 
        parts = config_key.split('.', 1)
 
        if parts[0] == 'celery' and len(parts) == 2:  # Celery 4 config key
 
            celery_key = parts[1]
 
        else:
 
            continue
 
        if not isinstance(config_value, str):
 
            continue
 
        if celery_key in list_config_names:
 
            celery_value = config_value.split()
 
        elif config_value.isdigit():
 
            celery_value = int(config_value)
 
        elif config_value.lower() in ['true', 'false']:
 
            celery_value = config_value.lower() == 'true'
 
        else:
 
            celery_value = config_value
 
        setattr(celery_config, celery_key, celery_value)
 
    return celery_config
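
For illustration, how make_celery_config maps ini-style keys onto a Celery
config object (the keys here are examples of valid Celery 4 setting names, not
taken from a specific Kallithea ini):

    cfg = make_celery_config({
        'celery.broker_url': 'amqp://guest:guest@localhost:5672//',
        'celery.worker_concurrency': '4',          # digit strings become int
        'celery.worker_send_task_events': 'true',  # 'true'/'false' become bool
        'celery.accept_content': 'json pickle',    # names in list_config_names are split
        'unrelated.key': 'ignored',                # keys without the celery. prefix are skipped
    })
    assert cfg.worker_concurrency == 4
    assert cfg.worker_send_task_events is True
    assert cfg.accept_content == ['json', 'pickle']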
kallithea/lib/indexers/daemon.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.indexers.daemon
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Whoosh indexing daemon, for building and updating the full text search indexes
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 26, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import os
 
import traceback
 
from shutil import rmtree
 
from time import mktime
 

	
 
+from tg import config
 
from whoosh.index import create_in, exists_in, open_dir
 
from whoosh.qparser import QueryParser
 

	
 
+from kallithea.lib import celerylib
 
from kallithea.lib.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
 
from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, NodeDoesNotExistError, RepositoryError
 
from kallithea.model import db
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic indexing jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, repo_list=None,
 
                 repo_update_list=None):
 
        self.indexname = indexname
 

	
 
        self.index_location = index_location
 
        if not index_location:
 
            raise Exception('You have to provide index location')
 

	
 
        self.repo_location = repo_location
 
        if not repo_location:
 
            raise Exception('You have to provide repositories location')
 

	
 
        self.repo_paths = ScmModel().repo_scan(self.repo_location)
 

	
 
        # filter repo list
 
        if repo_list:
 
            repo_list = set(repo_list)
 
            self.filtered_repo_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_list:
 
                    self.filtered_repo_paths[repo_name] = repo
 

	
 
            self.repo_paths = self.filtered_repo_paths
 

	
 
        # filter update repo list
 
        self.filtered_repo_update_paths = {}
 
        if repo_update_list:
 
            self.filtered_repo_update_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_update_list:
 
                    self.filtered_repo_update_paths[repo_name] = repo
 
            self.repo_paths = self.filtered_repo_update_paths
 

	
 
        self.initial = True
 
        if not os.path.isdir(self.index_location):
 
            os.makedirs(self.index_location)
 
            log.info('Cannot run incremental index since it does not '
 
                     'yet exist - running full build')
 
        elif not exists_in(self.index_location, IDX_NAME):
 
            log.info('Running full index build, as the file content '
 
                     'index does not exist')
 
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
 
            log.info('Running full index build, as the changeset '
 
                     'index does not exist')
 
        else:
 
            self.initial = False
 

	
 
    def _get_index_revision(self, repo):
 
        db_repo = db.Repository.get_by_repo_name(repo.name)
 
        landing_rev = 'tip'
 
        if db_repo:
 
            _rev_type, _rev = db_repo.landing_rev
 
            landing_rev = _rev
 
        return landing_rev
 

	
 
    def _get_index_changeset(self, repo, index_rev=None):
 
        if not index_rev:
 
            index_rev = self._get_index_revision(repo)
 
        cs = repo.get_changeset(index_rev)
 
        return cs
 

	
 
    def get_paths(self, repo):
 
        """
 
        recursive walk in root dir and return a set of all paths in that dir
 
        based on repository walk function
 
        """
 
        index_paths_ = set()
 
        try:
 
            cs = self._get_index_changeset(repo)
 
            for _topnode, _dirs, files in cs.walk('/'):
 
                for f in files:
 
                    index_paths_.add(os.path.join(repo.path, f.path))
 

	
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
 
        return index_paths_
 

	
 
    def get_node(self, repo, path, index_rev=None):
 
        """
 
        gets a filenode based on given full path.
 

	
 
        :param repo: scm repo instance
 
        :param path: full path including root location
 
        :return: FileNode
 
        """
 
        # FIXME: paths should be normalized ... or even better: don't include repo.path
 
        assert path.startswith(repo.path)
 
        assert path[len(repo.path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo.path) + 1:]
 
        cs = self._get_index_changeset(repo, index_rev=index_rev)
 
        node = cs.get_node(node_path)
 
        return node
 

	
 
    def is_indexable_node(self, node):
 
        """
 
        Just index the content of chosen files, skipping binary files
 
        """
 
        return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \
 
               not node.is_binary
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
 
        """
 
        Add a doc to the writer; this function itself fetches the data from
        the instance of the vcs backend
 
        """
 
        try:
 
            node = self.get_node(repo, path, index_rev)
 
        except (ChangesetError, NodeDoesNotExistError):
 
            log.debug("    >> %s - not found in %s %s", path, repo, index_rev)
 
            return 0, 0
 

	
 
        indexed = indexed_w_content = 0
 
        if self.is_indexable_node(node):
 
            bytes_content = node.content
 
            if b'\0' in bytes_content:
 
                log.warning('    >> %s - no text content', path)
 
                u_content = ''
 
            else:
 
                log.debug('    >> %s', path)
 
                u_content = safe_str(bytes_content)
 
                indexed_w_content += 1
 

	
 
        else:
 
            log.debug('    >> %s - not indexable', path)
 
            # just index the file name without its content
 
            u_content = ''
 
            indexed += 1
 

	
 
        writer.add_document(
 
            fileid=path,
 
            repository_rawname=repo_name,
 
            repository=repo_name,
 
            path=path,
 
            content=u_content,
 
            modtime=self.get_node_mtime(node),
 
            extension=node.extension
 
        )
 
        return indexed, indexed_w_content
 

	
 
    def index_changesets(self, writer, repo_name, repo, start_rev=None):
 
        """
 
        Add all changesets in the vcs repo starting at start_rev
 
        to the index writer
 

	
 
        :param writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository from whence the
 
          changeset originates including the repository group
 
        :param repo: the vcs repository instance to index changesets for,
 
          the presumption is the repo has changesets to index
 
        :param start_rev=None: the full sha id to start indexing from
 
          if start_rev is None then index from the first changeset in
 
          the repo
 
        """
 

	
 
        if start_rev is None:
 
            start_rev = repo[0].raw_id
 

	
 
        log.debug('Indexing changesets in %s, starting at rev %s',
 
                  repo_name, start_rev)
 

	
 
        indexed = 0
 
        cs_iter = repo.get_changesets(start=start_rev)
 
        total = len(cs_iter)
 
        for cs in cs_iter:
 
            indexed += 1
 
            log.debug('    >> %s %s/%s', cs, indexed, total)
 
            writer.add_document(
 
                raw_id=cs.raw_id,
 
                date=cs._timestamp,
 
                repository_rawname=repo_name,
 
                repository=repo_name,
 
                author=cs.author,
 
                message=cs.message,
 
                last=cs.last,
 
                added=' '.join(node.path for node in cs.added).lower(),
 
                removed=' '.join(node.path for node in cs.removed).lower(),
 
                changed=' '.join(node.path for node in cs.changed).lower(),
 
                parents=' '.join(cs.raw_id for cs in cs.parents),
 
            )
 

	
 
        return indexed
 

	
 
    def index_files(self, file_idx_writer, repo_name, repo):
 
        """
 
        Index files for given repo_name
 

	
 
        :param file_idx_writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository we're indexing
 
        :param repo: instance of vcs repo
 
        """
 
        i_cnt = iwc_cnt = 0
 
        index_rev = self._get_index_revision(repo)
        log.debug('Building file index for %s @revision:%s', repo_name,
                  index_rev)
 
        for idx_path in self.get_paths(repo):
 
            i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
 
            i_cnt += i
 
            iwc_cnt += iwc
 

	
 
        log.debug('added %s files %s with content for repo %s',
 
                  i_cnt + iwc_cnt, iwc_cnt, repo.path)
 
        return i_cnt, iwc_cnt
 

	
 
    def update_changeset_index(self):
 
        idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)
 

	
 
        with idx.searcher() as searcher:
 
            writer = idx.writer()
 
            writer_is_dirty = False
 
            try:
 
                indexed_total = 0
 
                repo_name = None
 
                for repo_name, repo in sorted(self.repo_paths.items()):
 
                    log.debug('Updating changeset index for repo %s', repo_name)
 
                    # skip indexing if there aren't any revs in the repo
 
                    num_of_revs = len(repo)
 
                    if num_of_revs < 1:
 
                        continue
 

	
 
                    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
 
                    q = qp.parse("last:t AND %s" % repo_name)
 

	
 
                    results = searcher.search(q)
 

	
 
                    # default to scanning the entire repo
 
                    last_rev = 0
 
                    start_id = None
 

	
 
                    if len(results) > 0:
 
                        # assuming that there is only one result, if not this
 
                        # may require a full re-index.
 
                        start_id = results[0]['raw_id']
 
                        try:
 
                            last_rev = repo.get_changeset(revision=start_id).revision
 
                        except ChangesetDoesNotExistError:
 
                            log.error('previous last revision %s not found - indexing from scratch', start_id)
 
                            start_id = None
 

	
 
                    # there are new changesets to index or a new repo to index
 
                    if last_rev == 0 or num_of_revs > last_rev + 1:
 
                        # delete the docs in the index for the previous
 
                        # last changeset(s)
 
                        for hit in results:
 
                            q = qp.parse("last:t AND %s AND raw_id:%s" %
 
                                            (repo_name, hit['raw_id']))
 
                            writer.delete_by_query(q)
 

	
 
                        # index from the previous last changeset + all new ones
 
                        indexed_total += self.index_changesets(writer,
 
                                                repo_name, repo, start_id)
 
                        writer_is_dirty = True
 
                log.debug('indexed %s changesets for repo %s',
 
                          indexed_total, repo_name
 
                )
 
            finally:
 
                if writer_is_dirty:
 
                    log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<')
 
                    writer.commit(merge=True)
 
                    log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
 
                else:
 
                    log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<')
 

	
 
    def update_file_index(self):
 
        log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
 
                  'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
 

	
 
        idx = open_dir(self.index_location, indexname=self.indexname)
 
        # The set of all paths in the index
 
        indexed_paths = set()
 
        # The set of all paths we need to re-index
 
        to_index = set()
 

	
 
        writer = idx.writer()
 
        writer_is_dirty = False
 
        try:
 
            with idx.reader() as reader:
 

	
 
                # Loop over the stored fields in the index
 
                for fields in reader.all_stored_fields():
 
                    indexed_path = fields['path']
 
                    indexed_repo_path = fields['repository']
 
                    indexed_paths.add(indexed_path)
 

	
 
                    if indexed_repo_path not in self.filtered_repo_update_paths:
 
                        continue
 

	
 
                    repo = self.repo_paths[indexed_repo_path]
 

	
 
                    try:
 
                        node = self.get_node(repo, indexed_path)
 
                        # Check if this file was changed since it was indexed
 
                        indexed_time = fields['modtime']
 
                        mtime = self.get_node_mtime(node)
 
                        if mtime > indexed_time:
 
                            # The file has changed, delete it and add it to
 
                            # the list of files to reindex
 
                            log.debug(
 
                                'adding to reindex list %s mtime: %s vs %s',
 
                                    indexed_path, mtime, indexed_time
 
                            )
 
                            writer.delete_by_term('fileid', indexed_path)
 
                            writer_is_dirty = True
 

	
 
                            to_index.add(indexed_path)
 
                    except (ChangesetError, NodeDoesNotExistError):
 
                        # This file was deleted since it was indexed
 
                        log.debug('removing from index %s', indexed_path)
 
                        writer.delete_by_term('path', indexed_path)
 
                        writer_is_dirty = True
 

	
 
            # Loop over the files in the filesystem
 
            # Assume we have a function that gathers the filenames of the
 
            # documents to be indexed
 
            ri_cnt_total = 0  # indexed
 
            riwc_cnt_total = 0  # indexed with content
 
            for repo_name, repo in sorted(self.repo_paths.items()):
 
                log.debug('Updating file index for repo %s', repo_name)
 
                # skip indexing if there aren't any revisions
 
                if len(repo) < 1:
 
                    continue
 
                ri_cnt = 0   # indexed
 
                riwc_cnt = 0  # indexed with content
 
                for path in self.get_paths(repo):
 
                    if path in to_index or path not in indexed_paths:
 
                        # This is either a file that's changed, or a new file
 
                        # that wasn't indexed before. So index it!
 
                        i, iwc = self.add_doc(writer, path, repo, repo_name)
 
                        writer_is_dirty = True
 
                        ri_cnt += i
 
                        ri_cnt_total += 1
 
                        riwc_cnt += iwc
 
                        riwc_cnt_total += iwc
 
                log.debug('added %s files %s with content for repo %s',
 
                             ri_cnt + riwc_cnt, riwc_cnt, repo.path
 
                )
 
            log.debug('indexed %s files in total and %s with content',
 
                        ri_cnt_total, riwc_cnt_total
 
            )
 
        finally:
 
            if writer_is_dirty:
 
                log.debug('>> COMMITTING CHANGES TO FILE INDEX <<')
 
                writer.commit(merge=True)
 
                log.debug('>>> FINISHED REBUILDING FILE INDEX <<<')
 
            else:
 
                log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<')
 
                writer.cancel()
 

	
 
    def build_indexes(self):
 
        if os.path.exists(self.index_location):
 
            log.debug('removing previous index')
 
            rmtree(self.index_location)
 

	
 
        if not os.path.exists(self.index_location):
 
            os.mkdir(self.index_location)
 

	
 
        chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA,
 
                               indexname=CHGSET_IDX_NAME)
 
        chgset_idx_writer = chgset_idx.writer()
 

	
 
        file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
 
        file_idx_writer = file_idx.writer()
 
        log.debug('BUILDING INDEX FOR EXTENSIONS %s '
 
                  'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
 

	
 
        for repo_name, repo in sorted(self.repo_paths.items()):
 
            log.debug('Updating indices for repo %s', repo_name)
 
            # skip indexing if there aren't any revisions
 
            if len(repo) < 1:
 
                continue
 

	
 
            self.index_files(file_idx_writer, repo_name, repo)
 
            self.index_changesets(chgset_idx_writer, repo_name, repo)
 

	
 
        log.debug('>> COMMITTING CHANGES <<')
 
        file_idx_writer.commit(merge=True)
 
        chgset_idx_writer.commit(merge=True)
 
        log.debug('>>> FINISHED BUILDING INDEX <<<')
 

	
 
    def update_indexes(self):
 
        self.update_file_index()
 
        self.update_changeset_index()
 

	
 
    def run(self, full_index=False):
 
        """Run daemon"""
 
        if full_index or self.initial:
 
            self.build_indexes()
 
        else:
 
            self.update_indexes()
 

	
 

	
 
+@celerylib.task
+@celerylib.locked_task
+def whoosh_index(repo_location, full_index):
+    index_location = config['index_dir']
+    WhooshIndexingDaemon(index_location=index_location,
+                         repo_location=repo_location) \
+                         .run(full_index=full_index)
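
The @celerylib.locked_task decorator is what keeps two reindexing runs from
overlapping. A conceptual sketch of such a guard, inferred from the
DaemonLock/LockHeld usage visible in get_commits_stats in
kallithea/model/async_tasks.py below - not the actual implementation:

    import functools
    import os

    def locked_task(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # one lock file per task name, in the configured cache dir
            lockkey = 'task_%s.lock' % func.__name__
            try:
                lock = celerylib.DaemonLock(os.path.join(config['cache_dir'], lockkey))
            except celerylib.LockHeld:
                log.info('Task with key %s already running', lockkey)
                return None
            try:
                return func(*args, **kwargs)
            finally:
                lock.release()
        return wrapper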
kallithea/model/async_tasks.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.async_tasks
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea task modules, containing all tasks that are supposed to be run
by the celery daemon
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import email.message
 
import email.utils
 
import os
 
import smtplib
 
import time
 
import traceback
 
from collections import OrderedDict
 
from operator import itemgetter
 
from time import mktime
 

	
 
import celery.utils.log
 
from tg import config
 

	
 
import kallithea
 
from kallithea.lib import celerylib, conf, ext_json, hooks
 
-from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
from kallithea.lib.utils2 import asbool, ascii_bytes
 
from kallithea.lib.vcs.utils import author_email, author_name
 
from kallithea.model import db, meta, repo, userlog
 

	
 

	
 
-__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
+__all__ = ['get_commits_stats', 'send_email']
 

	
 

	
 
log = celery.utils.log.get_task_logger(__name__)
 

	
 

	
 
-@celerylib.task
-@celerylib.locked_task
-def whoosh_index(repo_location, full_index):
-    index_location = config['index_dir']
-    WhooshIndexingDaemon(index_location=index_location,
-                         repo_location=repo_location) \
-                         .run(full_index=full_index)
 

	
 

	
 
def _author_username(author):
 
    """Return the username of the user identified by the email part of the 'author' string,
 
    defaulting to the name or email.
    Kind of similar to h.person()."""
 
    email = author_email(author)
 
    if email:
 
        user = db.User.get_by_email(email)
 
        if user is not None:
 
            return user.username
 
    # Still nothing?  Just pass back the author name if any, else the email
 
    return author_name(author) or email
 

	
 

	
 
@celerylib.task
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    log.info('running task with lockkey %s', lockkey)
 
    try:
 
        lock = celerylib.DaemonLock(os.path.join(config['cache_dir'], lockkey))
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        db_repo = db.Repository.get_by_repo_name(repo_name)
 
        if db_repo is None:
 
            return
 

	
 
        scm_repo = db_repo.scm_instance
 
        repo_size = scm_repo.count()
 
        # return if repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config.get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = db.Repository.query() \
 
            .filter(db.Repository.repo_name == repo_name).scalar()
 
        cur_stats = db.Statistics.query() \
 
            .filter(db.Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == scm_repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without doing any work if we're not on the first
            # revision, or the last parsed revision (from the db marker) already
            # is the last revision
 
            lock.release()
 
            return
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(ext_json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s', parse_limit)
 

	
 
        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s',
 
             last_rev, last_rev + parse_limit
 
        )
 
        usernames_cache = {}
 
        for cs in scm_repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s', cs)
 
            last_cs = cs  # remember last parsed changeset
 
            tt = cs.date.timetuple()
 
            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
 

	
 
            # get username from author - similar to what h.person does
 
            username = usernames_cache.get(cs.author)
 
            if username is None:
 
                username = _author_username(cs.author)
 
                usernames_cache[cs.author] = username
 

	
 
            if username in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[username]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = None
 

	
 
                if time_pos is not None and time_pos >= 0:
 
                    datadict = \
 
                        co_day_auth_aggr[username]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if ts_min_y <= k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[username]['data'] \
 
                            .append(datadict)
 

	
 
            else:
 
                if ts_min_y <= k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[username] = {
 
                                        "label": username,
 
                                        "data": [{"time": k,
 
                                                 "commits": 1,
 
                                                 "added": len(cs.added),
 
                                                 "changed": len(cs.changed),
 
                                                 "removed": len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            # gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        stats = cur_stats if cur_stats else db.Statistics()
 
        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
 
        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
 

	
 
        log.debug('last revision %s', last_rev)
 
        leftovers = len(scm_repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s', leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            meta.Session().add(stats)
 
            meta.Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            meta.Session().rollback()
 
            lock.release()
 
            return
 

	
 
        # final release
 
        lock.release()
 

	
 
        # execute another task if celery is enabled
 
        if len(scm_repo.revisions) > 1 and asbool(kallithea.CONFIG.get('use_celery')) and recurse_limit > 0:
 
            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
 
        elif recurse_limit <= 0:
 
            log.debug('Not recursing - limit has been reached')
 
        else:
 
            log.debug('Not recursing')
 
    except celerylib.LockHeld:
 
        log.info('Task with key %s already running', lockkey)
 

	
 

	
 
@celerylib.task
 
def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients, if this is None, the defined email
 
        address from field 'email_to' and all admins is used instead
 
    :param subject: subject of the mail
 
    :param body: plain text body of the mail
 
    :param html_body: html version of body
 
    :param headers: dictionary of prepopulated e-mail headers
 
    :param from_name: full name to be used as sender of this mail - often a
 
    .full_name_or_username value
 
    """
 
    assert isinstance(recipients, list), recipients
 
    if headers is None:
 
        headers = {}
 
    else:
 
        # do not modify the original headers object passed by the caller
 
        headers = headers.copy()
 

	
 
    email_config = config
 
    email_prefix = email_config.get('email_prefix', '')
 
    if email_prefix:
 
        subject = "%s %s" % (email_prefix, subject)
 

	
 
    if not recipients:
 
        # if recipients are not defined we send to email_config + all admins
 
        recipients = [u.email for u in db.User.query()
 
                      .filter(db.User.admin == True).all()]
 
        if email_config.get('email_to') is not None:
 
            recipients += email_config.get('email_to').split(',')
 

	
 
        # If there are still no recipients, there are no admins and no address
 
        # configured in email_to, so return.
 
        if not recipients:
 
            log.error("No recipients specified and no fallback available.")
 
            return
 

	
 
        log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
 

	
 
    # SMTP sender
 
    app_email_from = email_config.get('app_email_from', 'Kallithea')
 
    # 'From' header
 
    if from_name is not None:
 
        # set From header based on from_name but with a generic e-mail address
 
        # In case app_email_from is in "Some Name <e-mail>" format, we first
 
        # extract the e-mail address.
 
        envelope_addr = author_email(app_email_from)
 
        headers['From'] = '"%s" <%s>' % (
 
            email.utils.quote('%s (no-reply)' % from_name),
 
            envelope_addr)
 

	
 
    smtp_server = email_config.get('smtp_server')
 
    smtp_port = email_config.get('smtp_port')
 
    smtp_use_tls = asbool(email_config.get('smtp_use_tls'))
 
    smtp_use_ssl = asbool(email_config.get('smtp_use_ssl'))
 
    smtp_auth = email_config.get('smtp_auth')  # undocumented - overrule automatic choice of auth mechanism
 
    smtp_username = email_config.get('smtp_username')
 
    smtp_password = email_config.get('smtp_password')
 

	
 
    logmsg = ("Mail details:\n"
 
              "recipients: %s\n"
 
              "headers: %s\n"
 
              "subject: %s\n"
 
              "body:\n%s\n"
 
              "html:\n%s\n"
 
              % (' '.join(recipients), headers, subject, body, html_body))
 

	
 
    if smtp_server:
 
        log.debug("Sending e-mail. " + logmsg)
 
    else:
 
        log.error("SMTP mail server not configured - cannot send e-mail.")
 
        log.warning(logmsg)
 
        return
 

	
 
    msg = email.message.EmailMessage()
 
    msg['Subject'] = subject
 
    msg['From'] = app_email_from  # fallback - might be overridden by a header
 
    msg['To'] = ', '.join(recipients)
 
    msg['Date'] = email.utils.formatdate(time.time())
 

	
 
    for key, value in headers.items():
 
        del msg[key]  # Delete key first to make sure add_header will replace header (if any), no matter the casing
 
        msg.add_header(key, value)
 

	
 
    msg.set_content(body)
 
    msg.add_alternative(html_body, subtype='html')
 

	
 
    try:
 
        if smtp_use_ssl:
 
            smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port)
 
        else:
 
            smtp_serv = smtplib.SMTP(smtp_server, smtp_port)
 

	
 
        if smtp_use_tls:
 
            smtp_serv.starttls()
 

	
 
        if smtp_auth:
 
            smtp_serv.ehlo()  # populate esmtp_features
 
            smtp_serv.esmtp_features["auth"] = smtp_auth
 

	
 
        if smtp_username and smtp_password is not None:
 
            smtp_serv.login(smtp_username, smtp_password)
 

	
 
        smtp_serv.sendmail(app_email_from, recipients, msg.as_string())
 
        smtp_serv.quit()
 

	
 
        log.info('Mail was sent to: %s', recipients)
 
    except Exception:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 

	
 

	
 
@celerylib.task
 
def create_repo(form_data, cur_user):
 
    cur_user = db.User.guess_instance(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', db.Repository.STATE_PENDING)
 

	
 
    # repo creation defaults, private and repo_type are filled in form
 
    defs = db.Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_downloads = defs.get('repo_enable_downloads')
 

	
 
    try:
 
        db_repo = repo.RepoModel()._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=description,
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions,
 
            copy_group_permissions=copy_group_permissions,
 
            enable_statistics=enable_statistics,
 
            enable_downloads=enable_downloads,
 
            state=state
 
        )
 

	
 
        userlog.action_logger(cur_user, 'user_created_repo',
 
                      form_data['repo_name_full'], '')
 

	
 
        meta.Session().commit()
 
        # now create this repo on Filesystem
 
        repo.RepoModel()._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=db.RepoGroup.guess_instance(repo_group),
 
            clone_uri=clone_uri,
 
        )
 
        db_repo = db.Repository.get_by_repo_name(repo_name_full)
 
        hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        db_repo.update_changeset_cache()
 

	
 
        # set new created state
 
        db_repo.set_state(db.Repository.STATE_CREATED)
 
        meta.Session().commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...', e)
 
        # rollback things manually !
 
        db_repo = db.Repository.get_by_repo_name(repo_name_full)
 
        if db_repo:
 
            db.Repository.delete(db_repo.repo_id)
 
            meta.Session().commit()
 
            repo.RepoModel()._delete_filesystem_repo(db_repo)
 
        raise
 

	
 

	
 
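# Hypothetical usage sketch (illustrative names, not the actual call sites):
# a controller validates the repository-creation form, then calls this
# celerylib.task-decorated function, passing the user by id so the worker
# process can look it up again:
#     from kallithea.model import async_tasks
#     async_tasks.create_repo(form_result, request.authuser.user_id)

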
@celerylib.task
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods.

    :param form_data: validated data from the fork creation form
    :param cur_user: user creating the fork; id or db.User instance
    """
    base_path = kallithea.CONFIG['base_path']
    cur_user = db.User.guess_instance(cur_user)

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']

    repo_type = form_data['repo_type']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')

    try:
        fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id'))

        repo.RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=form_data['description'],
            owner=owner,
scripts/deps.py
Show inline comments
 
#!/usr/bin/env python3


import re
import sys


ignored_modules = set('''
argparse
base64
bcrypt
binascii
bleach
calendar
celery
chardet
click
collections
configparser
copy
csv
ctypes
datetime
dateutil
decimal
decorator
difflib
distutils
docutils
email
errno
fileinput
functools
getpass
grp
hashlib
hmac
html
http
imp
importlib
inspect
io
ipaddr
IPython
isapi_wsgi
itertools
json
kajiki
ldap
logging
mako
markdown
mimetypes
mock
msvcrt
multiprocessing
operator
os
paginate
paginate_sqlalchemy
pam
paste
pkg_resources
platform
posixpath
pprint
pwd
pyflakes
pytest
pytest_localserver
random
re
routes
setuptools
shlex
shutil
smtplib
socket
ssl
stat
string
struct
subprocess
sys
tarfile
tempfile
textwrap
tgext
threading
time
traceback
traitlets
types
urllib
urlobject
uuid
warnings
webhelpers2
webob
webtest
whoosh
win32traceutil
zipfile
'''.split())

top_modules = set('''
kallithea.alembic
kallithea.bin
kallithea.config
kallithea.controllers
kallithea.templates.py
scripts
'''.split())

bottom_external_modules = set('''
tg
mercurial
sqlalchemy
alembic
formencode
pygments
dulwich
beaker
psycopg2
docs
setup
conftest
'''.split())

normal_modules = set('''
kallithea
kallithea.controllers.base
kallithea.lib
kallithea.lib.auth
kallithea.lib.auth_modules
kallithea.lib.celerylib
kallithea.lib.db_manage
kallithea.lib.helpers
kallithea.lib.hooks
kallithea.lib.indexers
kallithea.lib.utils
kallithea.lib.utils2
kallithea.lib.vcs
kallithea.lib.webutils
kallithea.model
kallithea.model.async_tasks
kallithea.model.scm
kallithea.templates.py
'''.split())

shown_modules = normal_modules | top_modules

# break the chains somehow - this is a cleanup TODO list
known_violations = set([
    ('kallithea.lib.auth_modules', 'kallithea.lib.auth'),  # needs base&facade
    ('kallithea.lib.utils', 'kallithea.model'),  # clean up utils
    ('kallithea.lib.utils', 'kallithea.model.db'),
    ('kallithea.lib.utils', 'kallithea.model.scm'),
    ('kallithea.model.async_tasks', 'kallithea.lib.hooks'),
    ('kallithea.model.async_tasks', 'kallithea.lib.indexers'),
    ('kallithea.model.async_tasks', 'kallithea.model'),
    ('kallithea.model', 'kallithea.lib.auth'),  # auth.HasXXX
    ('kallithea.model', 'kallithea.lib.auth_modules'),  # validators
    ('kallithea.model', 'kallithea.lib.hooks'),  # clean up hooks
    ('kallithea.model', 'kallithea.model.scm'),
    ('kallithea.model.scm', 'kallithea.lib.hooks'),
])

extra_edges = [
    ('kallithea.config', 'kallithea.controllers'),  # through TG
    ('kallithea.lib.auth', 'kallithea.lib.auth_modules'),  # custom loader
]


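# The sets above drive the graph and the check below: edges between
# normalized modules are drawn, known_violations are tolerated but flagged
# in red, extra_edges are added by hand in green, and everything else must
# untangle into a clean top-down layering.
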
def normalize(s):
    """Given a dotted module name, return the name it should be shown as."""
    parts = s.replace('.__init__', '').split('.')
    short_2 = '.'.join(parts[:2])
    short_3 = '.'.join(parts[:3])
    short_4 = '.'.join(parts[:4])
    if parts[0] in ['scripts', 'contributor_data', 'i18n_utils']:
        return 'scripts'
    if short_3 == 'kallithea.model.meta':
        return 'kallithea.model.db'
    if parts[:4] == ['kallithea', 'lib', 'vcs', 'ssh']:
        return 'kallithea.lib.vcs.ssh'
    if short_4 in shown_modules:
        return short_4
    if short_3 in shown_modules:
        return short_3
    if short_2 in shown_modules:
        return short_2
    if short_2 == 'kallithea.tests':
        return None
    if parts[0] in ignored_modules:
        return None
    assert parts[0] in bottom_external_modules, parts
    return parts[0]


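# Illustrative examples of the mapping (hypothetical inputs; the results
# follow from the module sets defined above):
#     normalize('kallithea.model.meta')  -> 'kallithea.model.db'
#     normalize('kallithea.controllers.admin.settings')  -> 'kallithea.controllers'
#     normalize('os.path')  -> None  (stdlib, ignored)
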
def main(filenames):
    if not filenames or filenames[0].startswith('-'):
        print('''\
Usage:
    hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/deps.py
    dot -Tsvg deps.dot > deps.svg
        ''')
        raise SystemExit(1)

    files_imports = dict()  # map each filename to its set of imports
    import_deps = set()  # set of (module name, imported module) tuples
    for fn in filenames:
        with open(fn) as f:
            s = f.read()

        dot_name = (fn[:-3] if fn.endswith('.py') else fn).replace('/', '.')
        file_imports = set()
        for m in re.finditer(r'^ *(?:from ([^ ]*) import (?:([a-zA-Z].*)|\(([^)]*)\))|import (.*))$', s, re.MULTILINE):
            m_from, m_from_import, m_from_import2, m_import = m.groups()
            if m_from:
                pre = m_from + '.'
                if pre.startswith('.'):
                    pre = dot_name.rsplit('.', 1)[0] + pre
                importlist = m_from_import or m_from_import2
            else:
                pre = ''
                importlist = m_import
            for imp in importlist.split('#', 1)[0].split(','):
                full_imp = pre + imp.strip().split(' as ', 1)[0]
                file_imports.add(full_imp)
                import_deps.add((dot_name, full_imp))
        files_imports[fn] = file_imports

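    # For illustration (hypothetical input): a file kallithea/foo.py containing
    #     from kallithea.model import db, meta
    #     import re  # stdlib
    # records the edges ('kallithea.foo', 'kallithea.model.db'),
    # ('kallithea.foo', 'kallithea.model.meta'), and ('kallithea.foo', 're').
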
    # dump out all deps for debugging and analysis
    with open('deps.txt', 'w') as f:
        for fn, file_imports in sorted(files_imports.items()):
            for file_import in sorted(file_imports):
                if file_import.split('.', 1)[0] in ignored_modules:
                    continue
                f.write('%s: %s\n' % (fn, file_import))

    # find leaves that haven't been ignored - they are the important external
    # dependencies and are shown in the bottom row
    only_imported = set(
        set(normalize(b) for a, b in import_deps) -
        set(normalize(a) for a, b in import_deps) -
        set([None, 'kallithea'])
    )

    normalized_dep_edges = set()
    for dot_name, full_imp in import_deps:
        a = normalize(dot_name)
        b = normalize(full_imp)
        if a is None or b is None or a == b:
            continue
        normalized_dep_edges.add((a, b))
        #print((dot_name, full_imp, a, b))
    normalized_dep_edges.update(extra_edges)

    unseen_shown_modules = shown_modules.difference(a for a, b in normalized_dep_edges).difference(b for a, b in normalized_dep_edges)
    assert not unseen_shown_modules, unseen_shown_modules

    with open('deps.dot', 'w') as f:
        f.write('digraph {\n')
        f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(top_modules)))
        f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(only_imported)))
        for a, b in sorted(normalized_dep_edges):
            f.write('  "%s" -> "%s"%s\n' % (a, b, ' [color=red]' if (a, b) in known_violations else ' [color=green]' if (a, b) in extra_edges else ''))
        f.write('}\n')

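    # A hypothetical excerpt of the edges written to deps.dot (red marks a
    # known violation, green an extra edge added above, default is a normal
    # dependency):
    #     "kallithea.model" -> "kallithea.lib.auth" [color=red]
    #     "kallithea.controllers" -> "kallithea.lib.webutils"
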
    # verify dependencies by untangling the dependency chain bottom-up:
    todo = set(normalized_dep_edges)
    unseen_violations = known_violations.difference(todo)
    assert not unseen_violations, unseen_violations
    for x in known_violations:
        todo.remove(x)

    while todo:
        depending = set(a for a, b in todo)
        depended = set(b for a, b in todo)
        drop = depended - depending
        if not drop:
            print('ERROR: cycles:', len(todo))
            for x in sorted(todo):
                print('%s,' % (x,))
            raise SystemExit(1)
        #for do_b in sorted(drop):
        #    print('Picking', do_b, '- unblocks:', ' '.join(a for a, b in sorted(todo) if b == do_b))
        todo = set((a, b) for a, b in todo if b in depending)
        #print()
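
    # Worked example with hypothetical edges: given {(A, B), (B, C)}, the first
    # pass drops C (it is depended upon but depends on nothing), the next pass
    # drops B, and todo ends up empty - no cycles.  Given {(A, B), (B, A)},
    # nothing can be dropped, so the cycle is reported and the script exits.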


if __name__ == '__main__':
    main(sys.argv[1:])
0 comments (0 inline, 0 general)