#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# 2020-11-18 Henning Hollermann <henning.hollermann@netknights.it>
#            Allow import and export of events, resolvers and policies
# 2018-08-07 Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Allow creation of HSM keys
# 2017-10-08 Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Allow cleaning up different actions with different
#            retention times.
# 2017-07-12 Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Add generation of PGP keys
# 2017-02-23 Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Add CA sub commands
# 2017-01-27 Diogenes S. Jesus
#            Cornelius Kölbel <cornelius.koelbel@netknights.it>
#            Add creation of more detailed policy
# 2016-04-15 Cornelius Kölbel <cornelius@privacyidea.org>
#            Add backup for pymysql driver
# 2016-01-29 Cornelius Kölbel <cornelius@privacyidea.org>
#            Add profiling
# 2015-10-09 Cornelius Kölbel <cornelius@privacyidea.org>
#            Set file permissions
# 2015-09-24 Cornelius Kölbel <cornelius@privacyidea.org>
#            Add validate call
# 2015-06-16 Cornelius Kölbel <cornelius@privacyidea.org>
#            Add creation of JWT token
# 2015-03-27 Cornelius Kölbel, cornelius@privacyidea.org
#            Add sub command for policies
# 2014-12-15 Cornelius Kölbel, info@privacyidea.org
#            Initial creation
#
# (c) Cornelius Kölbel
# Info: http://www.privacyidea.org
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# ./manage.py db init
# ./manage.py db migrate
# ./manage.py createdb
#
from __future__ import print_function
import os
import sys
import datetime
from datetime import timedelta
import re
from subprocess import call, Popen, PIPE
from getpass import getpass
import gnupg
import yaml
import contextlib
import argparse
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
import flask
from six.moves import shlex_quote
import importlib_metadata

from privacyidea.lib.sqlutils import delete_matching_rows
from privacyidea.lib.security.default import DefaultSecurityModule
from privacyidea.lib.crypto import geturandom
from privacyidea.lib.auth import (create_db_admin, list_db_admin,
                                  delete_db_admin)
from privacyidea.lib.policy import (delete_policy, enable_policy,
                                    PolicyClass, set_policy)
from privacyidea.lib.event import (delete_event, enable_event,
                                   set_event, EventConfiguration)
from privacyidea.lib.resolver import get_resolver_list, save_resolver
from privacyidea.lib.caconnector import (get_caconnector_list,
                                         get_caconnector_class,
                                         get_caconnector_object,
                                         save_caconnector)
from privacyidea.app import create_app
from privacyidea.lib.auth import ROLE
from flask_script import Manager, Command, Option
from privacyidea.app import db
from flask_migrate import MigrateCommand, stamp as fm_stamp
# We need to import something so that the models will be created.
from privacyidea.models import Audit
from sqlalchemy import create_engine, desc, MetaData
from sqlalchemy.orm import sessionmaker
from privacyidea.lib.auditmodules.sqlaudit import LogEntry
from privacyidea.lib.audit import getAudit
from privacyidea.lib.authcache import cleanup as authcache_cleanup
from privacyidea.lib.utils import parse_timedelta, get_version_number
from privacyidea.lib.crypto import create_hsm_object
from privacyidea.lib.importotp import parseOATHcsv
from privacyidea.lib.token import import_token
from privacyidea.lib.utils.export import EXPORT_FUNCTIONS, IMPORT_FUNCTIONS
import jwt
import ast
import base64
import tarfile

SILENT = True
# SQLAlchemy URI schemes that the MySQL backup/restore code handles
MYSQL_DIALECTS = ["mysql", "pymysql", "mysql+pymysql"]
# Configuration types handled by default on config import/export
DEFAULT_CONFTYPE_LIST = ("policy", "resolver", "event")

# Create the Flask application and the flask_script managers that build the
# command tree of this script. Each sub-manager groups related sub-commands.
app = create_app(config_name='production', silent=SILENT)
manager = Manager(app)
admin_manager = Manager(usage='Create new administrators or modify existing '
                              'ones.')
backup_manager = Manager(usage='Create database backup and restore')
realm_manager = Manager(usage='Create new realms, delete existing realms '
                              'or set the default realm')
resolver_manager = Manager(usage='Create new resolver')
policy_manager = Manager(usage='Manage policies')
event_manager = Manager(usage='Manage events')
api_manager = Manager(usage="Manage API keys")
ca_manager = Manager(usage="Manage Certificate Authorities")
audit_manager = Manager(usage="Manage Audit log")
authcache_manager = Manager(usage="Manage AuthCache")
hsm_manager = Manager(usage="Manage HSM")
token_manager = Manager(usage="Manage tokens")
config_manager = Manager(usage="Manage the privacyIDEA configuration")
config_import_manager = Manager(usage="import configuration")
config_export_manager = Manager(usage="export configuration")
# Register the sub-managers under their command names.
manager.add_command('db', MigrateCommand)
manager.add_command('admin', admin_manager)
manager.add_command('backup', backup_manager)
manager.add_command('realm', realm_manager)
manager.add_command('resolver', resolver_manager)
manager.add_command('policy', policy_manager)
manager.add_command('event', event_manager)
manager.add_command('api', api_manager)
manager.add_command('ca', ca_manager)
manager.add_command('audit', audit_manager)
manager.add_command('authcache', authcache_manager)
manager.add_command('hsm', hsm_manager)
manager.add_command('token', token_manager)
manager.add_command('config', config_manager)
# 'import' and 'export' are nested below 'config'.
config_manager.add_command('import', config_import_manager)
config_manager.add_command('export', config_export_manager)


@hsm_manager.command
def create_keys():
    """
    Generate a fresh set of encryption keys on the configured HSM.

    The HSM must be configured in pi.cfg beforehand via the variables
    PI_HSM_MODULE, PI_HSM_MODULE_MODULE, PI_HSM_MODULE_SLOT and
    PI_HSM_MODULE_PASSWORD.
    """
    hsm = create_hsm_object(app.config)
    key_labels = hsm.create_keys()
    # Tell the admin which key labels to configure for the three key slots.
    print("Please add the following to your pi.cfg:")
    print("PI_HSM_MODULE_KEY_LABEL_TOKEN = '{0}'".format(key_labels.get("token")))
    print("PI_HSM_MODULE_KEY_LABEL_CONFIG = '{0}'".format(key_labels.get("config")))
    print("PI_HSM_MODULE_KEY_LABEL_VALUE = '{0}'".format(key_labels.get("value")))


def list_ca(verbose=False):
    """
    Print all configured Certificate Authority connectors.

    :param verbose: if True, also print each connector's data dictionary
    """
    for connector in get_caconnector_list():
        print("{ca!s} (type {typ!s})".format(ca=connector.get("connectorname"),
                                             typ=connector.get("type")))
        if not verbose:
            continue
        for key, value in connector.get("data").items():
            print("\t{key!s:20}: {value!s}".format(key=key, value=value))


ca_manager.add_command('list', Command(list_ca))


@ca_manager.command
def create_crl(ca, force=False):
    """
    Trigger creation of a CRL on the given CA connector.

    :param ca: name of the CA connector
    :param force: create the CRL even if the current one is still valid
    """
    connector = get_caconnector_object(ca)
    crl_name = connector.create_crl(check_validity=not force)
    if crl_name:
        print("The CRL {name!s} was created.".format(name=crl_name))
    else:
        print("The CRL was not created.")


@ca_manager.option('name', help='The name of the new CA')
@ca_manager.option('-t', '--type',
                   help='The type of the new CA. The default is "local"',
                   dest='catype')
def create(name, catype='local'):
    """
    Create a new CA connector. In case of the "localca" also the directory
    structure, the openssl.cnf and the key pair is created.
    """
    # Refuse to overwrite an existing connector of the same name.
    if get_caconnector_object(name):
        print("A CA connector with the name '{0!s}' already exists.".format(
            name))
        sys.exit(1)
    catype = catype or "local"
    print("Creating CA connector of type {0!s}.".format(catype))
    connector_class = get_caconnector_class(catype)
    connector_params = connector_class.create_ca(name)
    connector_id = save_caconnector(connector_params)
    if connector_id:
        print("Saved CA Connector with ID {0!s}.".format(connector_id))
    else:
        print("Error saving CA connector.")


@admin_manager.command
def add(username, email=None, password=None):
    """
    Register a new administrator in the database.

    If no password is given on the command line, it is asked for twice
    interactively and both entries must match.

    :param username: login name of the new administrator
    :param email: optional email address stored with the account
    :param password: optional password; prompted for if omitted
    """
    # Make sure the admin tables exist before inserting.
    db.create_all()
    if not password:
        password = getpass()
        password2 = getpass(prompt='Confirm: ')
        if password != password2:
            # sys is already imported at module level; the previous
            # function-local "import sys" was redundant.
            sys.exit('Error: passwords do not match.')

    create_db_admin(app, username, email, password)
    print('Admin {0} was registered successfully.'.format(username))


def list_admins():
    """
    Print all administrators known to the database.
    """
    list_db_admin()


admin_manager.add_command('list', Command(list_admins))


@admin_manager.command
def delete(username):
    """
    Remove the administrator with the given username from the database.

    :param username: login name of the administrator to delete
    """
    delete_db_admin(username)


@admin_manager.command
def change(username, email=None, password_prompt=False):
    """
    Change the email address or the password of an existing administrator.

    :param username: login name of the administrator to modify
    :param email: new email address; None leaves the address unchanged
    :param password_prompt: if True, interactively ask for a new password
    """
    if password_prompt:
        password = getpass()
        password2 = getpass(prompt='Confirm: ')
        if password != password2:
            # sys is already imported at module level; the previous
            # function-local "import sys" was redundant.
            sys.exit('Error: passwords do not match.')
    else:
        password = None

    create_db_admin(app, username, email, password)


@backup_manager.command
def create(directory="/var/lib/privacyidea/backup/",
           conf_dir="/etc/privacyidea/",
           radius_directory=None,
           enckey=False):
    """
    Create a new backup of the database and the configuration. The default
    does not include the encryption key. Use the 'enckey' option to also
    backup the encryption key. Then you should make sure, that the backups
    are stored safely.

    If you want to also include the RADIUS configuration into the backup
    specify a directory using 'radius_directory'.

    :param directory: directory where the backup tarball is written
    :param conf_dir: privacyIDEA configuration directory included in the backup
    :param radius_directory: optional RADIUS config directory to include
    :param enckey: if True, include the encryption key file in the backup
    """
    CONF_DIR = conf_dir
    DATE = datetime.datetime.now().strftime("%Y%m%d-%H%M")
    BASE_NAME = "privacyidea-backup"

    directory = os.path.abspath(directory)
    call(["mkdir", "-p", directory])
    # set correct owner, if possible: mirror the ownership of the enckey file
    if os.geteuid() == 0:
        encfile_stat = os.stat(app.config.get("PI_ENCFILE"))
        os.chown(directory, encfile_stat.st_uid, encfile_stat.st_gid)

    sqlfile = "%s/dbdump-%s.sql" % (directory, DATE)
    backup_file = "%s/%s-%s.tgz" % (directory, BASE_NAME, DATE)

    sqluri = app.config.get("SQLALCHEMY_DATABASE_URI")
    sqltype = sqluri.split(":")[0]
    if sqltype == "sqlite":
        # For SQLite we simply copy the database file into the backup dir.
        productive_file = sqluri[len("sqlite:///"):]
        print("Backup SQLite %s" % productive_file)
        sqlfile = "%s/db-%s.sqlite" % (directory, DATE)
        call(["cp", productive_file, sqlfile])
    elif sqltype in MYSQL_DIALECTS:
        # Extract credentials and target database from the SQLAlchemy URI.
        # NOTE(review): a URI that does not match this pattern makes
        # m.groups() fail with AttributeError.
        m = re.match(r".*mysql://(.*):(.*)@(.*)/(\w*)\??(.*)", sqluri)
        username = m.groups()[0]
        password = m.groups()[1]
        datahost = m.groups()[2]
        database = m.groups()[3]
        # We strip parameters, but we do not use them
        _parameters = m.groups()[4]
        # Credentials go into a defaults file, so they never appear on the
        # mysqldump command line (visible in `ps`).
        defaults_file = "{0!s}/mysql.cnf".format(conf_dir)
        _write_mysql_defaults(defaults_file, username, password)
        call("mysqldump --defaults-file=%s -h %s %s > %s" % (
            shlex_quote(defaults_file),
            shlex_quote(datahost),
            shlex_quote(database),
            shlex_quote(sqlfile)), shell=True)
    else:
        print("unsupported SQL syntax: %s" % sqltype)
        sys.exit(2)
    enc_file = app.config.get("PI_ENCFILE")

    backup_call = ["tar", "-zcf",
                   backup_file, CONF_DIR, sqlfile]

    if radius_directory:
        # Simply append the radius directory to the backup command
        backup_call.append(radius_directory)

    if not enckey:
        # Exclude enckey from backup
        # since tar v1.30 --exclude cannot be appended
        backup_call.insert(1, "--exclude={0!s}".format(enc_file))

    call(backup_call)
    # The raw SQL dump is inside the tarball now; remove the plain copy.
    os.unlink(sqlfile)
    # The tarball may contain secrets - restrict to owner read/write.
    os.chmod(backup_file, 0o600)


def _write_mysql_defaults(filename, username, password):
    """
    Write the defaults_file for mysql commands

    :param filename: THe name of the file
    :param username: The username to connect to the database
    :param password: The password to connect to the database
    :return:
    """
    with open(filename, "w") as f:
        f.write("""[client]
user={0!s}
password={1!s}
[mysqldump]
no-tablespaces=True""".format(username, password))

    os.chmod(filename, 0o600)
    # set correct owner, if possible
    if os.geteuid() == 0:
        directory_stat = os.stat(os.path.dirname(filename))
        os.chown(filename, directory_stat.st_uid, directory_stat.st_gid)


@backup_manager.command
def restore(backup_file):
    """
    Restore a previously made backup. You need to specify the tgz file.

    :param backup_file: path to the tarball created by the backup command
    """
    sqluri = None
    config_file = None
    sqlfile = None
    enckey_contained = False

    # First inspect the tarball listing to locate pi.cfg, the SQL dump and
    # (optionally) the encryption key file.
    p = Popen(["tar", "-ztf", backup_file], stdout=PIPE, universal_newlines=True)
    std_out, err_out = p.communicate()
    for line in std_out.split("\n"):
        if re.search(r"/pi.cfg$", line):
            config_file = "/{0!s}".format(line.strip())
        elif re.search(r"\.sql", line):
            sqlfile = "/{0!s}".format(line.strip())
        elif re.search(r"/enckey", line):
            enckey_contained = True

    if not config_file:
        raise Exception("Missing config file pi.cfg in backup file.")
    if not sqlfile:
        raise Exception("Missing database dump in backup file.")

    if enckey_contained:
        print("Also restoring encryption key 'enckey'")
    else:
        print("NO FILE 'enckey' CONTAINED! BE SURE TO RESTORE THE ENCRYPTION "
              "KEY MANUALLY!")
    print("Restoring to {0!s} with data from {1!s}".format(config_file,
                                                           sqlfile))

    # Unpack the whole backup relative to the filesystem root.
    call(["tar", "-zxf", backup_file, "-C", "/"])
    print(60 * "=")
    with open(config_file, "r") as f:
        # Determine the SQLAlchemy URI from the restored pi.cfg
        for line in f:
            if re.search("^SQLALCHEMY_DATABASE_URI", line):
                key, value = line.split("=", 1)
                # Strip whitespaces, and ' "
                sqluri = value.strip().strip("'").strip('"')

    if sqluri is None:
        print("No SQLALCHEMY_DATABASE_URI found in {0!s}".format(config_file))
        sys.exit(2)
    sqltype = sqluri.split(":")[0]
    if sqltype == "sqlite":
        # SQLite: copy the dumped database file back in place.
        productive_file = sqluri[len("sqlite:///"):]
        print("Restore SQLite %s" % productive_file)
        call(["cp", sqlfile, productive_file])
        os.unlink(sqlfile)
    elif sqltype in MYSQL_DIALECTS:
        # NOTE(review): a URI that does not match this pattern makes
        # m.groups() fail with AttributeError.
        m = re.match(r".*mysql://(.*):(.*)@(.*)/(\w*)\??(.*)", sqluri)
        username = m.groups()[0]
        password = m.groups()[1]
        datahost = m.groups()[2]
        database = m.groups()[3]
        # Credentials go into a defaults file to keep them off the
        # mysql command line.
        defaults_file = "/etc/privacyidea/mysql.cnf"
        _write_mysql_defaults(defaults_file, username, password)
        # Rewriting database
        print("Restoring database.")
        call("mysql --defaults-file=%s -h %s %s < %s" % (shlex_quote(defaults_file),
                                                         shlex_quote(datahost),
                                                         shlex_quote(database),
                                                         shlex_quote(sqlfile)), shell=True)
        os.unlink(sqlfile)
    else:
        print("unsupported SQL syntax: %s" % sqltype)
        sys.exit(2)


@manager.command
def test():
    """
    Run all nosetests with coverage reporting.
    """
    nose_args = ['nosetests', '-v',
                 '--with-coverage', '--cover-package=privacyidea',
                 '--cover-branches', '--cover-erase', '--cover-html',
                 '--cover-html-dir=cover']
    call(nose_args)


@manager.command
def encrypt_enckey(encfile):
    """
    You will be asked for a password and the encryption key in the specified
    file will be encrypted with an AES key derived from your password.

    The encryption key in the file is a 96 bit binary key.

    The password based encrypted encryption key is a hex combination of an IV
    and the encrypted data.

    The result can be piped to a new enckey file.

    :param encfile: path of the plain enckey file to encrypt
    """
    # TODO we just print out a string here and assume, the user pipes it into a file.
    #      Maybe we should write the file here so we know what is in there
    password = getpass()
    password2 = getpass(prompt='Confirm: ')
    if password != password2:
        # sys is already imported at module level; the previous
        # function-local "import sys" was redundant.
        sys.exit('Error: passwords do not match.')
    with open(encfile, "rb") as f:
        enckey = f.read()
    res = DefaultSecurityModule.password_encrypt(enckey, password)
    print(res)


@manager.command
def create_enckey(enckey_b64=None):
    """
    Create the encryption key file referenced by PI_ENCFILE.

    An existing file is never overwritten. Without an argument a random
    96 byte key is generated; otherwise the given base64 encoded key is
    decoded, validated and written.

    :param enckey_b64: (Optional) base64 encoded plain text key
    :return:
    """
    print()
    filename = app.config.get("PI_ENCFILE")
    if os.path.isfile(filename):
        print("The file \n\t%s\nalready exist. We do not overwrite it!" %
              filename)
        sys.exit(1)
    # Determine and validate the key material BEFORE creating the file:
    # previously a failed length check left an empty enckey file behind,
    # which blocked all subsequent runs with "already exist".
    if enckey_b64 is None:
        bin_enckey = DefaultSecurityModule.random(96)
    else:
        print("Warning: Passing enckey via cli input is considered harmful.")
        bin_enckey = base64.b64decode(enckey_b64)
        if len(bin_enckey) != 96:
            print("Error: enckey must be 96 bytes length")
            sys.exit(1)
    with open(filename, "wb") as f:
        f.write(bin_enckey)
    print("Encryption key written to %s" % filename)
    # Key material: owner read-only.
    os.chmod(filename, 0o400)
    print("The file permission of %s was set to 400!" % filename)
    print("Please ensure, that it is owned by the right user.   ")


@manager.command
def create_pgp_keys(keysize=2048, force=False):
    """
    Generate PGP keys to allow encrypted token import.

    :param keysize: RSA key length in bits
    :param force: generate a new key even if private keys already exist
    """
    GPG_HOME = app.config.get("PI_GNUPG_HOME", "/etc/privacyidea/gpg")
    gpg = gnupg.GPG(gnupghome=GPG_HOME)
    # list_keys(True) lists the private keys in the keyring.
    keys = gpg.list_keys(True)
    if len(keys) and not force:
        print("There are already private keys. If you want to "
              "generate a new private key, use the parameter --force.")
        print(keys)
        sys.exit(1)
    input_data = gpg.gen_key_input(key_type="RSA", key_length=keysize,
                                   name_real="privacyIDEA Server",
                                   name_comment="Import")
    # Rewrite the generated batch input: if it ends with "%commit" followed
    # by an empty line, insert "%no-protection" before the final "%commit"
    # so the key is usable non-interactively (GnuPG batch directive for a
    # key without passphrase protection).
    inputs = input_data.split("\n")
    if inputs[-2] == "%commit":
        del (inputs[-1])
        del (inputs[-1])
        inputs.append("%no-protection")
        inputs.append("%commit")
        inputs.append("")
        input_data = "\n".join(inputs)
    gpg.gen_key(input_data)


@manager.command
def create_audit_keys(keysize=2048):
    """
    Create the RSA signing key pair for the audit log.

    :param keysize: RSA key length in bits, default 2048
    """
    priv_file = app.config.get("PI_AUDIT_KEY_PRIVATE")
    pub_file = app.config.get("PI_AUDIT_KEY_PUBLIC")
    if os.path.isfile(priv_file):
        print("The file \n\t%s\nalready exist. We do not overwrite it!" %
              priv_file)
        sys.exit(1)
    # Generate the private key and serialize both halves as PEM.
    key = rsa.generate_private_key(public_exponent=65537,
                                   key_size=keysize,
                                   backend=default_backend())
    with open(priv_file, "wb") as fh:
        fh.write(key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption()))

    with open(pub_file, "wb") as fh:
        fh.write(key.public_key().public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo))

    print("Signing keys written to %s and %s" % (priv_file, pub_file))
    # Private key: owner read-only.
    os.chmod(priv_file, 0o400)
    print("The file permission of %s was set to 400!" % priv_file)
    print("Please ensure, that it is owned by the right user.")


@manager.option('--stamp', '-s', help='Stamp database to current head revision.',
                default=False, action='store_true')
def createdb(stamp=False):
    """
    Initially create the tables in the database. The database must exist
    (an SQLite database will be created).

    :param stamp: if True, mark the freshly created schema as being at the
        current alembic head revision so future migrations start from there
    """
    print(db)
    db.create_all()
    if stamp:
        # get the path to the migration directory from the distribution
        p = [x.locate() for x in importlib_metadata.files('privacyidea') if
             'migrations/env.py' in str(x)]
        migration_dir = os.path.dirname(os.path.abspath(p[0]))
        fm_stamp(directory=migration_dir)
    db.session.commit()


@manager.command
def dropdb(dropit=None):
    """
    This drops all the privacyIDEA database tables.
    Use with caution! All data will be lost!

    For safety reason you need to pass
        --dropit=yes
    Otherwise the command will not drop anything.
    """
    if dropit != "yes":
        print("Not dropping anything!")
        return
    print("Dropping all database tables!")
    db.drop_all()
    # The alembic_version table is not part of the models, so it has to be
    # reflected from the database and dropped explicitly.
    db.reflect()
    alembic_table = db.Model.metadata.tables.get("alembic_version", None)
    if alembic_table is not None:
        db.Model.metadata.drop_all(bind=db.engine,
                                   tables=[alembic_table],
                                   checkfirst=True)


@manager.command
def validate(user, password, realm=None):
    """
    Perform an authentication request for the given user and print the result.
    """
    from privacyidea.lib.user import get_user_from_param
    from privacyidea.lib.token import check_user_pass
    try:
        user_obj = get_user_from_param({"user": user, "realm": realm})
        success, details = check_user_pass(user_obj, password)
        print("RESULT=%s" % success)
        print("DETAILS=%s" % details)
    except Exception as exx:
        # Any failure is reported instead of crashing the CLI.
        print("RESULT=Error")
        print("ERROR=%s" % exx)


class CommandOutsideRequestContext(Command):
    """
    In contrast to flask_script's `Command`, this command class does
    not push a request context before running the command.
    """

    def __call__(self, app=None, *args, **kwargs):
        # Bypass Command.__call__ (which would push a request context)
        # and invoke run() directly.
        return self.run(*args, **kwargs)

    def run(self, *arg, **kwargs):
        # Placeholder; flask_script replaces this with the wrapped function.
        pass


def profile(length=30, profile_dir=None):
    """
    Start the application in profiling mode.

    :param length: restriction passed to the profiler middleware - limits
        the amount of profiler output (see werkzeug ProfilerMiddleware)
    :param profile_dir: if given, profile data files are written there
    """
    from werkzeug.middleware.profiler import ProfilerMiddleware
    if flask.has_request_context():
        print("WARNING: The app may behave unrealistically during profiling.")
    # Wrap the WSGI app so every request is profiled, then serve it.
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
                                      profile_dir=profile_dir)
    app.run()


# If flask_script's `Command` is used here instead of `CommandOutsideRequestContext`,
# the request context will persist over requests. Thus, `g` is not
# cleared properly between requests, which makes privacyIDEA behave unrealistically.
try:
    manager.add_command('profile', CommandOutsideRequestContext(profile))
except TypeError:
    # Apparently, we are using Flask-Script 0.6.7, which does not support the API used above.
    # So we just add the `profile` command without using `CommandOutsideRequestContext`.
    # Fallback: register via the plain decorator API.
    profile = manager.command(profile)


@authcache_manager.command
def cleanup(minutes=480):
    """
    Delete authcache entries whose last_auth timestamp is older than the
    given number of minutes.

    :param minutes: age threshold in minutes, default 480
    """
    deleted = authcache_cleanup(int(minutes))
    print(u"Entries deleted: {0!s}".format(deleted))


@manager.option('--highwatermark', '--hw', help="If entries exceed this value, "
                                                "old entries are deleted.")
@manager.option('--lowwatermark', '--lw', help="Keep this number of entries.")
@manager.option('--age', help="Delete audit entries older than these number "
                              "of days.")
@manager.option('--config', help="Read config from the specified yaml file.")
@manager.option('--dryrun', help="Do not actually delete, only show "
                                 "what would be done.", action="store_true")
@manager.option('--chunksize', help="Delete entries in chunks of the given size "
                                    "to avoid deadlocks")
@audit_manager.option('--highwatermark', '--hw', help="If entries exceed this value, "
                                                      "old entries are deleted.")
@audit_manager.option('--lowwatermark', '--lw', help="Keep this number of entries.")
@audit_manager.option('--age', help="Delete audit entries older than these number "
                                    "of days.")
@audit_manager.option('--config', help="Read config from the specified yaml file.")
@audit_manager.option('--dryrun', help="Do not actually delete, only show "
                                       "what would be done.", action="store_true")
@audit_manager.option('--chunksize', help="Delete entries in chunks of the given size "
                                          "to avoid deadlocks")
def rotate_audit(highwatermark=10000, lowwatermark=5000, age=0, config=None,
                 dryrun=False, chunksize=None):
    """
    Clean the SQL audit log.

    You can either clean the audit log based on the number of entries of
    based on the age of the entries.

    Cleaning based on number of entries:

    If more than 'highwatermark' entries are in the audit log old entries
    will be deleted, so that 'lowwatermark' entries remain.

    Cleaning based on age:

    Entries older than the specified number of days are deleted.

    Cleaning based on config file:

    You can clean different type of entries with different ages or watermark.
    See the documentation for the format of the config file
    """
    metadata = MetaData()
    # CLI options arrive as strings (or None)
    highwatermark = int(highwatermark or 10000)
    lowwatermark = int(lowwatermark or 5000)
    if chunksize is not None:
        chunksize = int(chunksize)

    default_module = "privacyidea.lib.auditmodules.sqlaudit"
    token_db_uri = app.config.get("SQLALCHEMY_DATABASE_URI")
    audit_db_uri = app.config.get("PI_AUDIT_SQL_URI", token_db_uri)
    audit_module = app.config.get("PI_AUDIT_MODULE", default_module)
    if audit_module != default_module:
        raise Exception("We only rotate SQL audit module. You are using %s" %
                        audit_module)
    if config:
        print("Cleaning up with config file.")
    elif age:
        age = int(age)
        print("Cleaning up with age: {0!s}.".format(age))
    else:
        print("Cleaning up with high: {0!s}, low: {1!s}.".format(highwatermark,
                                                                 lowwatermark))

    engine = create_engine(audit_db_uri)
    # create a configured "Session" class
    session = sessionmaker(bind=engine)()
    # create a Session
    metadata.create_all(engine)
    if config:
        with open(config, 'r') as f:
            # Use safe_load: the rotation config only contains plain
            # mappings, and yaml.load without an explicit Loader can execute
            # arbitrary constructors and raises TypeError on PyYAML >= 6.
            yml_config = yaml.safe_load(f)
        auditlogs = session.query(LogEntry).all()
        delete_list = []
        for log in auditlogs:
            print("investigating log entry {0!s}".format(log.id))
            for rule in yml_config:
                # Each rule has a "rotate" age in days plus regex conditions
                # on audit columns; ALL conditions must match.
                age = int(rule.get("rotate"))
                rotate_date = datetime.datetime.now() - datetime.timedelta(days=age)

                match = False
                for key in rule.keys():
                    if key not in ["rotate"]:
                        search_value = rule.get(key)
                        print(" + searching for {0!r} in {1!s}".format(search_value,
                                                                       getattr(LogEntry, key)))
                        audit_value = getattr(log, key) or ""
                        m = re.search(search_value, audit_value)
                        if m:
                            # it matches!
                            print(" + -- found {0!r}".format(audit_value))
                            match = True
                        else:
                            # It does not match, we continue to next rule
                            print(" + NO MATCH - SKIPPING rest of conditions!")
                            match = False
                            break

                if match:
                    if log.date < rotate_date:
                        # Delete it!
                        print(" + Deleting {0!s} due to rule {1!s}".format(log.id, rule))
                        # Delete it
                        delete_list.append(log.id)
                    # skip all other rules and go to the next log entry
                    break
        if dryrun:
            print("If you only would let me I would clean up "
                  "{0!s} entries!".format(len(delete_list)))
        else:
            print("Cleaning up {0!s} entries.".format(len(delete_list)))
            delete_matching_rows(session, LogEntry.__table__,
                                 LogEntry.id.in_(delete_list), chunksize)
    elif age:
        now = datetime.datetime.now() - datetime.timedelta(days=age)
        print("Deleting entries older than {0!s}".format(now))
        criterion = LogEntry.date < now
        if dryrun:
            r = LogEntry.query.filter(criterion).count()
            print("Would delete {0!s} entries.".format(r))
        else:
            r = delete_matching_rows(session, LogEntry.__table__, criterion, chunksize)
            print("{0!s} entries deleted.".format(r))
    else:
        count = session.query(LogEntry.id).count()
        last_id = 0
        for l in session.query(LogEntry.id).order_by(desc(LogEntry.id)).limit(1):
            last_id = l[0]
        print("The log audit log has %i entries, the last one is %i" % (count,
                                                                        last_id))
        # deleting old entries
        if count > highwatermark:
            print("More than %i entries, deleting..." % highwatermark)
            cut_id = last_id - lowwatermark
            # delete all entries less than cut_id
            print("Deleting entries smaller than %i" % cut_id)
            criterion = LogEntry.id < cut_id
            if dryrun:
                r = LogEntry.query.filter(criterion).count()
            else:
                r = delete_matching_rows(session, LogEntry.__table__, criterion, chunksize)
            print("{0!s} entries deleted.".format(r))


@contextlib.contextmanager
def smartopen(filename):
    """
    Context manager yielding a writable file handle.

    Opens *filename* for writing, or yields ``sys.stdout`` when the name is
    empty/None or the conventional dash ('-'). Only real files are closed.
    """
    use_stdout = not filename or filename == '-'
    fh = sys.stdout if use_stdout else open(filename, 'w')
    try:
        yield fh
    finally:
        if not use_stdout:
            fh.close()


@audit_manager.option('--timelimit', '-t', help="Limit the dumped audit entries to a certain "
                                                "period (i.e. '5d' or '3h' for the entries from "
                                                "the last five days or three hours. By default "
                                                "all audit entries will be dumped.")
@audit_manager.option('--filename', '-f', help="Name of the 'csv' file to dump the audit entries "
                                               "into. By default write to stdout.", default='-')
def dump(filename, timelimit=None):
    """Dump the audit log in csv format."""
    limit = parse_timedelta(timelimit) if timelimit else None
    audit_obj = getAudit(app.config)
    with smartopen(filename) as out:
        for row in audit_obj.csv_generator(timelimit=limit):
            out.write(row)


@resolver_manager.command
def create(name, rtype, filename):
    """
    Create a new resolver with name and type (ldapresolver, sqlresolver).
    Read the necessary resolver parameters from the filename. The file should
    contain a python dictionary.

    :param name: The name of the resolver
    :param rtype: The type of the resolver like ldapresolver or sqlresolver
    :param filename: The name of the config file.
    :return:
    """
    from privacyidea.lib.resolver import save_resolver

    with open(filename, 'r') as config_file:
        config_text = config_file.read()

    # The file is parsed as a python literal; no code is executed.
    resolver_params = ast.literal_eval(config_text)
    resolver_params.update({"resolver": name, "type": rtype})
    save_resolver(resolver_params)


@resolver_manager.command
def create_internal(name):
    """
    This creates a new internal, editable sqlresolver. The users will be
    stored in the token database in a table called 'users_<name>'. You can then
    add this resolver to a new realm using the command 'pi-manage.py realm'.
    """
    from privacyidea.lib.resolver import save_resolver
    sqluri = app.config.get("SQLALCHEMY_DATABASE_URI")
    sqlelements = sqluri.split("/")
    # mysql://user:password@localhost/pi
    # sqlite:////home/cornelius/src/privacyidea/data.sqlite
    sql_driver = sqlelements[0][:-1]  # scheme without the trailing ':'
    user_pw_host = sqlelements[2]
    database = "/".join(sqlelements[3:])
    username = ""
    password = ""
    host = ""
    # determine host and user
    hostparts = user_pw_host.split("@")
    if len(hostparts) > 2:
        print("Invalid database URI: %s" % sqluri)
        sys.exit(2)
    elif len(hostparts) == 1:
        host = hostparts[0] or "/"
    elif len(hostparts) == 2:
        host = hostparts[1] or "/"
        # split username and password. Only split on the first ':' so that
        # passwords containing a colon are preserved.
        userparts = hostparts[0].split(":", 1)
        if len(userparts) == 2:
            username, password = userparts
        else:
            username = userparts[0]
    # now we can create the resolver
    params = {
        'resolver': name,
        'type': "sqlresolver",
        'Server': host,
        'Driver': sql_driver,
        'User': username,
        'Password': password,
        'Database': database,
        'Table': 'users_' + name,
        'Limit': '500',
        'Editable': '1',
        'Map': '{"userid": "id", "username": "username", '
               '"email":"email", "password": "password", '
               '"phone":"phone", "mobile":"mobile", "surname":"surname", '
               '"givenname":"givenname", "description": "description"}'}
    save_resolver(params)

    # Now we create the database table
    from sqlalchemy import create_engine
    from sqlalchemy import Table, MetaData, Column
    from sqlalchemy import Integer, String
    engine = create_engine(sqluri)
    metadata = MetaData()
    Table('users_%s' % name,
          metadata,
          Column('id', Integer, primary_key=True),
          Column('username', String(40), unique=True),
          Column('email', String(80)),
          Column('password', String(255)),
          Column('phone', String(40)),
          Column('mobile', String(40)),
          Column('surname', String(40)),
          Column('givenname', String(40)),
          Column('description', String(255)))
    metadata.create_all(engine)


def r_export(filename=None, name=None, print_passwords=False):
    """
    Export the resolver, specified by 'resolver' to a file. If no resolver name
    is given, all resolver configurations are exported. By default, the content is censored.
    This behavior may be changed by 'print_passwords'.
    If the filename is omitted, the resolvers are written to stdout.
    """
    # Delegate to the generic configuration exporter.
    export_args = {"conftype": "resolver", "filename": filename,
                   "name": name, "print_passwords": print_passwords}
    conf_export(**export_args)


def r_import(filename=None, cleanup=None, update=False):
    """
    Import the resolvers from a json file. Existing resolvers are skipped by default.
    If 'update' is specified the configuration of any existing resolver is updated.
    Values given as __CENSORED__ (like e.g. passwords) are not touched during the update.
    """
    # Todo: Support the cleanup option to remove all resolvers which do not exist in the imported file
    import_args = {"conftype": "resolver", "filename": filename,
                   "cleanup": cleanup, "update": update}
    conf_import(**import_args)


# unfortunately it is not possible in flask_script to add a command with a
# different name and options. So we create an appropriate command class.
class ListResolver(Command):
    """
    Command class to list the available resolvers and the type.
    """
    option_list = (
        Option('-v', '--verbose',
               help="Verbose output - also print the configuration of the resolvers.",
               dest="verbose", action="store_true"),
    )

    def run(self, verbose=False):
        from privacyidea.lib.resolver import get_resolver_list
        for name, resolver in get_resolver_list().items():
            print("{0!s:16} - ({1!s})".format(name, resolver.get("type")))
            if not verbose:
                continue
            print("." * 32)
            for key, value in resolver.get("data", {}).items():
                # never show credentials in clear text
                if key.lower() in ["bindpw", "password"]:
                    value = "xxxxx"
                print("{0!s:>24}: {1!r}".format(key, value))
            print("")


# Register the command class under the name 'list' (see comment above).
resolver_manager.add_command('list', ListResolver)


def list_realms():
    """
    list the available realms
    """
    from privacyidea.lib.realm import get_realms
    for name, realm_data in get_realms().items():
        # print the realm name with the names of its attached resolvers
        resolvernames = [res.get("name") for res in realm_data.get("resolver")]
        print("%16s: %s" % (name, resolvernames))


# Register the realm listing under the sub-command name 'list'.
realm_manager.add_command('list', Command(list_realms))


@realm_manager.command
def create(name, resolvers):
    """
    Create a new realm.
    This will create a new realm with the given resolver
    or a comma-separated list of resolvers. An existing realm
    with the same name will be replaced.
    """
    from privacyidea.lib.realm import set_realm
    resolver_list = resolvers.split(",")
    set_realm(name, resolver_list)


@realm_manager.command
def delete(realm):
    """
    Delete the given realm
    """
    # import locally to keep CLI start-up fast
    from privacyidea.lib import realm as realm_lib
    realm_lib.delete_realm(realm)


@realm_manager.command
def set_default(realm):
    """
    Set the given realm to default
    """
    from privacyidea.lib import realm as realm_lib
    realm_lib.set_default_realm(realm)


@realm_manager.command
def clear_default():
    """
    Unset the default realm
    """
    from privacyidea.lib import realm as realm_lib
    # passing None removes the default realm setting
    realm_lib.set_default_realm(None)


# Event interface


def list_events():
    """
    List events
    """
    conf = EventConfiguration()
    header_fmt = "{0:7} {4:4} {1:30}\t{2:20}\t{3}"
    row_fmt = "[{0!s:>5}] {4:4} {1:30}\t{2:20}\t{3}"
    print(header_fmt.format("Active", "Name", "Module", "Action", "ID"))
    print("=" * 90)
    for ev in conf.events:
        # the event name is truncated to 30 characters to keep the table tidy
        print(row_fmt.format(ev.get("active"),
                             ev.get("name")[0:30],
                             ev.get("handlermodule"),
                             ev.get("action"), ev.get("id"), ))


# Register the event listing under the sub-command name 'list'.
event_manager.add_command('list', Command(list_events))


@event_manager.command
def enable(eid):
    """
    Enable an event by ID.
    """
    # enable_event() defaults to enabling
    result = enable_event(eid)
    print(result)


@event_manager.command
def disable(eid):
    """
    Disable an event by ID.
    """
    result = enable_event(eid, enable=False)
    print(result)


@event_manager.command
def delete(eid):
    """
    Delete an event by ID.
    """
    result = delete_event(eid)
    print(result)


@event_manager.command
def e_export(filename=None, name=None):
    """
    Export the specified event or all events to a file.
    If the filename is omitted, the event configurations are written to stdout.
    """
    conf_export(conftype="event", name=name, filename=filename)


@event_manager.command
def e_import(filename=None, cleanup=False, update=False):
    """
    Import the events from a file.
    If 'cleanup' is specified the existing events are deleted before the
    events from the file are imported.
    """
    conf_import(conftype="event", update=update, cleanup=cleanup,
                filename=filename)


# Policy interface

def list_policies():
    """
    Print a table of all policies: active flag, name and scope.
    """
    for pol in PolicyClass().list_policies():
        if pol is None:
            continue
        break
    # header first, then one line per policy
    print("Active \t Name \t Scope")
    print("=" * 40)
    for pol in PolicyClass().list_policies():
        print("%s \t %s \t %s" % (pol.get("active"), pol.get("name"),
                                  pol.get("scope")))


# Register the policy listing under the sub-command name 'list'.
policy_manager.add_command('list', Command(list_policies))


@policy_manager.command
def enable(name):
    """
    Enable a policy by name.
    """
    result = enable_policy(name)
    print(result)


@policy_manager.command
def disable(name):
    """
    Disable a policy by name.
    """
    result = enable_policy(name, False)
    print(result)


@policy_manager.command
def delete(name):
    """
    Delete a policy by name.
    """
    result = delete_policy(name)
    print(result)


@policy_manager.command
def p_export(filename=None, name=None):
    """
    Export the specified policy or all policies to a file.
    If the filename is omitted, the policies are written to stdout.
    """
    conf_export(conftype="policy", name=name, filename=filename)


@policy_manager.command
def p_import(filename=None, cleanup=False, update=False):
    """
    Import the policies from a file.
    If 'cleanup' is specified the existing policies are deleted before the
    policies from the file are imported.
    """
    conf_import(conftype="policy", update=update, cleanup=cleanup,
                filename=filename)


@policy_manager.command
def create(name, scope, action, filename=None):
    """
    create a new policy. 'FILENAME' must contain a dictionary and its content
    takes precedence over CLI parameters.
    I.e. if you are specifying a FILENAME,
    the parameters name, scope and action need to be specified, but are ignored.

    Note: This will only create one policy per file.

    :param name: Name of the policy (fallback if the file defines none)
    :param scope: Scope of the policy (fallback if the file defines none)
    :param action: Action of the policy (fallback if the file defines none)
    :param filename: Optional file containing a python dict with the policy
    """
    if filename:
        try:
            with open(filename, 'r') as f:
                contents = f.read()

            # The file is parsed as a python literal; no code is executed.
            params = ast.literal_eval(contents)

            # For name, scope and action the file value wins over the CLI
            # value. NOTE(review): when the file value equals the CLI value,
            # the "not defined in file" message is printed even though the
            # key exists in the file; the resulting policy is correct anyway.
            if params.get("name") and params.get("name") != name:
                print("Found name '{0!s}' in file, will use that instead of "
                      "'{1!s}'.".format(params.get("name"), name))
            else:
                print("name not defined in file, will use the cli value "
                      "{0!s}.".format(name))
                params["name"] = name

            if params.get("scope") and params.get("scope") != scope:
                print("Found scope '{0!s}' in file, will use that instead of "
                      "'{1!s}'.".format(params.get("scope"), scope))
            else:
                print("scope not defined in file, will use the cli value "
                      "{0!s}.".format(scope))
                params["scope"] = scope

            if params.get("action") and params.get("action") != action:
                print("Found action in file: '{0!s}', will use that instead "
                      "of: '{1!s}'.".format(params.get("action"), action))
            else:
                print("action not defined in file, will use the cli value "
                      "{0!s}.".format(action))
                params["action"] = action

            r = set_policy(params.get("name"),
                           scope=params.get("scope"),
                           action=params.get("action"),
                           realm=params.get("realm"),
                           resolver=params.get("resolver"),
                           user=params.get("user"),
                           time=params.get("time"),
                           client=params.get("client"),
                           active=params.get("active", True),
                           adminrealm=params.get("adminrealm"),
                           adminuser=params.get("adminuser"),
                           check_all_resolvers=params.get(
                               "check_all_resolvers", False))
            return r

        except Exception as _e:
            # Broad catch: any failure (I/O, parse error, set_policy error)
            # is reported but not re-raised, so the CLI exits cleanly.
            print("Unexpected error: {0!s}".format(sys.exc_info()[1]))

    else:
        r = set_policy(name, scope, action)
        return r


@api_manager.option('-r', '--role',
                    help="The role of the API key can either be "
                         "'admin' or 'validate' to access the admin "
                         "API or the validate API.",
                    default=ROLE.ADMIN)
@api_manager.option('-d', '--days',
                    help='The number of days the access token should be valid.'
                         ' Defaults to 365.',
                    default=365)
@api_manager.option('-R', '--realm',
                    help='The realm of the admin. Defaults to "API"',
                    default="API")
@api_manager.option('-u', '--username',
                    help='The username of the admin.')
def createtoken(role, days, realm, username):
    """
    Create an API authentication token
    for administrative or validate use.
    Possible roles are "admin" or "validate".
    """
    if role not in ("admin", "validate"):
        print("ERROR: The role must be 'admin' or 'validate'!")
        sys.exit(1)
    # a random username is generated if none was given
    username = username or geturandom(hex=True)
    secret = app.config.get("SECRET_KEY")
    validity = timedelta(days=int(days))
    claims = {"username": username,
              "realm": realm,
              "nonce": geturandom(hex=True),
              "role": role,
              "authtype": "API",
              "exp": datetime.datetime.utcnow() + validity,
              "rights": "TODO"}
    token = jwt.encode(claims, secret)
    print("Username:   {0!s}".format(username))
    print("Realm:      {0!s}".format(realm))
    print("Role:       {0!s}".format(role))
    print("Validity:   {0!s} days".format(days))
    print("Auth-Token: {0!s}".format(token))


def import_tokens(file, tokenrealm=None):
    """
    Import Tokens from a CSV file

    :param file: The name of the CSV file to import (parseOATHcsv format)
    :param tokenrealm: Optional realm the imported tokens are assigned to
    """
    with open(file, "r") as f:
        contents = f.read()
    tokens = parseOATHcsv(contents)
    # import_token expects a *list* of realms
    tokenrealms = [tokenrealm] if tokenrealm else []
    for i, serial in enumerate(tokens, 1):
        print(u"{0!s}/{1!s} Importing token {2!s}".format(i, len(tokens), serial))
        # BUGFIX: the prepared 'tokenrealms' list is now passed; previously
        # the raw 'tokenrealm' string was handed to the 'tokenrealms' kwarg.
        import_token(serial, tokens[serial], tokenrealms=tokenrealms)


# Register the token CSV import under the sub-command name 'import'.
token_manager.add_command('import', Command(import_tokens))

# NOTE(review): these imports should live at the top of the file; kept here
# to stay close to the exporter/importer commands that use them.
from functools import partial
import json

# Maps an output format name to a callable that serializes a dict to text.
exp_fmt_dict = {
    'python': str,
    'json': partial(json.dumps, indent=2),
    'yaml': yaml.safe_dump}


@config_manager.option('-o', '--output', type=argparse.FileType('w'),
                       default=sys.stdout,
                       help='The filename to export the data to. Write to '
                            '<stdout> if this argument is not given.')
@config_manager.option('-f', '--format', default='python', dest='fmt',
                       choices=exp_fmt_dict.keys(),
                       help='Output format, default is \'python\'')
# TODO: we need to have an eye on the help output, it might get less readable
#  when more exporter functions are added
@config_manager.option('-t', '--types', nargs='*', default=['all'],
                       choices=['all'] + list(EXPORT_FUNCTIONS.keys()),
                       help='The types of configuration to export. By default create '
                            'export using all available exporter types. Currently registered '
                            'exporter types are: '
                            '{0!s}'.format(', '.join(['all'] + list(EXPORT_FUNCTIONS.keys()))),
                       )
@config_manager.option('-n', '--name',
                       help='The name of the configuration object to export (default: export all)')
def exporter(output, fmt, types, name=None):
    """
    Export server configuration using specific or all registered exporter types.
    """
    selected = EXPORT_FUNCTIONS.keys() if 'all' in types else types

    # collect the configuration of every selected exporter type
    out = {typ: EXPORT_FUNCTIONS[typ](name=name) for typ in selected}

    if out:
        serialize = exp_fmt_dict.get(fmt.lower())
        output.write(serialize(out) + '\n')


# Maps an input format name to a callable parsing text into a dict.
# The importer tries each of these parsers in turn until one succeeds.
imp_fmt_dict = {
    'python': ast.literal_eval,
    'json': json.loads,
    'yaml': yaml.safe_load}


@config_manager.option('-i', '--input', type=argparse.FileType('r'),
                       default=sys.stdin, dest='infile',
                       help='The filename to import the data from. Try to read '
                            'from <stdin> if this argument is not given.')
@config_manager.option('-t', '--types', nargs='*', default=['all'],
                       choices=['all'] + list(IMPORT_FUNCTIONS.keys()),
                       help='The types of configuration to import. By default import all '
                            'available data if a corresponding importer type exists. '
                            'Currently registered importer types are: '
                            '{0!s}'.format(', '.join(['all'] + list(IMPORT_FUNCTIONS.keys()))))
@config_manager.option('-n', '--name',
                       help='The name of the configuration object to import (default: import all)')
def importer(infile, types, name=None):
    """
    Import server configuration using specific or all registered importer types.
    """
    data = None
    imp_types = IMPORT_FUNCTIONS.keys() if 'all' in types else types

    content = infile.read()

    # try the known parsers in turn until one of them succeeds
    for fmt in imp_fmt_dict:
        try:
            data = imp_fmt_dict[fmt](content)
            break
        except (SyntaxError, ValueError, yaml.error.YAMLError) as _e:
            # BUGFIX: ast.literal_eval raises ValueError for malformed nodes
            # (not only SyntaxError) and json.decoder.JSONDecodeError is a
            # ValueError subclass, so ValueError covers both.
            continue
    if not data:
        print('Could not read input format! '
              'Accepting: {0!s}.'.format(', '.join(imp_fmt_dict.keys())),
              file=sys.stderr)
        sys.exit(1)

    # we need to go through the importer functions based on priority
    for typ, value in sorted(IMPORT_FUNCTIONS.items(), key=lambda x: x[1]['prio']):
        if typ in imp_types:
            if typ in data:
                print('Importing configuration type "{0!s}".'.format(typ))
                value['func'](data[typ], name=name)


# conf export menu
def _get_conf_event(name=None, print_passwords=None):
    """ helper function for conf_export """
    all_events = EventConfiguration().events
    if not name:
        return all_events
    # only the events with the requested name
    return [e for e in all_events if e.get("name") == name]


def _get_conf_resolver(name=None, print_passwords=False):
    """ helper function for conf_export """
    from privacyidea.lib.resolver import get_resolver_list
    # passwords are censored unless explicitly requested otherwise
    resolvers = get_resolver_list(filter_resolver_name=name,
                                  censor=not print_passwords)
    return list(resolvers.values())


def _get_conf_policy(name=None, print_passwords=None):
    """ helper function for conf_export """
    # print_passwords is accepted for interface symmetry and ignored
    return PolicyClass().list_policies(name=name)


def conf_export(conftype=DEFAULT_CONFTYPE_LIST, filename=None, name=None, print_passwords=False):
    """
    Export configurations to a file or write them to stdout if no filename is given.

    :param conftype: A configuration type (e.g. "resolver", "event", "policy")
        or a list/tuple of such types
    :param filename: Write the export to this file; print to stdout if omitted
    :param name: Only export the configuration object with this name
    :param print_passwords: If True, resolver passwords are not censored
    """
    import pprint
    pp = pprint.PrettyPrinter(indent=4)

    ret_dict = {}
    if isinstance(conftype, (list, tuple)):
        conftype_list = conftype
    else:
        conftype_list = [conftype]

    for conftype in conftype_list:
        # BUGFIX: reset for every type. Previously 'conf' was initialized
        # once before the loop, so a type without a '_get_conf_*' helper
        # silently reused the previous iteration's configuration.
        conf = None
        getter = globals().get('_get_conf_' + conftype)
        if getter:
            conf = getter(name=name, print_passwords=print_passwords)
        if not conf:
            print("The requested {0!s} configuration is empty.".format(conftype), file=sys.stderr)
        ret_dict[conftype] = conf

    ret_str = pp.pformat(ret_dict)
    if filename:
        with open(filename, 'w') as f:
            f.write(ret_str)
    else:
        print(ret_str)


class FullExport(Command):
    """
    This action exports resolvers, policies and event handlers to standard output or to a file and optionally
    compresses them as tar.gz archive
    """
    option_list = (
        Option("--print_passwords", "-p", action="store_true",
               help="Print the passwords used in the resolver configuration. "
                    "This will overwrite existing passwords on import."),
        Option("--archive", "-a", action="store_true",
               help="Compress the created config-backup as tar.gz archive instead "
                    "of printing to standard out."),
        # BUGFIX: this option takes a path argument, it is not a flag.
        # With action="store_true" the target directory was the boolean True,
        # so the backup ended up in a directory literally named "True".
        Option("--directory", "-d",
               help="Directory where the backup will be stored.")
    )

    def run(self, directory=None, archive=False, print_passwords=False):
        """Write the configuration export, optionally into a tar.gz archive.

        :param directory: Target directory for the backup file (default './')
        :param archive: If True, pack the backup file into a tar.gz archive
        :param print_passwords: Export resolver passwords in clear text
        """
        print("Exporting privacyIDEA configuration.", file=sys.stderr)
        if archive or directory:
            from socket import gethostname
            date_str = datetime.datetime.now().strftime("%Y%m%d-%H%M")
            base_name = "privacyidea-config-backup"
            hostname = gethostname()
            if not directory:
                directory = './'
            else:
                # create the target directory without spawning a subprocess
                os.makedirs(directory, exist_ok=True)
            config_backup_file_base = "%s/%s-%s-%s" % (directory, base_name, hostname, date_str)
            config_backup_file = config_backup_file_base + ".py"
            conf_export(filename=config_backup_file, print_passwords=print_passwords)
            if archive:
                config_backup_archive = config_backup_file_base + ".tar.gz"
                with tarfile.open(config_backup_archive, "w:gz") as tar:
                    tar.add(config_backup_file)
                # cleanup: remove the plain file once it is safely archived
                if tarfile.is_tarfile(config_backup_archive):
                    os.remove(config_backup_file)
        else:
            conf_export(filename=None, print_passwords=print_passwords)


# Register the export sub-commands: the full backup and the single types.
config_export_manager.add_command('full', FullExport)
config_export_manager.add_command('policy', Command(p_export))
config_export_manager.add_command('resolver', Command(r_export))
config_export_manager.add_command('event', Command(e_export))


# conf import menu

def _import_conf_resolver(config_list, cleanup=False, update=False):
    """
    import resolver configuration from a resolver list
    """
    if cleanup:
        print("No cleanup for resolvers implemented")

    for config in config_list:
        name = config.get("resolvername")
        if get_resolver_list(filter_resolver_name=name):
            if not update:
                print("Resolver {0!s} exists and -u is not specified, "
                      "skipping import.".format(name))
                continue
            action_str = "Updated"
        else:
            action_str = "Added"

        # now we can create the resolver
        params = {'resolver': name, 'type': config.get("type")}
        params.update(config.get("data"))
        r = save_resolver(params)
        print("{0!s} resolver {1!s} with result {2!s}".format(action_str, name, r))


def _import_conf_event(config_list, cleanup=False, update=False):
    """
    import event configuration from an event list

    :param config_list: list of event configuration dictionaries
    :param cleanup: delete all existing events before importing
    :param update: update an existing event of the same name instead of skipping
    """
    cls = EventConfiguration()
    if cleanup:
        print("Cleanup old events.")
        for event in cls.events:
            name = event.get("name")
            r = delete_event(event.get("id"))
            print("Deleted event '{0!s}' with result {1!s}".format(name, r),
                  file=sys.stderr)

    for event in config_list:
        action_str = "Added"
        name = event.get("name")
        # BUGFIX: match the event name exactly. The previous substring check
        # ('name in e.get("name")') could match an unrelated event and thus
        # update the wrong entry.
        # NOTE(review): cls.events is read again after a cleanup — assumes the
        # property reflects the deletions; verify EventConfiguration caching.
        events_with_name = [e for e in cls.events if e.get("name") == name]
        if events_with_name:
            event_id = events_with_name[0].get("id")
            if not update:
                print("Event {0!s} exists and -u is not specified, "
                      "skipping import.".format(name))
                continue
            action_str = "Updated"
        else:
            # id=None lets set_event create a new entry
            event_id = None
        r = set_event(name, event.get("event"), event.get("handlermodule"),
                      event.get("action"),
                      conditions=event.get("conditions"),
                      ordering=event.get("ordering"),
                      options=event.get("options"),
                      active=event.get("active"),
                      position=event.get("position", "post"),
                      id=event_id)
        print("{0!s} event {1!s} with result {2!s}".format(action_str, name, r))


def _import_conf_policy(config_list, cleanup=False, update=False):
    """
    import policy configuration from a policy list
    """
    cls = PolicyClass()

    if cleanup:
        print("Cleanup old policies.")
        for policy in cls.list_policies():
            name = policy.get("name")
            r = delete_policy(name)
            print("Deleted policy {0!s} with result {1!s}".format(name, r))

    for policy in config_list:
        name = policy.get("name")
        if cls.list_policies(name=name):
            if not update:
                print("Policy {0!s} exists and -u is not specified, "
                      "skipping import.".format(name))
                continue
            action_str = "Updated"
        else:
            action_str = "Added"
        r = set_policy(name,
                       action=policy.get("action"),
                       active=policy.get("active", True),
                       adminrealm=policy.get("adminrealm"),
                       adminuser=policy.get("adminuser"),
                       check_all_resolvers=policy.get("check_all_resolvers",
                                                      False),
                       client=policy.get("client"),
                       conditions=policy.get("conditions"),
                       pinode=policy.get("pinode"),
                       priority=policy.get("priority"),
                       realm=policy.get("realm"),
                       resolver=policy.get("resolver"),
                       scope=policy.get("scope"),
                       time=policy.get("time"),
                       user=policy.get("user"))
        print("{0!s} policy {1!s} with result {2!s}".format(action_str, name, r))


def conf_import(filename=None, conftype=None, cleanup=False, update=False):
    """
    import privacyIDEA configuration from file
    """
    # read from the given file or fall back to stdin
    if filename:
        with open(filename, 'r') as f:
            contents = f.read()
    else:
        filename = "Standard input"
        contents = sys.stdin.read()

    parsed = ast.literal_eval(contents)

    # be backwards-compatible. In old versions of pi-manage config were exported to
    # individual files as python list without dict key
    if isinstance(parsed, list):
        parsed = {conftype: parsed}
        conftype_list = [conftype]
    elif conftype:
        conftype_list = [conftype]
    else:
        conftype_list = list(parsed.keys())

    for conftype in conftype_list:
        print("Importing {0!s} from {1!s}".format(conftype, filename))
        config_list = parsed[conftype]
        # dispatch to the matching '_import_conf_<type>' helper, if any
        importer_func = globals().get('_import_conf_' + conftype)
        if importer_func:
            importer_func(config_list, cleanup=cleanup, update=update)


class FullImport(Command):
    """
    This option reads configuration-backups from a plain file, a tar.gz archive or from standard input and imports
    the contained resolvers, policies and event handlers.
    """
    option_list = (
        Option("--file", "-f", dest="file",
               help="The file to import. It can be a plain python file or a tar.gz archive "
                    "containing a configuration backup file with a name containing "
                    "'privacyidea-config-backup'."),
        Option("--update", "-u", action="store_true",
               help="Update the existing configuration. New policies, resolvers and events will also "
                    "be added."),
        Option("--cleanup", "-c", action="store_true",
               help="The configuration on the target machine will be wiped before the import."),
        Option("--wipe", "-w", action="store_true", dest="cleanup",
               help="Wipe is an alias for cleanup."),
    )

    def run(self, file=None, cleanup=False, update=False):
        # Without a file argument conf_import() reads from standard input.
        # NOTE(review): if 'file' is given but does not exist, nothing happens
        # and no error is printed — consider reporting that case.
        if file:
            if os.path.isfile(file):
                if tarfile.is_tarfile(file):
                    # collect only members that look like a config backup file
                    tarinfo_objects = []
                    tar = tarfile.open(file)
                    for member in tar.members:
                        if re.search(r"privacyidea-config-backup", member.name):
                            tarinfo_objects.append(member)
                    # NOTE(review): extractall() on an untrusted archive can
                    # write outside the working directory (path traversal);
                    # consider validating member.name or using tarfile's
                    # filter="data" (Python 3.12+) — TODO confirm.
                    tar.extractall(members=tarinfo_objects)
                    tar.close()
                    for tarinfo in tarinfo_objects:
                        conf_import(filename=tarinfo.name, cleanup=cleanup, update=update)
                        # cleanup extracted files
                        os.remove(tarinfo.name)
                else:
                    conf_import(filename=file, cleanup=cleanup, update=update)
        else:
            conf_import(cleanup=cleanup, update=update)


# Register the import sub-commands: the full backup and the single types.
config_import_manager.add_command('full', FullImport)
config_import_manager.add_command('policy', Command(p_import))
config_import_manager.add_command('resolver', Command(r_import))
config_import_manager.add_command('event', Command(e_import))

if __name__ == '__main__':
    # We add one blank line, to separate the messages from the initialization
    # The banner is printed to stderr so piped command output stays clean.
    print("""
             _                    _______  _______
   ___  ____(_)  _____ _______ __/  _/ _ \/ __/ _ |
  / _ \/ __/ / |/ / _ `/ __/ // // // // / _// __ |
 / .__/_/ /_/|___/\_,_/\__/\_, /___/____/___/_/ |_|
/_/                       /___/
{0!s:>51}
    """.format('v{0!s}'.format(get_version_number())), file=sys.stderr)
    # hand control to flask_script's dispatcher
    manager.run()
